An Android Hardware Encoding Example

As most Android developers know, video encoding on Android comes in two flavors: software and hardware. Software encoding is done in code, typically with FFmpeg or x264; it occupies the CPU, is relatively inefficient, and makes the device heat up during long encoding sessions. Hardware encoding hands the work to dedicated silicon such as the GPU, a DSP, or an FPGA, so it barely touches the CPU and is far more efficient. Live-streaming and video-surveillance projects, which have to process video continuously for hours or even days, therefore use hardware encoding first and fall back to software encoding only when the hardware path is unavailable. On Android, hardware encoding is implemented mainly through the MediaCodec class. Below is a hardware-encoding helper I have wrapped up; it can be used as-is.

import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;

import java.nio.ByteBuffer;

// H.264 hardware encoder built on MediaCodec (pre-API-21 buffer-array API).
// Media is the project's own output wrapper; it supplies addTimewatermark() and writeVideoData().
public class HardwareEncoder {

    private MediaCodec mediaCodec;
    private MediaCodec.BufferInfo mBufferInfo;
    private int supportColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
    private Media m_media;
    private int ysize;               // size of the Y plane (width * height)
    private int uvsize;              // size of one chroma plane (ysize / 4)
    private byte[] m_info = null;    // cached codec config (SPS/PPS) from the first output buffer
    private byte[] ubuffer;
    private byte[] vbuffer;
    private byte[] outFrame = null;

    @SuppressLint({"NewApi"})
    public HardwareEncoder(Media media, int width, int height, int framerate, int bitrate, int iFrameInterval)
            throws Exception {
        ysize = width * height;
        uvsize = ysize / 4;
        ubuffer = new byte[uvsize];
        vbuffer = new byte[uvsize];
        outFrame = new byte[width * height * 3 / 2];
        m_media = media;
        mBufferInfo = new MediaCodec.BufferInfo();
        supportColorFormat = getSupportColorFormat();

        // iFrameInterval is given in frames; MediaFormat expects seconds, so round up.
        int iFrame = iFrameInterval / framerate + (iFrameInterval % framerate == 0 ? 0 : 1);
        if (iFrame == 0) {
            iFrame = 1;
        }

        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, supportColorFormat);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrame);

        mediaCodec = MediaCodec.createEncoderByType("video/avc");
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    public void release() {
        if (mediaCodec != null) {
            mediaCodec.release();
            mediaCodec = null;
        }
    }

    // Feeds one NV21 frame to the encoder and drains at most one encoded frame.
    // flag == 0: convert according to the color format the encoder reported;
    // flag == 1: force planar (I420); flag == 2: force semi-planar (NV12).
    // Returns 0 on success, -1 if nothing was written, -2 if a key frame failed the NAL check.
    @SuppressLint({"NewApi"})
    public int sendVideoData(byte[] input, int flag, boolean needAddTimeWm) {
        if (mediaCodec == null) {
            return -1;
        }
        if (needAddTimeWm) {
            m_media.addTimewatermark(input);
        }
        try {
            if (flag == 0) {
                if (supportColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
                        || supportColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar) {
                    NV21toYUV420Planar(input);
                } else if (supportColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
                    NV21toYUV420SemiPlanar(input);
                }
            } else if (flag == 1) {
                NV21toYUV420Planar(input);
            } else if (flag == 2) {
                NV21toYUV420SemiPlanar(input);
            }

            // Feed the raw frame; the timestamp is wall-clock time in microseconds.
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(0L);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(input);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, System.currentTimeMillis() * 1000L, 0);
            }

            // Drain one encoded frame, waiting up to 200 ms.
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(mBufferInfo, 200000L);
            if (outputBufferIndex < 0) {
                return -1;
            }
            ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
            if (outputBuffer == null) {
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                return -1;
            }
            // The first output buffer is treated as the codec config (SPS/PPS) and cached.
            if (m_info == null) {
                m_info = new byte[mBufferInfo.size];
                outputBuffer.get(m_info);
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                return 0;
            }
            int len = 0;
            boolean isKeyFrame = false;
            if ((mBufferInfo.flags & 0x1) != 0) { // BUFFER_FLAG_SYNC_FRAME: key frame
                // Prepend the cached SPS/PPS so every key frame is self-contained.
                System.arraycopy(m_info, 0, outFrame, 0, m_info.length);
                outputBuffer.get(outFrame, m_info.length, mBufferInfo.size);
                len = m_info.length + mBufferInfo.size;
                isKeyFrame = true;
                // The NAL unit type after the 4-byte start code must be 5 (IDR slice).
                if ((outputBuffer.get(4) & 0x1F) != 5) {
                    mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                    return -2;
                }
            } else {
                outputBuffer.get(outFrame, 0, mBufferInfo.size);
                len = mBufferInfo.size;
            }
            if (len > 0) {
                m_media.writeVideoData(outFrame, 0, len, isKeyFrame);
            }
            mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
        } catch (Exception e) {
            return -1;
        }
        return 0;
    }

    // Swap the interleaved chroma bytes in place: NV21 (Y + VUVU...) -> NV12/YUV420SemiPlanar (Y + UVUV...).
    private void NV21toYUV420SemiPlanar(byte[] input) {
        int idx = ysize;
        for (int i = 0; i < uvsize; i++) {
            byte tmp = input[idx];
            input[idx] = input[idx + 1];
            input[idx + 1] = tmp;
            idx += 2;
        }
    }

    // De-interleave NV21 chroma (VUVU...) into planar I420/YUV420Planar (U plane, then V plane).
    private void NV21toYUV420Planar(byte[] input) {
        int idx = ysize;
        for (int i = 0; i < uvsize; i++) {
            vbuffer[i] = input[idx];
            ubuffer[i] = input[idx + 1];
            idx += 2;
        }
        System.arraycopy(ubuffer, 0, input, ysize, uvsize);
        System.arraycopy(vbuffer, 0, input, ysize + uvsize, uvsize);
    }

    // Finds the first "video/avc" encoder and returns the first YUV420 color format it supports, or -1.
    @SuppressLint({"NewApi"})
    private int getSupportColorFormat() {
        int numCodecs = MediaCodecList.getCodecCount();
        MediaCodecInfo codecInfo = null;
        for (int i = 0; i < numCodecs && codecInfo == null; i++) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (!info.isEncoder()) {
                continue;
            }
            for (String type : info.getSupportedTypes()) {
                if (type.equals("video/avc")) {
                    codecInfo = info;
                    break;
                }
            }
        }
        if (codecInfo == null) {
            return -1;
        }
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
        for (int colorFormat : capabilities.colorFormats) {
            switch (colorFormat) {
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
                    return colorFormat;
            }
        }
        return -1;
    }
}
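
For orientation, here is a minimal usage sketch. It is only a sketch under assumptions: the resolution, frame rate, bitrate, and key-frame interval are placeholder values, the helper methods are hypothetical, and the Media instance stands for the project's own output wrapper (the class that provides addTimewatermark() and writeVideoData()).

// Hypothetical helpers showing how the class is driven; all numeric parameters are placeholders.
static HardwareEncoder createPreviewEncoder(Media media) throws Exception {
    // 640x480 at 30 fps, 2 Mbps, a key frame roughly every 30 frames
    // (the constructor converts the frame count into the seconds-based i-frame interval).
    return new HardwareEncoder(media, 640, 480, 30, 2000000, 30);
}

static void onNv21Frame(HardwareEncoder encoder, byte[] nv21Frame) {
    // flag 0: convert according to the encoder's reported color format
    // (1 forces planar/I420, 2 forces semi-planar/NV12); the last argument
    // controls whether Media.addTimewatermark() stamps the frame first.
    int ret = encoder.sendVideoData(nv21Frame, 0, false);
    if (ret < 0) {
        // -1: nothing was written this call; -2: a key frame failed the NAL check.
        // Either way, keep feeding frames; call encoder.release() when the session ends.
    }
}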
The class above is the core of the hardware-encoding path: preview frames captured by the camera can be fed straight into sendVideoData() and encoded as-is. For the code that grabs preview frames from the Android Camera, see my article:
http://blog.csdn.net/u012874222/article/details/70216700
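
As a rough sketch of that wiring, assuming the legacy android.hardware.Camera API (the 640x480 preview size, the SurfaceHolder, and the helper method itself are placeholder assumptions; NV21 is the default preview-callback format):

// Hypothetical helper: opens the camera and feeds every NV21 preview frame to the encoder.
private Camera startCameraPreview(SurfaceHolder holder, final HardwareEncoder encoder)
        throws IOException {
    Camera camera = Camera.open();
    Camera.Parameters params = camera.getParameters();
    params.setPreviewSize(640, 480);              // must match the size passed to HardwareEncoder
    params.setPreviewFormat(ImageFormat.NV21);    // NV21 is the default preview format anyway
    camera.setParameters(params);
    camera.setPreviewDisplay(holder);
    camera.setPreviewCallback(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera cam) {
            // One NV21 frame per callback; flag 0 lets the encoder pick the right conversion.
            encoder.sendVideoData(data, 0, false);
        }
    });
    camera.startPreview();
    return camera;
}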