MediaCodec Intro


MediaFormat encapsulates the description of a media data format. Note that any key not marked as optional is mandatory; leaving a mandatory key unset will cause errors. See the MediaFormat documentation for the individual keys.
Pay special attention to the MediaFormat color format: support differs from phone to phone, and it is very easy to run into problems here. A minimal capability check is sketched below.
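One way to reduce that risk is to ask the encoder which color formats it actually supports before configuring it. The helper below is a minimal sketch written for this article, not framework API: pickColorFormat is a made-up name, and it assumes an API 21+ encoder created with MediaCodec.createEncoderByType("video/avc").

    // Minimal sketch: choose a color format the encoder reports as supported.
    // pickColorFormat() is a hypothetical helper, not part of the Android framework.
    private static int pickColorFormat(MediaCodec codec, String mime) {
        MediaCodecInfo.CodecCapabilities caps = codec.getCodecInfo().getCapabilitiesForType(mime);
        for (int format : caps.colorFormats) {
            // COLOR_FormatYUV420Flexible is widely advertised on API 21+,
            // but only use it if the encoder actually lists it
            if (format == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible) {
                return format;
            }
        }
        // otherwise fall back to the first format the encoder reports instead of guessing
        return caps.colorFormats[0];
    }

The camera example later in this article does the same enumeration inline and simply logs the reported values.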
You will also typically run into output-video orientation problems, mirroring when recording with the front camera, and so on; a common first step for the orientation part is sketched below.
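For the orientation problem, the usual first step is to tag the output file with a rotation hint so players display it upright. The snippet below is a rough sketch, not taken from the original code: createRotatedMuxer is a made-up helper, and the path and the 90-degree value are placeholder assumptions for a typical portrait, back-camera setup. Note that this only writes metadata; it does not rotate pixels, and front-camera mirroring still has to be handled where the frames are produced (for example by flipping the canvas or the YUV data before it reaches the encoder).

    // Rough sketch, not from the original code: tag the output MP4 with a playback rotation.
    // The helper name, the path, and the 90-degree value are placeholder assumptions.
    private MediaMuxer createRotatedMuxer(String outputPath) throws IOException {
        MediaMuxer muxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        // must be called before start(); it only writes metadata, the encoded pixels are untouched
        muxer.setOrientationHint(90);
        return muxer;
    }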

Encoding from a Surface

public class EncodeFromSurfaceActivity extends AppCompatActivity {
    @BindView(R.id.start)
    Button start;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_encode_from_surface);
        ButterKnife.bind(this);
    }

    @OnClick(R.id.start)
    public void onClick() {
        try {
            flow();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private void flow() throws IOException {
        final MediaMuxer mediaMuxer = new MediaMuxer(
                Environment.getExternalStorageDirectory().getAbsolutePath() + "/surface_out",
                MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 640, 480);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 1300 * 1000);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
        final MediaCodec mediaCodec = MediaCodec.createEncoderByType("video/avc");
        mediaCodec.setCallback(new MediaCodec.Callback() {
            int videoTrack;

            @Override
            public void onInputBufferAvailable(MediaCodec codec, int index) {
            }

            @Override
            public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) {
                ByteBuffer outputBuffer = codec.getOutputBuffer(index);
                System.out.println(outputBuffer.limit() + "===");
                // skip the codec-config buffer (its csd is already in the format passed to addTrack)
                // and empty buffers such as the one that only carries the EOS flag
                if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    mediaMuxer.writeSampleData(videoTrack, outputBuffer, info);
                }
                codec.releaseOutputBuffer(index, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    mediaCodec.release();
                    // stop() finalizes the file; releasing without it can leave the MP4 unplayable
                    mediaMuxer.stop();
                    mediaMuxer.release();
                    // to keep the flow easy to follow, inputSurface is not stored in a field,
                    // but it should be released here as well
//                    inputSurface.release();
                }
            }

            @Override
            public void onError(MediaCodec codec, MediaCodec.CodecException e) {
            }

            @Override
            public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
                videoTrack = mediaMuxer.addTrack(format);
                mediaMuxer.start();
            }
        });
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
//        method 1
        // must be called after configure() and before start()
        final Surface inputSurface = mediaCodec.createInputSurface();
//        method 2
//        final Surface inputSurface = MediaCodec.createPersistentInputSurface();
//        // must be called after configure() and before start()
//        mediaCodec.setInputSurface(inputSurface);
        mediaCodec.start();
        new Thread() {
            @Override
            public void run() {
                super.run();
                Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
                paint.setTextSize(60);
                paint.setColor(Color.RED);
                paint.setTextAlign(Paint.Align.CENTER);
                long startTime = System.currentTimeMillis();
                // draw a random number onto the input surface for roughly ten seconds
                while (System.currentTimeMillis() - startTime < 10 * 1000) {
                    Canvas canvas = inputSurface.lockCanvas(null);
                    canvas.drawColor(Color.WHITE);
                    canvas.drawText(String.valueOf(new Random().nextInt(100)),
                            canvas.getWidth() / 2, canvas.getHeight() / 2, paint);
                    inputSurface.unlockCanvasAndPost(canvas);
                    try {
                        Thread.sleep(60);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
                // with a Surface as input the input buffers are not accessible,
                // so this call is the way to send the EOS signal
                mediaCodec.signalEndOfInputStream();
                Log.i("tag", "finish");
            }
        }.start();
    }
}

Encoding from the Camera

public class EncodeFromCameraActivity extends AppCompatActivity {
    @BindView(R.id.start)
    Button start;
    @BindView(R.id.end)
    Button end;
    @BindView(R.id.preview)
    SurfaceView preview;

    private Camera camera;
    private MediaCodec mediaCodec;
    private MediaMuxer mediaMuxer;
    volatile boolean running = true;
    FileOutputStream fileOutputStream = null;

    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_encode_from_camera);
        ButterKnife.bind(this);
        try {
            mediaMuxer = new MediaMuxer(
                    Environment.getExternalStorageDirectory().getAbsolutePath() + "/camera_out",
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException e) {
            e.printStackTrace();
        }
        preview.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(SurfaceHolder holder) {
                try {
                    initCamera();
                    initMediacodec();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
                try {
                    camera.setPreviewDisplay(holder);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                camera.startPreview();
            }

            @Override
            public void surfaceDestroyed(SurfaceHolder holder) {
                uninitCamera();
            }
        });
    }

    private void initCamera() throws IOException {
        camera = Camera.open(/*Camera.CameraInfo.CAMERA_FACING_FRONT*/);
        camera.setPreviewDisplay(preview.getHolder());
        if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
            // adjust the preview orientation
            camera.setDisplayOrientation(90);
        }
        Camera.Parameters parameters = camera.getParameters();
        parameters.setPreviewSize(640, 480);
        parameters.setPictureSize(640, 480);
        // recording with NV21 produced slight color blocking, so YV12 is used here
        parameters.setPreviewFormat(/*ImageFormat.NV21*/ ImageFormat.YV12);
        camera.setParameters(parameters);
    }

    private void uninitCamera() {
        camera.stopPreview();
        camera.release();
        camera = null;
    }

    @TargetApi(M)
    private void initMediacodec() throws IOException {
        mediaCodec = MediaCodec.createEncoderByType("video/avc");
        MediaFormat videoFormat = MediaFormat.createVideoFormat("video/avc", 640, 480);
        // before picking a color format, it is best to enumerate the ones the encoder supports
        int[] colorFormats = mediaCodec.getCodecInfo().getCapabilitiesForType("video/avc").colorFormats;
        for (int colorFormat : colorFormats) {
            Log.i("tag", Integer.toHexString(colorFormat));
        }
        // note that some keys are mandatory, see the MediaFormat documentation
        videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, 1300 * 1000);
        videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
        videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
        mediaCodec.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    }

    private void unitMediacodec() {
        if (mediaCodec != null) {
            mediaCodec.release();
            mediaCodec = null;
        }
    }

    long recordTime;

    private void encodeVideo() {
        camera.addCallbackBuffer(new byte[640 * 480 * 3 / 2]);
        mediaCodec.start();
        recordTime = System.nanoTime();
        new Thread() {
            @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
            @Override
            public void run() {
                super.run();
                try {
                    // the raw H.264 stream is dumped to a file; the muxer path is left commented out below
                    fileOutputStream = new FileOutputStream(
                            Environment.getExternalStorageDirectory().getAbsolutePath() + "/out_camera.h264");
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
                // the preview callback only needs to be registered once; the loop below just keeps
                // this thread alive until recording stops so the cleanup afterwards can run
                camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
                    @Override
                    public void onPreviewFrame(byte[] data, Camera camera) {
                        addVideoTrack(data);
                        camera.addCallbackBuffer(data);
                    }
                });
                while (running) {
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
                try {
                    fileOutputStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                mediaMuxer.release();
                unitMediacodec();
            }
        }.start();
    }

    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int videoTrack;

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private void addVideoTrack(byte[] data) {
        int inputIndex = mediaCodec.dequeueInputBuffer(1000);
        if (inputIndex >= 0) {
            ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputIndex);
            inputBuffer.put(data);
            // presentation time in microseconds since recording started
            mediaCodec.queueInputBuffer(inputIndex, 0, data.length,
                    Math.abs(System.nanoTime() - recordTime) / 1000, 0);
        }
        int outputIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 1000);
        switch (outputIndex) {
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                videoTrack = mediaMuxer.addTrack(mediaCodec.getOutputFormat());
                mediaMuxer.start();
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                break;
            default:
                if (outputIndex >= 0) {
                    ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputIndex);
                    // write the raw encoded frame to the .h264 dump file
                    try {
                        byte[] bytes = new byte[outputBuffer.limit()];
                        outputBuffer.get(bytes);
                        fileOutputStream.write(bytes);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
//                    mediaMuxer.writeSampleData(videoTrack, outputBuffer, bufferInfo);
                    mediaCodec.releaseOutputBuffer(outputIndex, false);
                }
        }
    }

    @OnClick({R.id.start, R.id.end})
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.start:
                encodeVideo();
                break;
            case R.id.end:
                running = false;
                // clear the preview callback here, otherwise the final callback can fire after
                // unitMediacodec() has already released the codec and cause a NullPointerException
                camera.setPreviewCallbackWithBuffer(null);
                break;
        }
    }
}
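One thing the camera example never does is signal end-of-stream before tearing the encoder down. Below is a hedged sketch of a stop path: stopEncoder() is a made-up helper that assumes the same mediaCodec and bufferInfo fields as EncodeFromCameraActivity, and it simply discards the drained buffers (a real recorder would write them to the file or muxer first).

    // Minimal sketch (not part of the original activity): push an empty EOS buffer and
    // drain whatever the encoder still holds before releasing it.
    private void stopEncoder() {
        int inputIndex = mediaCodec.dequeueInputBuffer(10000);
        if (inputIndex >= 0) {
            // an empty buffer flagged END_OF_STREAM tells the codec no more input is coming
            mediaCodec.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        }
        while (true) {
            int outputIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 10000);
            if (outputIndex >= 0) {
                // a real implementation would write this sample out before releasing it
                mediaCodec.releaseOutputBuffer(outputIndex, false);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    break; // the encoder has flushed everything it buffered
                }
            }
            // INFO_TRY_AGAIN_LATER and the other negative codes just mean "keep polling";
            // a production version should add a timeout so this loop cannot spin forever
        }
        mediaCodec.stop();
        mediaCodec.release();
    }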