Splitting and Joining MP4 Files on Android with Java (Part 2)


This installment looks at how to cut an MP4 file efficiently on Android.

Example requirement: from a 2-minute MP4 file, cut out the segment from 00:42 to 01:16, with as short an execution time and as small a time error as possible.

Analysis: mp4parser can only cut on keyframes. Suppose cuttable keyframes exist at 00:40 and 00:45: both the head and the tail of the clip should then use the short cut, i.e. snap inwards to the nearest keyframe inside the requested range. That leaves a small error segment at each end; whenever such an error segment is longer than 0.5 s, use FFmpeg to produce it by decoding and re-encoding frame by frame. In the example above, a requested start of 00:42 snaps to 00:45, leaving a 3 s head segment to re-encode. The result is at most three MP4 segments, which are then concatenated back into a single file.
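The 0.5 s tolerance shows up as the GAP constant in the joining code in step 3. A minimal sketch of the decision, with illustrative names, assuming the requested times are in milliseconds and the keyframe-aligned times returned by the mp4parser cut are in seconds:

static final double GAP = 0.5; // maximum tolerated error, in seconds

// keyframeStartSec / keyframeEndSec come from the keyframe-aligned (short) cut
static boolean headNeedsFfmpeg(int requestedStartMs, double keyframeStartSec) {
    return keyframeStartSec - requestedStartMs / 1000.0 > GAP;   // e.g. 45.0 - 42.0 = 3.0 > 0.5
}

static boolean tailNeedsFfmpeg(int requestedEndMs, double keyframeEndSec) {
    return requestedEndMs / 1000.0 - keyframeEndSec > GAP;
}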

The key code follows; it can also be run on a PC in a plain Java project.

1. The file-cutting method (mp4parser):

/**
 * Requires the isoviewer-1.0-RC-27 package.
 * The returned array holds the actual start and end times (in seconds) of the produced MP4.
 */

// Imports for the mp4parser/isoparser 1.0 line used here (package names may differ slightly in other builds):
import com.coremedia.iso.boxes.Container;
import com.coremedia.iso.boxes.TimeToSampleBox;
import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.Track;
import com.googlecode.mp4parser.authoring.builder.DefaultMp4Builder;
import com.googlecode.mp4parser.authoring.container.mp4.MovieCreator;
import com.googlecode.mp4parser.authoring.tracks.CroppedTrack;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.LinkedList;
import java.util.List;

public static double[] startTrim(File src, File dst, int startMs, int endMs) throws IOException {
    Movie movie = MovieCreator.build(src.getAbsolutePath());
    List<Track> tracks = movie.getTracks();
    movie.setTracks(new LinkedList<Track>());
    double startTime = startMs / 1000d;  // divide as double so sub-second times are not truncated
    double endTime = endMs / 1000d;
    boolean timeCorrected = false;

    // Here we try to find a track that has sync samples. Since we can only start decoding
    // at such a sample we SHOULD make sure that the start of the new fragment is exactly
    // such a frame.
    for (Track track : tracks) {
        if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
            if (timeCorrected) {
                throw new RuntimeException("The startTime has already been corrected by another track with SyncSample. Not Supported.");
            }
            // (true, false) gives the short cut; (false, true) gives the long cut
            startTime = correctTimeToSyncSample(track, startTime, true);
            endTime = correctTimeToSyncSample(track, endTime, false);
            timeCorrected = true;
        }
    }

    // Crop every track (video and audio) to the corrected start/end times.
    for (Track track : tracks) {
        long currentSample = 0;
        double currentTime = 0;
        long startSample = -1;
        long endSample = -1;
        for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
            TimeToSampleBox.Entry entry = track.getDecodingTimeEntries().get(i);
            for (int j = 0; j < entry.getCount(); j++) {
                // entry.getDelta() is the amount of time the current sample covers.
                if (currentTime <= startTime) {
                    // current sample is still before the new start time
                    startSample = currentSample;
                }
                if (currentTime <= endTime) {
                    // current sample is after the new start time and still before the new end time
                    endSample = currentSample;
                } else {
                    // current sample is after the end of the cropped video
                    break;
                }
                currentTime += (double) entry.getDelta() / (double) track.getTrackMetaData().getTimescale();
                currentSample++;
            }
        }
        movie.addTrack(new CroppedTrack(track, startSample, endSample));
    }

    Container container = new DefaultMp4Builder().build(movie);
    if (!dst.exists()) {
        dst.createNewFile();
    }
    FileOutputStream fos = new FileOutputStream(dst);
    FileChannel fc = fos.getChannel();
    container.writeContainer(fc);
    fc.close();
    fos.close();

    double[] doubleArray = new double[2];
    doubleArray[0] = startTime;
    doubleArray[1] = endTime;
    return doubleArray;
}
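The method above calls correctTimeToSyncSample(), which is not listed in this post. A sketch of it, based on the standard shortening example that ships with mp4parser and using the same API as above (it additionally needs java.util.Arrays): passing true snaps forward to the next sync sample, false snaps back to the previous one, which is what produces the short or long cut mentioned in the comment.

private static double correctTimeToSyncSample(Track track, double cutHere, boolean next) {
    double[] timeOfSyncSamples = new double[track.getSyncSamples().length];
    long currentSample = 0;
    double currentTime = 0;
    for (int i = 0; i < track.getDecodingTimeEntries().size(); i++) {
        TimeToSampleBox.Entry entry = track.getDecodingTimeEntries().get(i);
        for (int j = 0; j < entry.getCount(); j++) {
            // sample numbers are 1-based while currentSample starts at 0, hence the +1
            int index = Arrays.binarySearch(track.getSyncSamples(), currentSample + 1);
            if (index >= 0) {
                timeOfSyncSamples[index] = currentTime;
            }
            currentTime += (double) entry.getDelta() / (double) track.getTrackMetaData().getTimescale();
            currentSample++;
        }
    }
    double previous = 0;
    for (double timeOfSyncSample : timeOfSyncSamples) {
        if (timeOfSyncSample > cutHere) {
            return next ? timeOfSyncSample : previous;
        }
        previous = timeOfSyncSample;
    }
    return timeOfSyncSamples[timeOfSyncSamples.length - 1];
}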
2. The FFmpeg cutting method (requires a JNI implementation, to be covered later):

// FFmpegRecorder and FFmpegGrabber are the JNI-backed FFmpeg wrappers mentioned above;
// mPath (the source file), mTotalTimeSpan, TAG and publishProgress() are members of the enclosing class.
public String getMp4ByFFmpeg(double mTimeStart, double mTimeEnd, String videoPath) {
    try {
        String mFinalVideoPath = videoPath;
        int audioChannels = 2;
        FFmpegRecorder recorder = new FFmpegRecorder(
                mFinalVideoPath, RecorderConfig.TARGET_VIDEO_WIDTH,
                RecorderConfig.TARGET_VIDEO_HEIGHT, audioChannels);
        RecorderConfig.setRecorderConfig(recorder, RecorderConfig.CONFIG_TYPE_MPEG4_HIGH);
        int totalFrames = 0;

        FFmpegGrabber grabber = FFmpegGrabber.createDefault(mPath);
        grabber.setSquareSize(RecorderConfig.TARGET_VIDEO_WIDTH);
        int degree = VideoFileUtil.getRotate(mPath);
        grabber.setOrientation(degree);
        grabber.start();
        if (mTimeStart > 0) {
            // seek close to the requested start before grabbing
            grabber.setTimestamp((long) mTimeStart);
        }
        totalFrames = grabber.getLengthInFrames();

        VideoClip mFinalClip = new VideoClip();
        mFinalClip.mIsFromLocal = true;
        mFinalClip.mHeight = RecorderConfig.TARGET_VIDEO_HEIGHT;
        mFinalClip.mWidth = RecorderConfig.TARGET_VIDEO_WIDTH;

        recorder.setAudioChannels(grabber.getAudioChannels());
        recorder.setSampleRate(grabber.getSampleRate());
        recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
        recorder.setFrameRate(FFmpegRecorder.DEFAULT_FRAME_RATE);
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4);
        recorder.start();

        mFinalClip.mOrientation = 0;
        mFinalClip.mFrameRate = (int) recorder.getFrameRate();
        mFinalClip.mSampleRate = recorder.getSampleRate();
        mFinalClip.mAudioBitrate = recorder.getAudioBitrate();
        mFinalClip.mAudioChannels = recorder.getAudioChannels();

        Frame grabbedFrame = new Frame();
        int j = 0;
        boolean videoTimeout = false;
        boolean audioTimeout = false;
        while (grabber.grabFrame(grabbedFrame)) {
            long i = grabber.getTimestamp();
            long k = grabber.getFrameNumber();
            if (videoTimeout && audioTimeout) {
                // both streams have run past the requested end
                break;
            }
            if (grabbedFrame.hasVideoFrame()) {
                int progress = 100 * (int) (i - mTimeStart) / mTotalTimeSpan;
                publishProgress(progress);
            }
            if (i > mTimeEnd) {
                // frame lies beyond the requested end: mark that stream as finished and skip it
                if (grabbedFrame.hasAudioFrame()) {
                    audioTimeout = true;
                }
                if (grabbedFrame.hasVideoFrame()) {
                    videoTimeout = true;
                }
                continue;
            }
            // re-stamp the frame relative to the cut start so the output begins at 0
            grabbedFrame.setTimeStamp((long) (i - mTimeStart));
            recorder.recordFrameNoException(grabbedFrame);
            SLog.v(TAG, "record image at {}, #{}", i, k);
            j++;
        }
        grabbedFrame.releaseNativeAllocation();
        grabber.stop();
        grabber.release();
        recorder.stop();
        recorder.release();

        mFinalClip.mClipPath = mFinalVideoPath;
        mFinalClip.mDuration = (long) (MP4ParserUtil.getDuration(mFinalVideoPath) * 1000);
        mFinalClip.mTargetMills = mFinalClip.mDuration;
        return mFinalVideoPath;
    } catch (Exception ex) {
        return null;
    }
}
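Two details are worth noting here. grabber.setTimestamp() typically only gets the grabber near the requested start (seeking is usually no better than keyframe-accurate), so the loop still checks every frame's timestamp, skips everything past mTimeEnd, and only stops once both the audio and the video stream have run past the end. And every frame that is kept is re-stamped relative to mTimeStart, so the re-encoded clip starts at time zero and can be concatenated cleanly with the keyframe-cut middle segment.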

3. Code that joins the three segments:

public boolean newClipMethod(String dstFile, String srcFile) {
    try {
        double[] results = ClipMp4Util.startTrim(new File(dstFile), new File(srcFile), mTimeStart, mTimeEnd);
        if (results == null) {
            return false;
        }
        Log.d("", "newClipMethod-->results[0]-mTimeStart" + results[0] + " " + mTimeStart / 1000);
        Log.d("", "newClipMethod-->mTimeEnd-results[1]" + mTimeEnd / 1000 + " " + results[1]);
        // Short cut first, then fill the head/tail error segments with FFmpeg and concatenate.
        // begin, dst and end hold the paths of the head, middle and tail segments (fields not shown here).
        if (results[0] - mTimeStart / 1000 > GAP) {
            String startMp4 = getMp4ByFFmpeg(mTimeStart, results[0] * 1000, begin);
        }
        if (mTimeEnd / 1000 - results[1] > GAP) {
            String endMp4 = getMp4ByFFmpeg(results[1] * 1000, mTimeEnd, end);
        }
        String[] videos = new String[3];
        videos[0] = begin;
        videos[1] = dst;
        videos[2] = end;
        appendVideo(videos);
    } catch (Exception e) {
        // If the segments are not all in the same format, merging them throws here; in that case simply
        // return the middle (keyframe-cut) video. For long videos you can instead choose the long cut,
        // taking a little extra at both the head and the tail.
        Log.d("", "new Method exception-->" + e);
        e.printStackTrace();
    }
    return true;
}
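appendVideo() is used above but not listed in this post. A sketch based on mp4parser's standard AppendTrack example is given below; the outputPath parameter is added here for illustration (the original presumably writes to a path held by the enclosing class), and it additionally needs com.googlecode.mp4parser.authoring.tracks.AppendTrack and java.io.RandomAccessFile. Concatenation only works when all segments share the same codec parameters, which is exactly the failure the catch block above falls back from.

public static void appendVideo(String[] videoPaths, String outputPath) throws IOException {
    Movie[] inMovies = new Movie[videoPaths.length];
    for (int i = 0; i < videoPaths.length; i++) {
        inMovies[i] = MovieCreator.build(videoPaths[i]);
    }
    // collect the audio and video tracks of every segment, keyed by handler type
    List<Track> videoTracks = new LinkedList<Track>();
    List<Track> audioTracks = new LinkedList<Track>();
    for (Movie m : inMovies) {
        for (Track t : m.getTracks()) {
            if ("soun".equals(t.getHandler())) {
                audioTracks.add(t);
            }
            if ("vide".equals(t.getHandler())) {
                videoTracks.add(t);
            }
        }
    }
    // append the per-segment tracks end to end and write the result
    Movie result = new Movie();
    if (!audioTracks.isEmpty()) {
        result.addTrack(new AppendTrack(audioTracks.toArray(new Track[audioTracks.size()])));
    }
    if (!videoTracks.isEmpty()) {
        result.addTrack(new AppendTrack(videoTracks.toArray(new Track[videoTracks.size()])));
    }
    Container out = new DefaultMp4Builder().build(result);
    FileChannel fc = new RandomAccessFile(outputPath, "rw").getChannel();
    out.writeContainer(fc);
    fc.close();
}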
The related project will be uploaded later.
1. Click to download project 1
