FFMPEG 之视频解码

来源:互联网 发布:pink ward知乎 编辑:程序博客网 时间:2024/05/22 03:38

我这里的 so 包来自我自己编译 FFMPEG 生成的产物;如果没有,可以按照上一篇文章自行编译生成,我的资源里也有现成的。

首先先看项目目录

一、过程

1、首先创建native方法

   VideoUtils.java

/**
 * JNI bridge to the FFmpeg-based native decoder.
 * {@link #decode(String, String)} is implemented in native-lib.
 */
public class VideoUtils {

    /**
     * Decodes the video file at {@code input} and writes the result to {@code output}.
     *
     * @param input  absolute path of the source video file
     * @param output absolute path of the destination file
     */
    public native static void decode(String input, String output);

    static {
        // Load the FFmpeg shared libraries first, in dependency order,
        // and the JNI wrapper library last.
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avformat-56");
        System.loadLibrary("swscale-3");
        System.loadLibrary("postproc-53");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("native-lib");
    }
}
2、使用javah命令生成头文件

3、在 defaultConfig 中、与 cmake 配置平级的位置加入如下内容(因为我们只编译了 armeabi 平台)

    // Restrict the build to the armeabi ABI only: the prebuilt FFmpeg
    // .so files under jniLibs/armeabi exist solely for that architecture.
    ndk{
                abiFilters "armeabi"
            }

4、在CMakeLists中添加编译需要的so文件

cmake_minimum_required(VERSION 3.4.1)

# JNI wrapper library built from our single C source file.
add_library(native-lib
            SHARED
            src/main/cpp/native-lib.c)

find_library(log-lib
             log)

# Directory holding the prebuilt FFmpeg shared libraries.
set(FFMPEG_LIB_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src/main/jniLibs/armeabi)

# Declare each prebuilt FFmpeg .so as an imported shared library target.
foreach(ffmpeg_lib
        avcodec-56
        avdevice-56
        avfilter-5
        avformat-56
        avutil-54
        postproc-53
        swresample-1
        swscale-3)
    add_library(lib${ffmpeg_lib}
                SHARED
                IMPORTED)
    set_target_properties(lib${ffmpeg_lib}
                          PROPERTIES IMPORTED_LOCATION
                          ${FFMPEG_LIB_DIR}/lib${ffmpeg_lib}.so)
endforeach()

target_link_libraries(native-lib
                      libavcodec-56
                      libavdevice-56
                      libavfilter-5
                      libavformat-56
                      libavutil-54
                      libpostproc-53
                      libswresample-1
                      libswscale-3
                      ${log-lib})
到这里,准备工作就做得差不多了,只剩下编写 NDK 的 C 代码文件了。

5、引入 FFMPEG 头文件和 javah 生成的头文件,并编写 C 文件

native-lib.c

#include "com_xiaofan_testffmpeg_VideoUtils.h"
#include <android/log.h>
#include <stdio.h>
#include <stdlib.h>

#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO,"wanxiaofan",FORMAT,##__VA_ARGS__);
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR,"wanxiaofan",FORMAT,##__VA_ARGS__);

// container / demuxing
#include "include/libavformat/avformat.h"
// decoding
#include "include/libavcodec/avcodec.h"
// scaling / pixel-format conversion
#include "include/libswscale/swscale.h"
#include "include/libavutil/avutil.h"
#include "include/libavutil/frame.h"

/*
 * Decode the first video stream of `input_jstr` and append every frame,
 * converted to YUV420P, to the file `output_jstr` (planes written Y, U, V).
 *
 * NOTE(review): the included header is com_xiaofan_testffmpeg_VideoUtils.h but
 * this symbol lives in package com_xiaofan_testffmpeg2 — confirm the real Java
 * package name, otherwise the native method will fail to bind at runtime.
 *
 * Fixes vs. the original: all error paths now go through a single cleanup
 * label (the original leaked the JNI strings and the format context on every
 * early return), fopen() is checked, a missing video stream is detected
 * (video_stream_idx would otherwise index streams[-1]), sws_scale uses the
 * codec height instead of pFrame->height, and packet/yuvFrame/out_buffer/
 * sws_ctx are released.
 */
JNIEXPORT void JNICALL Java_com_xiaofan_testffmpeg2_VideoUtils_decode(
        JNIEnv *env, jclass cls, jstring input_jstr, jstring output_jstr) {
    const char *input_cstr = (*env)->GetStringUTFChars(env, input_jstr,
                                                       JNI_FALSE);
    const char *output_cstr = (*env)->GetStringUTFChars(env, output_jstr,
                                                        JNI_FALSE);

    /* Everything released at `cleanup`; NULL means "not yet acquired". */
    AVFormatContext *pFormatCtx = NULL;
    AVCodecContext *pCodeCtx = NULL;
    AVPacket *packet = NULL;
    AVFrame *pFrame = NULL;
    AVFrame *yuvFrame = NULL;
    uint8_t *out_buffer = NULL;
    struct SwsContext *sws_ctx = NULL;
    FILE *fp_yuv = NULL;

    // 1. register all demuxers/codecs (must precede every other FFmpeg call)
    av_register_all();

    // 2. open the input video file
    pFormatCtx = avformat_alloc_context();
    if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0) {
        LOGE("%s", "打开输入视频文件失败!");
        goto cleanup;
    }

    // 3. read stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("%s", "获取视频信息失败!");
        goto cleanup;
    }

    // locate the index of the first video stream in pFormatCtx->streams
    int video_stream_idx = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_idx = i;
            break;
        }
    }
    if (video_stream_idx == -1) {
        // without this check the original indexed streams[-1] below
        LOGE("%s", "找不到视频流!");
        goto cleanup;
    }

    // 4. find a decoder for the stream's codec
    pCodeCtx = pFormatCtx->streams[video_stream_idx]->codec;
    AVCodec *pCodec = avcodec_find_decoder(pCodeCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("%s", "无法解码!");
        goto cleanup;
    }

    // 5. open the decoder
    if (avcodec_open2(pCodeCtx, pCodec, NULL) < 0) {
        LOGE("%s", "无法打开解码器!");
        goto cleanup;
    }

    // compressed packet + decoded frame + YUV420P destination frame
    packet = av_malloc(sizeof(AVPacket));
    pFrame = av_frame_alloc();
    yuvFrame = av_frame_alloc();

    // an AVFrame only owns pixel memory once format/size are bound to a buffer
    out_buffer = av_malloc(
            avpicture_get_size(AV_PIX_FMT_YUV420P, pCodeCtx->width,
                               pCodeCtx->height));
    avpicture_fill((AVPicture *) yuvFrame, out_buffer, AV_PIX_FMT_YUV420P,
                   pCodeCtx->width, pCodeCtx->height);

    // output file (original never checked this fopen)
    fp_yuv = fopen(output_cstr, "wb");
    if (fp_yuv == NULL) {
        LOGE("%s", "打开输出文件失败!");
        goto cleanup;
    }

    sws_ctx = sws_getContext(
            pCodeCtx->width, pCodeCtx->height, pCodeCtx->pix_fmt,
            pCodeCtx->width, pCodeCtx->height, AV_PIX_FMT_YUV420P,
            SWS_BILINEAR, NULL, NULL, NULL);

    // 6. read compressed packets one by one and decode
    int got_frame, framecount = 0;
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        avcodec_decode_video2(pCodeCtx, pFrame, &got_frame, packet);
        // non-zero: a complete frame was produced
        if (got_frame) {
            // convert to YUV420P; use the codec height (the original passed
            // pFrame->height, which need not be initialized here)
            sws_scale(sws_ctx, (const uint8_t *const *) pFrame->data,
                      pFrame->linesize, 0, pCodeCtx->height,
                      yuvFrame->data, yuvFrame->linesize);
            // write the planar data: Y plane, then quarter-size U and V
            int y_size = pCodeCtx->width * pCodeCtx->height;
            fwrite(yuvFrame->data[0], 1, y_size, fp_yuv);
            fwrite(yuvFrame->data[1], 1, y_size / 4, fp_yuv);
            fwrite(yuvFrame->data[2], 1, y_size / 4, fp_yuv);
            LOGI("解码%d帧", framecount++);
        }
        av_free_packet(packet);
    }

cleanup:
    if (fp_yuv) {
        fclose(fp_yuv);
    }
    if (sws_ctx) {
        sws_freeContext(sws_ctx);
    }
    av_free(out_buffer);        // av_free(NULL) is a no-op
    av_frame_free(&yuvFrame);
    av_frame_free(&pFrame);
    av_free(packet);
    if (pCodeCtx) {
        avcodec_close(pCodeCtx);
    }
    if (pFormatCtx) {
        // closes the input AND frees the context (the original paired
        // avformat_open_input with avformat_free_context, leaking the
        // demuxer's internal state)
        avformat_close_input(&pFormatCtx);
    }
    (*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
    (*env)->ReleaseStringUTFChars(env, output_jstr, output_cstr);
}
6、调用解码方法

// Decode aaaa.mp4 from external storage into a raw YUV420P dump.
File sdcard = Environment.getExternalStorageDirectory();
String input = new File(sdcard, "aaaa.mp4").getAbsolutePath();
String output = new File(sdcard, "output_1280x720_yuv420p.yuv").getAbsolutePath();
VideoUtils.decode(input, output);

7、最后别忘了加入读写 SD 卡的权限

<!-- Needed to read the input MP4 from, and write the decoded YUV file to, external storage. -->
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>

8、注意:

  

上面引入的 include 目录下的头文件来自编译 FFMPEG 时生成的文件


原创粉丝点击