Android Studio NKD开发之 FFmpeg库的引入--简单播放器(主要验证是否正确引入ffmpeg库)

来源:互联网 发布:ubuntu 图形界面 编辑:程序博客网 时间:2024/06/10 17:34

缘由

  • 之前一直在linux系统下编译出了ffmpeg的动态库,但是一直没用上,刚好今天实验了一下。本篇文章参考了http://blog.csdn.net/nothingl3/article/details/52774550,加上自己的理解,对部分内容有具体的解释,比较适合像我这种刚接触的新手

预备条件

  • 已经在linux系统下编译好的ffmpeg的库,这里如何编译将在下一篇文章中详细讲解。编译好的目录结构如下(原文此处为截图,略)

  • 这里是我从虚拟机上copy到window上的目录结构

核心步骤

  1. 创建工程,java部分代码如下:
MainActivity.java:public class MainActivity        extends AppCompatActivity        implements SurfaceHolder.Callback{    private SurfaceHolder mSurfaceHolder;    @Override    protected void onCreate(Bundle savedInstanceState) {        super.onCreate(savedInstanceState);        setContentView(R.layout.activity_main);        // Example of a call to a native method        TextView    tv          = (TextView) findViewById(R.id.sample_text);        SurfaceView surfaceView = (SurfaceView) findViewById(R.id.surface_view);        mSurfaceHolder = surfaceView.getHolder();        mSurfaceHolder.addCallback(this);        tv.setText("测试");    }    /**     * A native method that is implemented by the 'native-lib' native library,     * which is packaged with this application.     */    public native String stringFromJNI();    @Override    public void surfaceCreated(SurfaceHolder holder) {        new Thread(new Runnable() {            @Override            public void run() {            //这里调用了本地方法传递一个surface进去                VideoPlayer.play(mSurfaceHolder.getSurface());            }        }).start();    }    @Override    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {    }    @Override    public void surfaceDestroyed(SurfaceHolder holder) {    }}
VideoPlayer.javapublic class VideoPlayer {    private static final String TAG ="HYVideoPlayer" ;    static {        Log.d(TAG, "static initializer: 开始加载");        System.loadLibrary("videoplay");     //静态加载动态库       }//声明本地方法(这是ndk开发的基本步骤)    public static native int play(Object surface);}
xml布局文件:<?xml version="1.0" encoding="utf-8"?><LinearLayout    android:id="@+id/activity_main"    xmlns:android="http://schemas.android.com/apk/res/android"    xmlns:tools="http://schemas.android.com/tools"    android:layout_width="match_parent"    android:orientation="vertical"    android:layout_height="match_parent"    tools:context="com.example.lyc.sampleffpeg.MainActivity">    <TextView        android:id="@+id/sample_text"        android:layout_width="wrap_content"        android:layout_height="wrap_content"        android:text="Hello World!"/>    <SurfaceView        android:id="@+id/surface_view"        android:layout_width="match_parent"        android:layout_height="match_parent" /></LinearLayout>
这里注意,在配置文件中记得加入读取的权限,不然在cpp文件中openfile的时候会一直报错,就是因为没有读取外部存储文件的权限。  <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />  这里卡住了我很久
  1. build.gradle配置文件的代码:
android {    compileSdkVersion 23    buildToolsVersion "25.0.1"    defaultConfig {        applicationId "com.example.lyc.sampleffpeg"        minSdkVersion 21        targetSdkVersion 23        versionCode 1        versionName "1.0"        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"        ndk {  # 这里控制NDK编译哪些类型的ABI so文件,用来适配不同平台    # 我的ffmpeg用的是arme的编译类型的,这个下一篇文章中会讲到            abiFilters 'armeabi'        }        externalNativeBuild {            cmake {    # 这里其实可以不写,cmake默认的配置就是这两个    # 翻墙:https://developer.android.com/ndk/guides/cmake.html#variables在这里可以看到    # -DANDROID_TOOLCHAIN=clang'具体指什么不太清楚,有清楚的大神可以指教一下的,后面的clang是ndk默认的配置    #-DANDROID_STL 是标准库,可以查询STL,有关C++的                    arguments '-DANDROID_TOOLCHAIN=clang','-DANDROID_STL=gnustl_static'            }        }    }    buildTypes {        release {            minifyEnabled false            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'        }    }    externalNativeBuild {        cmake {        #编译的入口,指明编译的路径,            path "/src/main/cpp/CMakeLists.txt"        }    }}dependencies {    compile fileTree(dir: 'libs', include: ['*.jar'])    androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {        exclude group: 'com.android.support', module: 'support-annotations'    })    compile 'com.android.support:appcompat-v7:23.4.0'    testCompile 'junit:junit:4.12'}

2.CMakelist.txt文件:
去掉了一些没有用的代码

cmake_minimum_required(VERSION 3.4.1)# 这里就相当于一个定义的语法,要注意的地方就是${CMAKE_SOURCE_DIR}这个等价于获取到CMakeList.txt文件的根目录的,后面跟的../等于是他的根目录的上一级目录,在这里的目录里面才有jniLibs这个目录,而ANDROID_ABI这个目录就是你在build.gradle中ndk{指定的编译平台},直接开始一个新的工程生成的CM文件是不在cpp这个目录下的,我手动移动到了这个目录下,也可以不用,不过这里路径就要指明一下set(lib_src_DIR ${CMAKE_SOURCE_DIR}/../jniLibs/${ANDROID_ABI})#指定头文件的搜索路径,在cpp文件中有include “xx.h”头文件的,搜索的根目录就由这里来决定include_directories(        ${CMAKE_SOURCE_DIR}/include)#这里add_library和set_target_properties可以看做一堆,添加一个已经存在的第三方的动态库(.so库),具体含义TODO:add_library(avcodec-56_lib SHARED IMPORTED)set_target_properties(avcodec-56_lib PROPERTIES IMPORTED_LOCATION                             ${lib_src_DIR}/libavcodec-56.so)add_library(avformat-56_lib SHARED IMPORTED)set_target_properties(avformat-56_lib PROPERTIES IMPORTED_LOCATION                        ${lib_src_DIR}/libavformat-56.so)add_library(avutil-54_lib SHARED IMPORTED)set_target_properties(avutil-54_lib PROPERTIES IMPORTED_LOCATION                        ${lib_src_DIR}/libavutil-54.so)add_library(swresample-1_lib SHARED IMPORTED)set_target_properties(swresample-1_lib PROPERTIES IMPORTED_LOCATION                        ${lib_src_DIR}/libswresample-1.so)add_library(swscale-3_lib SHARED IMPORTED)set_target_properties(swscale-3_lib PROPERTIES IMPORTED_LOCATION                        ${lib_src_DIR}/libswscale-3.so)#这里就是我们自己的一个cpp文件,可以在这里面来引用内容,使用ffmpeg内的函数# build application's shared libadd_library(videoplay SHARED            videoplay.cpp)# Include libraries needed for videoplay lib# 这里要特别注意,这里的意思就是target_link_libraries(目标动态库,需要绑定的库1,2,3,4,5,6,7...)等于videoplay里面所有的cpp文件可以使用1,2,3,4,5,6,7...的内容target_link_libraries(videoplaylogandroidavcodec-56_libavformat-56_libavutil-54_libswresample-1_libswscale-3_lib)

3.videoplay.cpp:
这里基本参考是文章开头的那位博主的内容,不过,在这里有个忽略的地方让我get到一个新技能的,如果调试有bug的同学可以看一下的,之前因为没有在配置文件中去申请 读取存储空间的权限 所以一直卡在:

 if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {        int err_code = avformat_open_input(&pFormatCtx, file_name, NULL, NULL);        char buf[1024];       av_strerror(err_code, buf, 1024);        LOGD("Couldn't open file %s: %d(%s)", file_name, err_code, buf);        LOGD("Couldn't open file:%s\n", file_name);        return -1; // Couldn't open file    }

就卡在这里无法打开文件,后来通过 av_strerror 这个函数把返回的错误值转换成文字信息,发现是 permission 拒绝(权限被拒绝)。如果哪位同志卡住了可以在这里看一下
下面是videoplay.cpp的全部代码的

//// Created by LYC2 on 2017/1/13.//#include <jni.h>#include <android/log.h>#include <android/native_window.h>#include <android/native_window_jni.h>extern "C"{#include "include/libavcodec/avcodec.h"#include "include/libavformat/avformat.h"#include "include/libswscale/swscale.h"#include "include/libavutil/imgutils.h"#define  LOG_TAG    "videoplay"#define  LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)JNIEXPORT jint JNICALLJava_com_example_lyc_sampleffpeg_VideoPlayer_play(JNIEnv *env, jclass type, jobject surface) {    // TODO    LOGD("HYplay");    // sd卡中的视频文件地址,可自行修改或者通过jni传入    char *file_name = "/storage/emulated/0/928001.mp4";    av_register_all();    AVFormatContext *pFormatCtx = avformat_alloc_context();    // Open video file    if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {        int err_code = avformat_open_input(&pFormatCtx, file_name, NULL, NULL);        char buf[1024];       av_strerror(err_code, buf, 1024);        LOGD("Couldn't open file %s: %d(%s)", file_name, err_code, buf);        LOGD("Couldn't open file:%s\n", file_name);        return -1; // Couldn't open file    }    // Retrieve stream information    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {        LOGD("Couldn't find stream information.");        return -1;    }    // Find the first video stream    int videoStream = -1, i;    for (i = 0; i < pFormatCtx->nb_streams; i++) {        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO            && videoStream < 0) {            videoStream = i;        }    }    if (videoStream == -1) {        LOGD("Didn't find a video stream.");        return -1; // Didn't find a video stream    }    // Get a pointer to the codec context for the video stream    AVCodecContext *pCodecCtx = pFormatCtx->streams[videoStream]->codec;    // Find the decoder for the video stream    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);    if (pCodec == NULL) {        LOGD("Codec not found.");  
      return -1; // Codec not found    }    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {        LOGD("Could not open codec.");        return -1; // Could not open codec    }    // 获取native window    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);    // 获取视频宽高    int videoWidth = pCodecCtx->width;    int videoHeight = pCodecCtx->height;    // 设置native window的buffer大小,可自动拉伸    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight,                                     WINDOW_FORMAT_RGBA_8888);    ANativeWindow_Buffer windowBuffer;    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {        LOGD("Could not open codec.");        return -1; // Could not open codec    }    // Allocate video frame    AVFrame *pFrame = av_frame_alloc();    // 用于渲染    AVFrame *pFrameRGBA = av_frame_alloc();    if (pFrameRGBA == NULL || pFrame == NULL) {        LOGD("Could not allocate video frame.");        return -1;    }    // Determine required buffer size and allocate buffer    // buffer中数据就是用于渲染的,且格式为RGBA    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height,                                            1);    uint8_t *buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,                         pCodecCtx->width, pCodecCtx->height, 1);    // 由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,                                                pCodecCtx->height,                                                pCodecCtx->pix_fmt,                                                pCodecCtx->width,                                                pCodecCtx->height,                                                AV_PIX_FMT_RGBA,                                                SWS_BILINEAR,                                                NULL,                                           
     NULL,                                                NULL);    int frameFinished;    AVPacket packet;    while (av_read_frame(pFormatCtx, &packet) >= 0) {       // LOGD("while");        // Is this a packet from the video stream?        if (packet.stream_index == videoStream) {            // Decode video frame            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);            // 并不是decode一次就可解码出一帧            if (frameFinished) {             //   LOGD("while  if (frameFinished)");                // lock native window buffer                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);                // 格式转换                sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,                          pFrame->linesize, 0, pCodecCtx->height,                          pFrameRGBA->data, pFrameRGBA->linesize);                // 获取stride                uint8_t *dst = (uint8_t *) windowBuffer.bits;                int dstStride = windowBuffer.stride * 4;                uint8_t *src = (pFrameRGBA->data[0]);                int srcStride = pFrameRGBA->linesize[0];                // 由于window的stride和帧的stride不同,因此需要逐行复制                int h;                for (h = 0; h < videoHeight; h++) {                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);                    //LOGD("while  if (frameFinished)  for (h = 0; h < videoHeight; h++)");                }                ANativeWindow_unlockAndPost(nativeWindow);            }        }        av_packet_unref(&packet);    }    av_free(buffer);    av_free(pFrameRGBA);    // Free the YUV frame    av_free(pFrame);    // Close the codecs    avcodec_close(pCodecCtx);    // Close the video file    avformat_close_input(&pFormatCtx);    return 0;}};

未完的部分

  • 在添加 so 库(add_library ... IMPORTED)和 set_target_properties 的那个 TODO 还没有研究完。

编译好的ffmpeg库的下载地址

  • 链接:http://pan.baidu.com/s/1kVrzs15 密码:onc9
0 0