Analysis of Google's Official Android NDK Demos (1): audio-echo


I've recently been revisiting the NDK, focusing on NDK development with Android Studio. Google's official demos are without doubt the best source code to learn from.

Let's start by analyzing the audio-echo project.

1. CMakeLists.txt

cmake_minimum_required(VERSION 3.4.1)

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Werror")

add_library(echo SHARED
            audio_common.cpp
            audio_main.cpp
            audio_player.cpp
            audio_recorder.cpp
            debug_utils.cpp)

# include libraries needed for hello-jni lib
target_link_libraries(echo
                      android
                      atomic
                      log
                      OpenSLES)

# Android Studio 2.2.0 with CMake support does not pack stl shared libraries,
# so app needs to pack the right shared lib into APK. This sample uses solution
# from https://github.com/jomof/ndk-stl to find the right stl shared lib to use
# and copy it to the right place for Android Studio to pack
# Usage: download ndk-stl-config.cmake into app's directory hosting CMakeLists.txt
#        and just use it with the following line
include(ndk-stl-config.cmake)

cmake_minimum_required(VERSION 3.4.1) declares that 3.4.1 is the minimum CMake version required. Next,

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Werror")

appends the C++ compiler flags: build against the C++11 standard, enable all warnings, and treat every warning as an error.
add_library(echo SHARED
            audio_common.cpp
            audio_main.cpp
            audio_player.cpp
            audio_recorder.cpp
            debug_utils.cpp)
This declares that the shared library echo is built from the listed source files.
# include libraries needed for hello-jni lib
target_link_libraries(echo
                      android
                      atomic
                      log
                      OpenSLES)
This lists the libraries that the echo target built above links against: the android NDK library, the atomic library, the log library, and the OpenSL ES audio library.
# Android Studio 2.2.0 with CMake support does not pack stl shared libraries,
# so app needs to pack the right shared lib into APK. This sample uses solution
# from https://github.com/jomof/ndk-stl to find the right stl shared lib to use
# and copy it to the right place for Android Studio to pack
# Usage: download ndk-stl-config.cmake into app's directory hosting CMakeLists.txt
#        and just use it with the following line
include(ndk-stl-config.cmake)
As the comments explain, this includes ndk-stl-config.cmake so that the app packs the correct STL shared library into the APK, because the CMake integration in Android Studio 2.2.0 does not pack the STL shared library itself.
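
This CMakeLists.txt only produces libecho.so; the connection to the Java layer is purely by naming convention. As a minimal sketch (the concrete class and method names are taken from the MainActivity shown in the next section): the Java code loads the library with System.loadLibrary("echo"), and each Java native method resolves to an extern "C" symbol named Java_<package>_<Class>_<method>.

#include <jni.h>

// Sketch only: the real implementation lives in audio_main.cpp (analyzed below).
// The JNI runtime finds this symbol by name when MainActivity.createSLEngine() is called.
extern "C" JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_createSLEngine(JNIEnv *env, jclass type,
                                                        jint sampleRate, jint framesPerBuf) {
    // ...
}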

2. MainActivity.java

/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.sample.echo;

import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;

public class MainActivity extends Activity
        implements ActivityCompat.OnRequestPermissionsResultCallback {
    private static final int AUDIO_ECHO_REQUEST = 0;

    Button   controlButton;
    TextView statusView;
    String   nativeSampleRate;
    String   nativeSampleBufSize;
    boolean  supportRecording;
    Boolean  isPlaying = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        controlButton = (Button)findViewById((R.id.capture_control_button));
        statusView = (TextView)findViewById(R.id.statusView);
        queryNativeAudioParameters();

        // initialize native audio system
        updateNativeAudioUI();

        if (supportRecording) {
            createSLEngine(Integer.parseInt(nativeSampleRate), Integer.parseInt(nativeSampleBufSize));
        }
    }

    @Override
    protected void onDestroy() {
        if (supportRecording) {
            if (isPlaying) {
                stopPlay();
            }
            deleteSLEngine();
            isPlaying = false;
        }
        super.onDestroy();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    private void startEcho() {
        if(!supportRecording){
            return;
        }
        if (!isPlaying) {
            if(!createSLBufferQueueAudioPlayer()) {
                statusView.setText(getString(R.string.error_player));
                return;
            }
            if(!createAudioRecorder()) {
                deleteSLBufferQueueAudioPlayer();
                statusView.setText(getString(R.string.error_recorder));
                return;
            }
            startPlay();   // this must include startRecording()
            statusView.setText(getString(R.string.status_echoing));
        } else {
            stopPlay();  // this must include stopRecording()
            updateNativeAudioUI();
            deleteAudioRecorder();
            deleteSLBufferQueueAudioPlayer();
        }
        isPlaying = !isPlaying;
        controlButton.setText(getString((isPlaying == true) ?
                R.string.StopEcho : R.string.StartEcho));
    }

    public void onEchoClick(View view) {
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) !=
                                               PackageManager.PERMISSION_GRANTED) {
            statusView.setText(getString(R.string.status_record_perm));
            ActivityCompat.requestPermissions(
                    this,
                    new String[] { Manifest.permission.RECORD_AUDIO },
                    AUDIO_ECHO_REQUEST);
            return;
        }
        startEcho();
    }

    public void getLowLatencyParameters(View view) {
        updateNativeAudioUI();
        return;
    }

    private void queryNativeAudioParameters() {
        AudioManager myAudioMgr = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        nativeSampleRate    = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
        nativeSampleBufSize = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
        int recBufSize = AudioRecord.getMinBufferSize(
                Integer.parseInt(nativeSampleRate),
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        supportRecording = true;
        if (recBufSize == AudioRecord.ERROR ||
            recBufSize == AudioRecord.ERROR_BAD_VALUE) {
            supportRecording = false;
        }
    }

    private void updateNativeAudioUI() {
        if (!supportRecording) {
            statusView.setText(getString(R.string.error_no_mic));
            controlButton.setEnabled(false);
            return;
        }

        statusView.setText("nativeSampleRate    = " + nativeSampleRate + "\n" +
                "nativeSampleBufSize = " + nativeSampleBufSize + "\n");
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        /*
         * if any permission failed, the sample could not play
         */
        if (AUDIO_ECHO_REQUEST != requestCode) {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
            return;
        }

        if (grantResults.length != 1 ||
            grantResults[0] != PackageManager.PERMISSION_GRANTED) {
            /*
             * When user denied permission, throw a Toast to prompt that RECORD_AUDIO
             * is necessary; also display the status on UI
             * Then application goes back to the original state: it behaves as if the button
             * was not clicked. The assumption is that user will re-click the "start" button
             * (to retry), or shutdown the app in normal way.
             */
            statusView.setText(getString(R.string.error_no_permission));
            Toast.makeText(getApplicationContext(),
                    getString(R.string.prompt_permission),
                    Toast.LENGTH_SHORT).show();
            return;
        }

        /*
         * When permissions are granted, we prompt the user the status. User would
         * re-try the "start" button to perform the normal operation. This saves us the extra
         * logic in code for async processing of the button listener.
         */
        statusView.setText("RECORD_AUDIO permission granted, touch " +
                           getString(R.string.StartEcho) + " to begin");

        // The callback runs on app's thread, so we are safe to resume the action
        startEcho();
    }

    /*
     * Loading our Libs
     */
    static {
        System.loadLibrary("echo");
    }

    /*
     * jni function implementations...
     */
    public static native void createSLEngine(int rate, int framesPerBuf);
    public static native void deleteSLEngine();

    public static native boolean createSLBufferQueueAudioPlayer();
    public static native void deleteSLBufferQueueAudioPlayer();

    public static native boolean createAudioRecorder();
    public static native void deleteAudioRecorder();

    public static native void startPlay();
    public static native void stopPlay();
}

Start with onCreate(). queryNativeAudioParameters() uses AudioManager to read the device's native output sample rate and frames-per-buffer size, and uses AudioRecord.getMinBufferSize() to query the minimum recording buffer size; if that call reports an error, recording is marked as unsupported. updateNativeAudioUI() then updates the UI, handling the supported and unsupported cases separately. If recording is supported, onCreate() also calls the native method createSLEngine() with the queried sample rate and buffer size.

The button is wired to onEchoClick(). That method first checks the RECORD_AUDIO permission and requests it if it has not been granted; once the permission is available, it calls startEcho(). startEcho() returns immediately if recording is unsupported. Otherwise it checks whether the echo is already running. If not, it calls the native method createSLBufferQueueAudioPlayer() (analyzed below); on success it calls the native method createAudioRecorder() to create the recorder, and if that fails it calls deleteSLBufferQueueAudioPlayer() to tear the player down again. If everything succeeds it calls the native method startPlay() to start the echo. If the echo is already running, it instead calls the native method stopPlay(), then updateNativeAudioUI(), and then the native methods deleteAudioRecorder() and deleteSLBufferQueueAudioPlayer(). Finally, isPlaying is toggled and the button label is updated.

3. Analysis of the native methods

createSLEngine(int rate, int framesPerBuf)

createSLEngine(int rate, int framesPerBuf) is defined in audio_main.cpp; its source is as follows:

JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_createSLEngine(
        JNIEnv *env, jclass type, jint sampleRate, jint framesPerBuf) {
    SLresult result;
    memset(&engine, 0, sizeof(engine));

    engine.fastPathSampleRate_   = static_cast<SLmilliHertz>(sampleRate) * 1000;
    engine.fastPathFramesPerBuf_ = static_cast<uint32_t>(framesPerBuf);
    engine.sampleChannels_   = AUDIO_SAMPLE_CHANNELS;
    engine.bitsPerSample_    = SL_PCMSAMPLEFORMAT_FIXED_16;

    result = slCreateEngine(&engine.slEngineObj_, 0, NULL, 0, NULL, NULL);
    SLASSERT(result);

    result = (*engine.slEngineObj_)->Realize(engine.slEngineObj_, SL_BOOLEAN_FALSE);
    SLASSERT(result);

    result = (*engine.slEngineObj_)->GetInterface(engine.slEngineObj_, SL_IID_ENGINE, &engine.slEngineItf_);
    SLASSERT(result);

    // compute the RECOMMENDED fast audio buffer size:
    //   the lower latency required
    //     *) the smaller the buffer should be (adjust it here) AND
    //     *) the less buffering should be before starting player AFTER
    //        receiving the recordered buffer
    //   Adjust the bufSize here to fit your bill [before it busts]
    uint32_t bufSize = engine.fastPathFramesPerBuf_ * engine.sampleChannels_
                       * engine.bitsPerSample_;
    bufSize = (bufSize + 7) >> 3;            // bits --> byte
    engine.bufCount_ = BUF_COUNT;
    engine.bufs_ = allocateSampleBufs(engine.bufCount_, bufSize);
    assert(engine.bufs_);

    engine.freeBufQueue_ = new AudioQueue (engine.bufCount_);
    engine.recBufQueue_  = new AudioQueue (engine.bufCount_);
    assert(engine.freeBufQueue_ && engine.recBufQueue_);

    for(uint32_t i=0; i<engine.bufCount_; i++) {
        engine.freeBufQueue_->push(&engine.bufs_[i]);
    }
}

First an SLresult variable, result, is declared, and the global engine variable (a static EchoAudioEngine) is zeroed with memset. The engine fields fastPathSampleRate_, fastPathFramesPerBuf_, sampleChannels_, and bitsPerSample_ are then set; note that the sample rate is multiplied by 1000 because OpenSL ES expresses rates in millihertz. Next, slCreateEngine() creates the OpenSL ES engine object, and the SLASSERT(result) macro asserts on each result value. The engine object's Realize() and GetInterface() methods are called in turn to realize it and obtain its SL_IID_ENGINE interface. After that the buffer size is computed: framesPerBuf * channels * bitsPerSample gives the size in bits, and shifting right by three (after adding 7 to round up) converts bits to bytes. BUF_COUNT (16) is the default number of buffers; allocateSampleBufs() allocates BUF_COUNT buffers of bufSize bytes each, and engine.bufs_ points at them. Finally freeBufQueue_ and recBufQueue_ are created, and every buffer in engine.bufs_ is pushed onto engine.freeBufQueue_.
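
To make the arithmetic concrete, here is a small worked example of the buffer-size math above. The device values (48000 Hz output sample rate and 192 frames per buffer, mono recording) are assumptions for illustration, not values guaranteed by the sample.

#include <cstdint>
#include <cstdio>

int main() {
    uint32_t framesPerBuf  = 192;   // assumed PROPERTY_OUTPUT_FRAMES_PER_BUFFER
    uint32_t channels      = 1;     // assumed AUDIO_SAMPLE_CHANNELS (mono)
    uint32_t bitsPerSample = 16;    // SL_PCMSAMPLEFORMAT_FIXED_16

    uint32_t bufSizeBits  = framesPerBuf * channels * bitsPerSample;  // 3072 bits
    uint32_t bufSizeBytes = (bufSizeBits + 7) >> 3;                   // 384 bytes

    // The sample rate itself is stored in millihertz for OpenSL ES:
    uint32_t sampleRateMilliHz = 48000 * 1000;                        // 48,000,000 mHz

    printf("one buffer = %u bytes, sample rate = %u mHz\n", bufSizeBytes, sampleRateMilliHz);
    return 0;
}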

deleteSLEngine()

deleteSLEngine() is defined in audio_main.cpp:

JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_deleteSLEngine(JNIEnv *env, jclass type) {
    delete engine.recBufQueue_;
    delete engine.freeBufQueue_;
    releaseSampleBufs(engine.bufs_, engine.bufCount_);
    if (engine.slEngineObj_ != NULL) {
        (*engine.slEngineObj_)->Destroy(engine.slEngineObj_);
        engine.slEngineObj_ = NULL;
        engine.slEngineItf_ = NULL;
    }
}
First, the engine's recBufQueue_ and freeBufQueue_ are deleted. Then releaseSampleBufs() is called; it is an inline function defined as follows:

__inline__ void releaseSampleBufs(sample_buf* bufs, uint32_t& count) {
    if(!bufs || !count) {
        return;
    }
    for(uint32_t i=0; i<count; i++) {
        if(bufs[i].buf_) delete [] bufs[i].buf_;
    }
    delete [] bufs;
}

This function frees the buf_ member of every sample_buf in the bufs array and then frees the array itself. Back in deleteSLEngine(), the engine.slEngineObj_ object is destroyed, and both engine.slEngineObj_ and engine.slEngineItf_ are set to NULL.
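
The matching allocation helper, allocateSampleBufs(), is not shown in this article. A plausible sketch of it (not the sample's exact code) is below; it assumes sample_buf carries the data pointer buf_ seen above plus cap_/size_ bookkeeping fields, which are assumptions here.

#include <cstdint>
#include <cstring>

struct sample_buf {
    uint8_t  *buf_;   // audio data
    uint32_t  cap_;   // capacity in bytes (assumed field)
    uint32_t  size_;  // bytes currently used (assumed field)
};

__inline__ sample_buf* allocateSampleBufs(uint32_t count, uint32_t sizeInBytes) {
    if (!count || !sizeInBytes) {
        return nullptr;
    }
    sample_buf* bufs = new sample_buf[count];
    memset(bufs, 0, sizeof(sample_buf) * count);
    for (uint32_t i = 0; i < count; i++) {
        bufs[i].buf_  = new uint8_t[sizeInBytes];
        bufs[i].cap_  = sizeInBytes;
        bufs[i].size_ = 0;
    }
    return bufs;   // released later, one buf_ at a time, by releaseSampleBufs()
}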

createSLBufferQueueAudioPlayer()

createSLBufferQueueAudioPlayer() is defined in audio_main.cpp:

JNIEXPORT jboolean JNICALL
Java_com_google_sample_echo_MainActivity_createSLBufferQueueAudioPlayer(JNIEnv *env, jclass type) {
    SampleFormat sampleFormat;
    memset(&sampleFormat, 0, sizeof(sampleFormat));
    sampleFormat.pcmFormat_ = (uint16_t)engine.bitsPerSample_;
    sampleFormat.framesPerBuf_ = engine.fastPathFramesPerBuf_;

    // SampleFormat.representation_ = SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT;
    sampleFormat.channels_ = (uint16_t)engine.sampleChannels_;
    sampleFormat.sampleRate_ = engine.fastPathSampleRate_;

    engine.player_ = new AudioPlayer(&sampleFormat, engine.slEngineItf_);
    assert(engine.player_);
    if(engine.player_ == nullptr)
        return JNI_FALSE;

    engine.player_->SetBufQueue(engine.recBufQueue_, engine.freeBufQueue_);
    engine.player_->RegisterCallback(EngineService, (void*)&engine);

    return JNI_TRUE;
}

First a SampleFormat object, sampleFormat, is created and zeroed with memset. Its pcmFormat_ member holds the PCM sample format (bits per sample), so it is initialized from engine.bitsPerSample_; framesPerBuf_ is initialized from engine.fastPathFramesPerBuf_, channels_ from engine.sampleChannels_, and sampleRate_ from engine.fastPathSampleRate_. Then an AudioPlayer is constructed from sampleFormat and engine.slEngineItf_, and engine.player_ points to the new object. If construction fails, the function returns JNI_FALSE. Otherwise the player's SetBufQueue() and RegisterCallback() methods are called: SetBufQueue(engine.recBufQueue_, engine.freeBufQueue_) tells the player to play filled buffers from recBufQueue_ and recycle them into freeBufQueue_, and RegisterCallback() installs the EngineService callback. Finally the function returns JNI_TRUE.
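
To make the queue wiring concrete: the recorder pushes filled buffers into recBufQueue_ and takes empty ones from freeBufQueue_, while the player does the reverse. The following is a conceptual sketch only, not the sample's AudioPlayer/AudioRecorder code; std::deque stands in for the sample's AudioQueue type.

#include <deque>

struct sample_buf;                           // defined in the sample's audio_common.h
using BufQueue = std::deque<sample_buf*>;    // stand-in for the sample's AudioQueue

// Recorder side: called (conceptually) each time the mic has filled one buffer.
void onRecordBufferFull(BufQueue &freeQ, BufQueue &recQ, sample_buf *filled) {
    recQ.push_back(filled);                  // hand the captured audio to the player side
    if (!freeQ.empty()) {
        sample_buf *next = freeQ.front();    // re-arm recording with an empty buffer
        freeQ.pop_front();
        // ... enqueue `next` on the OpenSL ES recorder buffer queue ...
        (void)next;
    }
}

// Player side: called (conceptually) each time the output needs one more buffer.
void onPlayerNeedsBuffer(BufQueue &freeQ, BufQueue &recQ) {
    if (!recQ.empty()) {
        sample_buf *ready = recQ.front();    // oldest recorded buffer
        recQ.pop_front();
        // ... enqueue `ready` on the OpenSL ES player buffer queue; once it has been
        // played, recycle it:
        freeQ.push_back(ready);
    }
}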

deleteSLBufferQueueAudioPlayer()

deleteSLBufferQueueAudioPlayer() is defined in audio_main.cpp:

JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_deleteSLBufferQueueAudioPlayer(JNIEnv *env, jclass type) {
    if(engine.player_) {
        delete engine.player_;
        engine.player_= nullptr;
    }
}

If engine.player_ is not null, the AudioPlayer object is deleted and the pointer is reset to nullptr.

createAudioRecorder()

createAudioRecorder() is defined in audio_main.cpp:

JNIEXPORT jboolean JNICALL
Java_com_google_sample_echo_MainActivity_createAudioRecorder(JNIEnv *env, jclass type) {
    SampleFormat sampleFormat;
    memset(&sampleFormat, 0, sizeof(sampleFormat));
    sampleFormat.pcmFormat_ = static_cast<uint16_t>(engine.bitsPerSample_);

    // SampleFormat.representation_ = SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT;
    sampleFormat.channels_ = engine.sampleChannels_;
    sampleFormat.sampleRate_ = engine.fastPathSampleRate_;
    sampleFormat.framesPerBuf_ = engine.fastPathFramesPerBuf_;
    engine.recorder_ = new AudioRecorder(&sampleFormat, engine.slEngineItf_);
    if(!engine.recorder_) {
        return JNI_FALSE;
    }
    engine.recorder_->SetBufQueues(engine.freeBufQueue_, engine.recBufQueue_);
    engine.recorder_->RegisterCallback(EngineService, (void*)&engine);
    return JNI_TRUE;
}
As in the player case, a SampleFormat object is created and zeroed. Its pcmFormat_ member is initialized from engine.bitsPerSample_, and channels_, sampleRate_, and framesPerBuf_ are copied from engine.sampleChannels_, engine.fastPathSampleRate_, and engine.fastPathFramesPerBuf_. Then an AudioRecorder object is created and assigned to engine.recorder_; if creation fails the function returns JNI_FALSE. On success, recorder_->SetBufQueues(engine.freeBufQueue_, engine.recBufQueue_) is called; note the order is the mirror image of the player's: the recorder takes empty buffers from freeBufQueue_ and pushes filled ones into recBufQueue_. Finally RegisterCallback() installs the EngineService callback (a function pointer), and the function returns JNI_TRUE.
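
The SampleFormat definition itself is never shown in this article. Reconstructing it from the fields used in the two creation functions gives roughly the struct below; the field types are guesses based on the casts above, not copied from audio_common.h, and representation_ only appears in the commented-out line.

#include <cstdint>

// Reconstructed from usage -- an approximation, not the sample's actual declaration.
struct SampleFormat {
    uint32_t sampleRate_;      // engine.fastPathSampleRate_ (millihertz)
    uint32_t framesPerBuf_;    // engine.fastPathFramesPerBuf_
    uint16_t channels_;        // engine.sampleChannels_ (cast to uint16_t)
    uint16_t pcmFormat_;       // bits per sample, e.g. SL_PCMSAMPLEFORMAT_FIXED_16
    uint32_t representation_;  // e.g. SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT (unused here)
};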

deleteAudioRecorder()

deleteAudioRecorder() is defined in audio_main.cpp:

JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_deleteAudioRecorder(JNIEnv *env, jclass type) {
    if(engine.recorder_)
        delete engine.recorder_;
    engine.recorder_ = nullptr;
}

If engine.recorder_ is not null, the AudioRecorder object is deleted, and the pointer is then set to nullptr.

startPlay()

startPlay() is defined in audio_main.cpp:

JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_startPlay(JNIEnv *env, jclass type) {
    engine.frameCount_  = 0;
    /*
     * start player: make it into waitForData state
     */
    if(SL_BOOLEAN_FALSE == engine.player_->Start()){
        LOGE("====%s failed", __FUNCTION__);
        return;
    }
    engine.recorder_->Start();
}

First, engine.frameCount_ is reset to 0. Then engine.player_->Start() is called to put the player into its wait-for-data state; if this returns SL_BOOLEAN_FALSE, an error is logged and the function returns. Otherwise engine.recorder_->Start() is called to begin capturing.
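
AudioPlayer::Start() and AudioRecorder::Start() are not shown in this article. In OpenSL ES, starting playback and recording ultimately comes down to state changes like the ones below; this is a hedged sketch with assumed member names (playItf_, recItf_), not the sample's code.

#include <SLES/OpenSLES.h>

// Sketch: flip the player into the playing state; it then waits for buffers
// to arrive from recBufQueue_.
static SLresult startPlayerSketch(SLPlayItf playItf_) {
    return (*playItf_)->SetPlayState(playItf_, SL_PLAYSTATE_PLAYING);
}

// Sketch: flip the recorder into the recording state; it then starts filling
// buffers taken from freeBufQueue_.
static SLresult startRecorderSketch(SLRecordItf recItf_) {
    return (*recItf_)->SetRecordState(recItf_, SL_RECORDSTATE_RECORDING);
}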

stopPlay()

stopPlay() is defined in audio_main.cpp:

JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_stopPlay(JNIEnv *env, jclass type) {
    engine.recorder_->Stop();
    engine.player_ ->Stop();
    delete engine.recorder_;
    delete engine.player_;
    engine.recorder_ = NULL;
    engine.player_ = NULL;
}
This calls the Stop() methods of engine.recorder_ and engine.player_ in turn, deletes both objects, and sets the pointers to NULL. Stopping the recorder first means no new buffers are pushed into recBufQueue_ while the player is shutting down.