audio录音在JAVA层的调用
来源:互联网 发布:一线城市 知乎 编辑:程序博客网 时间:2024/06/05 02:05
以 Android 7.0 版本的录音应用(SoundRecorder)为例,对录音的调用流程进行跟进。
STEP 1:
\packages\apps\SoundRecorder\src\com\android\soundrecorder\SoundRecorder.java
/**
 * Activity entry point: reads the requested output MIME type from the
 * launching Intent (only AMR and 3GPP are supported; "any" falls back to
 * 3GPP), sets up the Recorder and UI, and restores saved recorder state.
 *
 * @param icycle saved instance state, or null on a fresh launch
 */
public void onCreate(Bundle icycle) {
    super.onCreate(icycle);
    Intent i = getIntent();
    if (i != null) {
        String s = i.getType();
        if (AUDIO_AMR.equals(s) || AUDIO_3GPP.equals(s) || AUDIO_ANY.equals(s)
                || ANY_ANY.equals(s)) {
            mRequestedType = s;
        } else if (s != null) {
            // we only support amr and 3gpp formats right now
            setResult(RESULT_CANCELED);
            finish();
            return;
        }
        final String EXTRA_MAX_BYTES
                = android.provider.MediaStore.Audio.Media.EXTRA_MAX_BYTES;
        // -1 means "no size limit was requested by the caller".
        mMaxFileSize = i.getLongExtra(EXTRA_MAX_BYTES, -1);
    }
    // The wildcard types default to 3GPP output.
    if (AUDIO_ANY.equals(mRequestedType) || ANY_ANY.equals(mRequestedType)) {
        mRequestedType = AUDIO_3GPP;
    }
    setContentView(R.layout.main);
    mRecorder = new Recorder();
    mRecorder.setOnStateChangedListener(this);
    mRemainingTimeCalculator = new RemainingTimeCalculator();
    PowerManager pm
            = (PowerManager) getSystemService(Context.POWER_SERVICE);
    mWakeLock = pm.newWakeLock(PowerManager.SCREEN_DIM_WAKE_LOCK,
            "SoundRecorder");
    initResourceRefs();
    setResult(RESULT_CANCELED);
    registerExternalStorageListener();
    if (icycle != null) {
        // Restore recorder state after e.g. a configuration change.
        Bundle recorderState = icycle.getBundle(RECORDER_STATE_KEY);
        if (recorderState != null) {
            mRecorder.restoreState(recorderState);
            mSampleInterrupted = recorderState.getBoolean(SAMPLE_INTERRUPTED_KEY, false);
            mMaxFileSize = recorderState.getLong(MAX_FILE_SIZE_KEY, -1);
        }
    }
    updateUi();
}
/*
 * Handles the record/play/stop/accept/discard buttons.
 * For the record button: checks storage availability, stops any other
 * audio playback, then starts recording in the requested format.
 */
public void onClick(View button) {
    if (!button.isEnabled())
        return;
    switch (button.getId()) {
        case R.id.recordButton:
            mRemainingTimeCalculator.reset();
            if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
                // No external storage mounted: show an error instead of recording.
                mSampleInterrupted = true;
                mErrorUiMessage = getResources().getString(R.string.insert_sd_card);
                updateUi();
            } else if (!mRemainingTimeCalculator.diskSpaceAvailable()) {
                mSampleInterrupted = true;
                mErrorUiMessage = getResources().getString(R.string.storage_is_full);
                updateUi();
            } else {
                // Stop any music playback so it does not leak into the recording.
                stopAudioPlayback();
                if (AUDIO_AMR.equals(mRequestedType)) {
                    mRemainingTimeCalculator.setBitRate(BITRATE_AMR);
                    mRecorder.startRecording(MediaRecorder.OutputFormat.AMR_NB, ".amr", this);
                } else if (AUDIO_3GPP.equals(mRequestedType)) {
                    mRemainingTimeCalculator.setBitRate(BITRATE_3GPP);
                    mRecorder.startRecording(MediaRecorder.OutputFormat.THREE_GPP, ".3gpp",
                            this);
                } else {
                    // onCreate() normalizes mRequestedType, so this is unreachable
                    // unless the field was corrupted.
                    throw new IllegalArgumentException("Invalid output file type requested");
                }
                if (mMaxFileSize != -1) {
                    mRemainingTimeCalculator.setFileSizeLimit(
                            mRecorder.sampleFile(), mMaxFileSize);
                }
            }
            break;
        case R.id.playButton:
            mRecorder.startPlayback();
            break;
        case R.id.stopButton:
            mRecorder.stop();
            break;
        case R.id.acceptButton:
            mRecorder.stop();
            saveSample();
            finish();
            break;
        case R.id.discardButton:
            mRecorder.delete();
            finish();
            break;
    }
}
STEP 2:
\packages\apps\SoundRecorder\src\com\android\soundrecorder\Recorder.java
public void startRecording(intoutputfileformat, String extension, Context context) {
stop();
if (mSampleFile == null) {
File sampleDir = Environment.getExternalStorageDirectory();
if (!sampleDir.canWrite()) //Workaround for broken sdcard support on the device.
sampleDir = newFile("/sdcard/sdcard");
try {
mSampleFile = File.createTempFile(SAMPLE_PREFIX,extension, sampleDir);
} catch (IOException e) {
setError(SDCARD_ACCESS_ERROR);
return;
}
}
/*
这里设置了些关键的参数,后面会用到。
*/
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(outputfileformat);
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mRecorder.setOutputFile(mSampleFile.getAbsolutePath());
// Handle IOException
try {
mRecorder.prepare();
} catch(IOException exception) {
setError(INTERNAL_ERROR);
mRecorder.reset();
mRecorder.release();
mRecorder = null;
return;
}
// Handle RuntimeException if therecording couldn't start
try {
/*
上面有定义mRecorder = new MediaRecorder();
*/
mRecorder.start();
} catch (RuntimeException exception) {
AudioManager audioMngr =(AudioManager)context.getSystemService(Context.AUDIO_SERVICE);
boolean isInCall =((audioMngr.getMode() == AudioManager.MODE_IN_CALL) ||
(audioMngr.getMode() ==AudioManager.MODE_IN_COMMUNICATION));
if (isInCall) {
setError(IN_CALL_RECORD_ERROR);
} else {
setError(INTERNAL_ERROR);
}
mRecorder.reset();
mRecorder.release();
mRecorder = null;
return;
}
mSampleStart =System.currentTimeMillis();
setState(RECORDING_STATE);
}
STEP 3:
\frameworks\base\media\java\android\media\MediaRecorder.java
/*
在本地类中具体实现。
*/
public native void start() throwsIllegalStateException;
STEP 4:
\frameworks\base\media\jni\android_media_MediaRecorder.cpp
/*
 * JNI bridge for MediaRecorder.start(): fetches the native recorder bound
 * to the Java object and forwards the call, converting a failed status
 * into a Java RuntimeException.
 */
static void
android_media_MediaRecorder_start(JNIEnv* env, jobject thiz)
{
    ALOGV("start");
    /*
     * This is the native-layer MediaRecorder class; it shares its name with
     * the Java-layer MediaRecorder but is a different class -- easy to
     * confuse the two.
     */
    sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
    // NOTE(review): upstream AOSP null-checks `mr` before dereferencing it;
    // this excerpt omits that check -- TODO confirm against the full source.
    process_media_recorder_call(env, mr->start(), "java/lang/RuntimeException", "start failed.");
}
STEP 5:
frameworks\av\media\libmedia\mediarecorder.cpp
status_tMediaRecorder::start()
{
ALOGV("start");
if (mMediaRecorder == NULL) {
ALOGE("media recorder is notinitialized yet");
return INVALID_OPERATION;
}
if (!(mCurrentState &MEDIA_RECORDER_PREPARED)) {
ALOGE("start called in an invalidstate: %d", mCurrentState);
return INVALID_OPERATION;
}
/*
这里的mMediaRecorder在构造函数中定义。
MediaRecorder::MediaRecorder(const String16&opPackageName) : mSurfaceMediaSource(NULL)
{
ALOGV("constructor");
const sp<IMediaPlayerService> service(getMediaPlayerService());
if (service !=NULL) {
mMediaRecorder =service->createMediaRecorder(opPackageName);
}
if(mMediaRecorder != NULL) {
mCurrentState = MEDIA_RECORDER_IDLE;
}
doCleanUp();
}
*/
status_t ret = mMediaRecorder->start();
if (OK != ret) {
ALOGE("start failed: %d",ret);
mCurrentState = MEDIA_RECORDER_ERROR;
return ret;
}
mCurrentState = MEDIA_RECORDER_RECORDING;
return ret;
}
STEP 6:
/*
通过 Binder 机制,实际调用的就是 MediaRecorderClient.cpp 中的 start() 函数。
*/
\frameworks\av\media\libmediaplayerservice\MediaRecorderClient.cpp
status_tMediaRecorderClient::start()
{
ALOGV("start");
Mutex::Autolock lock(mLock);
if (mRecorder == NULL) {
ALOGE("recorder is notinitialized");
return NO_INIT;
}
/*
前面有mRecorder 的定义:
MediaRecorderClient::MediaRecorderClient(constsp<MediaPlayerService>& service, pid_t pid,
constString16& opPackageName)
{
ALOGV("Client constructor");
mPid = pid;
mRecorder = newStagefrightRecorder(opPackageName);
mMediaPlayerService = service;
}
所以mRecorder就是创建了一个StagefrightRecorder的类。
*/
returnmRecorder->start();
}
STEP 7:
\frameworks\av\media\libmediaplayerservice\StagefrightRecorder.cpp
/*
通过层层的封装映射,终于到了执行主体的关键函数了。
*/
/*
回头看STEP 2中的设置参数,进入相应的处理函数。
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(outputfileformat);
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mRecorder.setOutputFile(mSampleFile.getAbsolutePath());
*/
status_tStagefrightRecorder::start() {
ALOGV("start");
if (mOutputFd < 0) {
ALOGE("Output file descriptor isinvalid");
return INVALID_OPERATION;
}
status_t status = OK;
if (mVideoSource != VIDEO_SOURCE_SURFACE) {
/*
初始化对应的mWriter
*/
status = prepareInternal();
if (status != OK) {
return status;
}
}
if (mWriter == NULL) {
ALOGE("File writer is notavaialble");
return UNKNOWN_ERROR;
}
switch (mOutputFormat) {
case OUTPUT_FORMAT_DEFAULT:
case OUTPUT_FORMAT_THREE_GPP:
case OUTPUT_FORMAT_MPEG_4:
case OUTPUT_FORMAT_WEBM:
{
bool isMPEG4 = true;
if (mOutputFormat ==OUTPUT_FORMAT_WEBM) {
isMPEG4 = false;
}
sp<MetaData> meta = newMetaData;
/*
Meta为设置参数
*/
setupMPEG4orWEBMMetaData(&meta);
/*
mWriter在前面的
*/
status = mWriter->start(meta.get());
break;
}
#ifdefMTK_AOSP_ENHANCEMENT
case OUTPUT_FORMAT_AMR_NB:
case OUTPUT_FORMAT_AMR_WB:
case OUTPUT_FORMAT_AAC_ADIF:
case OUTPUT_FORMAT_AAC_ADTS:
case OUTPUT_FORMAT_MPEG2TS:
case OUTPUT_FORMAT_WAV:
case OUTPUT_FORMAT_OGG:
{
status = mWriter->start();
break;
}
case OUTPUT_FORMAT_RTP_AVP:
{
sp<MetaData> meta = newMetaData;
if (mRTPTarget.length() > 0) {
meta->setCString(kKeyRTPTarget, mRTPTarget.string());
}
status = mWriter->start(meta.get());
break;
}
#else
case OUTPUT_FORMAT_AMR_NB:
case OUTPUT_FORMAT_AMR_WB:
case OUTPUT_FORMAT_AAC_ADIF:
case OUTPUT_FORMAT_AAC_ADTS:
case OUTPUT_FORMAT_RTP_AVP:
case OUTPUT_FORMAT_MPEG2TS:
{
status = mWriter->start();
break;
}
#endif
default:
{
ALOGE("Unsupported output fileformat: %d", mOutputFormat);
status = UNKNOWN_ERROR;
break;
}
}
if (status != OK) {
mWriter.clear();
mWriter = NULL;
}
if ((status == OK) && (!mStarted)){
mStarted = true;
uint32_t params =IMediaPlayerService::kBatteryDataCodecStarted;
if (mAudioSource != AUDIO_SOURCE_CNT) {
params |=IMediaPlayerService::kBatteryDataTrackAudio;
}
if (mVideoSource !=VIDEO_SOURCE_LIST_END) {
params |=IMediaPlayerService::kBatteryDataTrackVideo;
}
addBatteryData(params);
}
MM_LOGD("- status=%d",status);
return status;
}
/*
录音的大体JAVA调用流程到这里基本结束了,下面就是具体的写入文档的操作。
MediaWriter,我们简单的跟进下,不深究,待学习。
*/
STEP 8-1:
\frameworks\av\media\libmediaplayerservice\StagefrightRecorder.cpp
/*
 * Dispatches on mOutputFormat (set in STEP 2 via setOutputFormat()) and
 * builds the matching mWriter via the per-format setup helpers.
 */
status_t StagefrightRecorder::prepareInternal() {
    ALOGV("prepare");
    if (mOutputFd < 0) {
        ALOGE("Output file descriptor is invalid");
        return INVALID_OPERATION;
    }
    // Get UID and PID here for permission checking
    mClientUid = IPCThreadState::self()->getCallingUid();
    mClientPid = IPCThreadState::self()->getCallingPid();
    status_t status = OK;
    switch (mOutputFormat) {
        case OUTPUT_FORMAT_DEFAULT:
        case OUTPUT_FORMAT_THREE_GPP:
        case OUTPUT_FORMAT_MPEG_4:
        case OUTPUT_FORMAT_WEBM:
            status = setupMPEG4orWEBMRecording();
            break;
        case OUTPUT_FORMAT_AMR_NB:
        case OUTPUT_FORMAT_AMR_WB:
            status = setupAMRRecording();
            break;
        case OUTPUT_FORMAT_AAC_ADIF:
        case OUTPUT_FORMAT_AAC_ADTS:
            status = setupAACRecording();
            break;
        case OUTPUT_FORMAT_RTP_AVP:
            status = setupRTPRecording();
            break;
        case OUTPUT_FORMAT_MPEG2TS:
            status = setupMPEG2TSRecording();
            break;
#ifdef MTK_AOSP_ENHANCEMENT
        case OUTPUT_FORMAT_WAV:
            // ADPCM encoders get a dedicated writer; everything else in WAV
            // goes through the PCM recording path.
            if (AUDIO_ENCODER_MS_ADPCM == mAudioEncoder || AUDIO_ENCODER_DVI_IMA_ADPCM == mAudioEncoder) {
#ifdef HAVE_ADPCMENCODE_FEATURE
                status = setupADPCMRecording();
#endif
            }
            else {
#ifdef MTK_PCM_RECORD_SUPPORT
                status = setupPCMRecording();
#endif
            }
            break;
        case OUTPUT_FORMAT_OGG:
#ifdef MTK_OGG_RECORD_SUPPORT
            status = setupOGGRecording();
#endif
            break;
#endif
        default:
            ALOGE("Unsupported output file format: %d", mOutputFormat);
            status = UNKNOWN_ERROR;
            break;
    }
    return status;
}
STEP 8-2:
status_tStagefrightRecorder::setupMPEG4orWEBMRecording() {
MM_LOGD("+");
mWriter.clear();
mTotalBitRate = 0;
status_t err = OK;
sp<MediaWriter> writer;
sp<MPEG4Writer> mp4writer;
if (mOutputFormat == OUTPUT_FORMAT_WEBM) {
writer = new WebmWriter(mOutputFd);
} else {
writer = mp4writer = newMPEG4Writer(mOutputFd);
}
if (mVideoSource <VIDEO_SOURCE_LIST_END) {
setDefaultVideoEncoderIfNecessary();
sp<MediaSource> mediaSource;
err =setupMediaSource(&mediaSource);
if (err != OK) {
MM_LOGE("setupMediaSource Failerr=%d",err);
return err;
}
#ifdefined(MTK_AOSP_ENHANCEMENT) && defined(MTK_SLOW_MOTION_VIDEO_SUPPORT)
int32_t colorFormat = 0;// OMX_COLOR_FormatUnused;
if(mediaSource != NULL){
sp<MetaData> meta =mediaSource->getFormat();
CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
}
char path[PROPERTY_VALUE_MAX];
int64_t pathvalue;//default 0 path2, if!0 path1(MtkBSSource)
property_get("vr.slowmotion.dl.path", path, "0");
pathvalue = atol(path);
ALOGD("vr.slowmotion.dl.path=%" PRId64 "",pathvalue);
if(pathvalue&&colorFormat ==OMX_MTK_COLOR_FormatBitStream){
sp<MediaSource> encoder;
ALOGI("Create MtkBSSource forhw direct link");
setupMtkBSSource(mediaSource,&encoder);
writer->addSource(encoder);
//mVideoEncoderSource = encoder;
} else {
sp<MediaCodecSource> encoder;
err =setupVideoEncoder(mediaSource, &encoder);
if (err != OK) {
MM_LOGE("setupVideoEncoderFail err=%d",err);
return err;
}
writer->addSource(encoder);
mVideoEncoderSource = encoder;
}
#else
sp<MediaCodecSource> encoder;
err = setupVideoEncoder(mediaSource,&encoder);
if (err != OK) {
MM_LOGE("setupVideoEncoderFail err=%d",err);
return err;
}
writer->addSource(encoder);
mVideoEncoderSource = encoder;
#endif
mTotalBitRate += mVideoBitRate;
}
if (mOutputFormat != OUTPUT_FORMAT_WEBM) {
// Audio source is added at the end ifit exists.
// This help make sure that the"recoding" sound is suppressed for
// camcorder applications in therecorded files.
// TODO Audio source is currentlyunsupported for webm output; vorbis encoder needed.
// disable audio for time lapserecording
bool disableAudio = mCaptureFpsEnable&& mCaptureFps < mFrameRate;
if (!disableAudio && mAudioSource!= AUDIO_SOURCE_CNT) {
err = setupAudioEncoder(writer);
if (err != OK) return err;
mTotalBitRate += mAudioBitRate;
}
if (mCaptureFpsEnable) {
mp4writer->setCaptureRate(mCaptureFps);
}
if (mInterleaveDurationUs > 0) {
mp4writer->setInterleaveDuration(mInterleaveDurationUs);
}
if (mLongitudex10000 > -3600000&& mLatitudex10000 > -3600000) {
mp4writer->setGeoData(mLatitudex10000, mLongitudex10000);
}
}
if (mMaxFileDurationUs != 0) {
writer->setMaxFileDuration(mMaxFileDurationUs);
}
if (mMaxFileSizeBytes != 0) {
writer->setMaxFileSize(mMaxFileSizeBytes);
}
if (mVideoSource == VIDEO_SOURCE_DEFAULT
|| mVideoSource ==VIDEO_SOURCE_CAMERA) {
mStartTimeOffsetMs =mEncoderProfiles->getStartTimeOffsetMs(mCameraId);
} else if (mVideoSource ==VIDEO_SOURCE_SURFACE) {
// surface source doesn't need largeinitial delay
mStartTimeOffsetMs = 200;
}
if (mStartTimeOffsetMs > 0) {
writer->setStartTimeOffsetMs(mStartTimeOffsetMs);
}
writer->setListener(mListener);
mWriter = writer;
#ifdefMTK_AOSP_ENHANCEMENT //for EIS2.5
mMP4Writer = mp4writer;
#endif
MM_LOGD("-");
return OK;
}
STEP 8-3:
\frameworks\av\media\libstagefright\MPEG4Writer.cpp
status_tMPEG4Writer::start(MetaData *param) {
#ifdefMTK_AOSP_ENHANCEMENT
ALOGD("start ++");
#endif
if (mInitCheck != OK) {
return UNKNOWN_ERROR;
}
/*
* Check mMaxFileSizeLimitBytes at thebeginning
* since mMaxFileSizeLimitBytes may beimplicitly
* changed later for 32-bit file offseteven if
* user does not ask to set it explicitly.
*/
if (mMaxFileSizeLimitBytes != 0) {
mIsFileSizeLimitExplicitlyRequested =true;
}
int32_t use64BitOffset;
if (param &&
param->findInt32(kKey64BitFileOffset, &use64BitOffset) &&
use64BitOffset) {
mUse32BitOffset = false;
}
if (mUse32BitOffset) {
// Implicit 32 bit file size limit
if (mMaxFileSizeLimitBytes == 0) {
mMaxFileSizeLimitBytes =kMax32BitFileSize;
}
// If file size is set to be largerthan the 32 bit file
// size limit, treat it as an error.
if (mMaxFileSizeLimitBytes >kMax32BitFileSize) {
ALOGW("32-bit file size limit(%" PRId64 " bytes) too big. "
"It is changed to %" PRId64 "bytes",
mMaxFileSizeLimitBytes,kMax32BitFileSize);
mMaxFileSizeLimitBytes =kMax32BitFileSize;
}
}
int32_t use2ByteNalLength;
if (param &&
param->findInt32(kKey2ByteNalLength,&use2ByteNalLength) &&
use2ByteNalLength) {
mUse4ByteNalLength = false;
}
int32_t isRealTimeRecording;
if (param &¶m->findInt32(kKeyRealTimeRecording, &isRealTimeRecording)) {
mIsRealTimeRecording = isRealTimeRecording;
}
mStartTimestampUs = -1;
if (mStarted) {
if (mPaused) {
mPaused = false;
return startTracks(param);
}
return OK;
}
#ifdefMTK_AOSP_ENHANCEMENT
initStart(param);
#endif
if (!param ||
!param->findInt32(kKeyTimeScale,&mTimeScale)) {
mTimeScale = 1000;
}
CHECK_GT(mTimeScale, 0);
ALOGV("movie time scale: %d",mTimeScale);
/*
* When the requested file size limit issmall, the priority
* is to meet the file size limitrequirement, rather than
* to make the file streamable.mStreamableFile does not tell
* whether the actual recorded file isstreamable or not.
*/
#ifdefMTK_AOSP_ENHANCEMENT
mStreamableFile = false; //usemStreamableFile as the streamable fileoptional, usually make it false
#else
mStreamableFile =
(mMaxFileSizeLimitBytes != 0 &&
mMaxFileSizeLimitBytes >=kMinStreamableFileSizeInBytes);
#endif
/*
* mWriteMoovBoxToMemory is true if theamount of data in moov box is
* smaller than the reserved free space atthe beginning of a file, AND
* when the content of moov box isconstructed. Note that video/audio
* frame data is always written to the filebut not in the memory.
*
* Before stop()/reset() is called,mWriteMoovBoxToMemory is always
* false. When reset() is called at the endof a recording session,
* Moov box needs to be constructed.
*
* 1) Right before a moov box isconstructed, mWriteMoovBoxToMemory
* to set to mStreamableFile so that if
* the file is intended to be streamable,it is set to true;
* otherwise, it is set to false. When thevalue is set to false,
* all the content of the moov box iswritten immediately to
* the end of the file. When the value isset to true, all the
* content of the moov box is written to anin-memory cache,
* mMoovBoxBuffer, util the followingcondition happens. Note
* that the size of the in-memory cache isthe same as the
* reserved free space at the beginning ofthe file.
*
* 2) While the data of the moov box iswritten to an in-memory
* cache, the data size is checked againstthe reserved space.
* If the data size surpasses the reservedspace, subsequent moov
* data could no longer be hold in thein-memory cache. This also
* indicates that the reserved space wastoo small. At this point,
* _all_ moov data must be written to theend of the file.
* mWriteMoovBoxToMemory must be set tofalse to direct the write
* to the file.
*
* 3) If the data size in moov box issmaller than the reserved
* space after moov box is completelyconstructed, the in-memory
* cache copy of the moov box is written tothe reserved free
* space. Thus, immediately after the moovis completedly
* constructed, mWriteMoovBoxToMemory isalways set to false.
*/
mWriteMoovBoxToMemory = false;
mMoovBoxBuffer = NULL;
mMoovBoxBufferOffset = 0;
writeFtypBox(param);
mFreeBoxOffset = mOffset;
if (mEstimatedMoovBoxSize == 0) {
int32_t bitRate = -1;
if (param) {
param->findInt32(kKeyBitRate,&bitRate);
}
mEstimatedMoovBoxSize =estimateMoovBoxSize(bitRate);
}
CHECK_GE(mEstimatedMoovBoxSize, 8);
if (mStreamableFile) {
// Reserve a 'free' box only forstreamable file
#if defined(MTK_AOSP_ENHANCEMENT)&& defined(USE_FILE_CACHE)
mCacheWriter->seek(mFreeBoxOffset,SEEK_SET);
#else
lseek64(mFd, mFreeBoxOffset, SEEK_SET);
#endif
writeInt32(mEstimatedMoovBoxSize);
write("free", 4);
mMdatOffset = mFreeBoxOffset +mEstimatedMoovBoxSize;
} else {
mMdatOffset = mOffset;
}
mOffset = mMdatOffset;
#ifdefined(MTK_AOSP_ENHANCEMENT) && defined(USE_FILE_CACHE)
mCacheWriter->seek(mMdatOffset,SEEK_SET);
#else
lseek64(mFd, mMdatOffset, SEEK_SET);
#endif
if (mUse32BitOffset) {
write("????mdat", 8);
} else {
write("\x00\x00\x00\x01mdat????????", 16);
}
status_t err = startWriterThread();
if (err != OK) {
return err;
}
err = startTracks(param);
if (err != OK) {
return err;
}
mStarted = true;
#ifdefMTK_AOSP_ENHANCEMENT
ALOGD("start --");
#endif
return OK;
}
- audio录音在JAVA层的调用
- [RK3288][Android6.0] Audio录音HAL层的初始化流程分析
- [RK3288][Android6.0] Audio录音HAL层的数据读取流程分析
- Record Audio(录音)
- [RK3288][Android6.0] 调试笔记 --- Audio驱动层判断录音数据是否异常
- Android Audio System 架构初探:Java层
- 在linux kernel层调用用户层的方法
- 从java层调用native层的函数,在ida中看伪C代码更直观的方法
- 在android中通过java层程序调用命令行的一些备注
- Android Studio NDK 开发之在Native中调用java层的函数
- java service 层的相互调用问题
- JNI调用java层的方法
- Android java层和C层的相互调用
- Android Jni层调用应用层(Java)的方法
- jni层调用java层方法遇到的bug
- 在android的Java层、JNI层、Native C++层都实现了类似的函数,它们是层层向下调用
- ubuntu skype 录音-Record Audio
- audio:mt6236 录音驱动小结
- VMware安装Ubuntu虚拟机后的相关必备操作
- 解决android webview闪烁的现象
- C# Hashtable 使用说明 以及 Hashtable和HashMap的区别[转]
- Unity3D Shader之路 写Shader前必须要知道的事情2 表面着色器的理解
- gdb调试
- audio录音在JAVA层的调用
- JS和COM口通信
- 实验一大小写字母转换
- 关于pos打印机通过tcp/ip操作打印指令集
- 关于Maven创建webapp项目后pom.xml报错的解决方法
- app随着光照强度变化总结
- 那些不能遗忘的知识点回顾——C/C++系列(笔试面试高频题)
- c#关于数组和多态的简单使用
- myeclipse中部署项目到tomcat失败解决办法(错误:If a file is locked ...)