Android AudioEffect: A Brief Introduction


Reposted from: http://www.cnblogs.com/salam/archive/2011/01/09/1931241.html

Android 2.3 added support for audio effects such as reverb; these APIs are contained in the android.media.audiofx package.

  I. Overview

 

  AudioEffect is the base class for audio effect control provided by the Android audio framework. Developers should not use this class directly; instead, they should use one of its subclasses, listed below.

    Equalizer
    Virtualizer
    BassBoost
    PresetReverb
    EnvironmentalReverb


  When creating an AudioEffect, if the effect is to be applied to a specific AudioTrack or MediaPlayer instance, the application must specify the audio session ID of that instance; to apply the effect to the global audio output mix, audio session 0 must be specified instead.
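  As a minimal sketch (not part of the original article), an Equalizer could be attached to one player's session as shown below; attachEqualizer and R.raw.music are placeholder names, and the usual android.media / android.media.audiofx imports are assumed:

// Hypothetical helper: attach an Equalizer to a specific MediaPlayer by using
// that player's audio session ID. R.raw.music is a placeholder resource.
private Equalizer attachEqualizer(Context context) {
    MediaPlayer player = MediaPlayer.create(context, R.raw.music);
    Equalizer equalizer = new Equalizer(0 /* priority */, player.getAudioSessionId());
    equalizer.setEnabled(true);
    // Passing 0 as the session ID instead would target the global output mix
    // (which needs the permission discussed next).
    return equalizer;
}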


  Creating an effect on the global audio output mix (audio session 0) requires the MODIFY_AUDIO_SETTINGS permission.
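  If the global output mix were targeted, the permission would be declared in AndroidManifest.xml roughly as follows (a sketch; the per-player demo later in this article does not need it):

<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />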


  If the requested effect does not yet exist in the audio framework, it is created; if it already exists, the existing effect engine is reused. When a higher-priority object requests an effect that a lower-priority object is already using, control of the effect engine is transferred to the higher-priority object; otherwise control remains with the current owner. In that situation the affected application is notified through its listener.
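  A minimal sketch of reacting to such a control change, assuming the equalizer instance from the earlier snippet and an import of android.media.audiofx.AudioEffect:

equalizer.setControlStatusListener(new AudioEffect.OnControlStatusChangeListener() {
    @Override
    public void onControlStatusChange(AudioEffect effect, boolean controlGranted) {
        if (controlGranted) {
            // This object has (re)gained control of the effect engine.
        } else {
            // A higher-priority object now controls the engine; settings made
            // here may not take effect until control is returned.
        }
    }
});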


  II. Nested Classes


  1. AudioEffect.Descriptor: the effect descriptor contains information about a particular effect implemented in the audio framework.
  2. AudioEffect.OnControlStatusChangeListener: this interface defines the method called by the AudioEffect when the application's control of the effect engine changes.

  3. AudioEffect.OnEnableStatusChangeListener: this interface defines the method called by the AudioEffect when the effect's enable status changes.
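  As a companion to the control-status sketch above, the enable-status callback could be registered like this (again assuming the earlier equalizer instance):

equalizer.setEnableStatusListener(new AudioEffect.OnEnableStatusChangeListener() {
    @Override
    public void onEnableStatusChange(AudioEffect effect, boolean enabled) {
        // Called when another object holding control enables or disables the effect.
    }
});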

 

  III. Constants

String  ACTION_CLOSE_AUDIO_EFFECT_CONTROL_SESSION: closes an audio effect control session.
String  ACTION_DISPLAY_AUDIO_EFFECT_CONTROL_PANEL: launches an audio effect control panel UI.
String  ACTION_OPEN_AUDIO_EFFECT_CONTROL_SESSION: opens an audio effect control session.
int     ALREADY_EXISTS: internal operation status.
int     CONTENT_TYPE_GAME: value of EXTRA_CONTENT_TYPE when the content being played is game audio.
int     CONTENT_TYPE_MOVIE: value of EXTRA_CONTENT_TYPE when the content being played is a movie.
int     CONTENT_TYPE_MUSIC: value of EXTRA_CONTENT_TYPE when the content being played is music.
int     CONTENT_TYPE_VOICE: value of EXTRA_CONTENT_TYPE when the content being played is speech.
String  EFFECT_AUXILIARY: the effect connection mode is auxiliary.
String  EFFECT_INSERT: the effect connection mode is insert.
int     ERROR: indicates an operation failure.
int     ERROR_BAD_VALUE: operation failed due to a bad parameter value.
int     ERROR_DEAD_OBJECT: operation failed due to a dead remote object.
int     ERROR_INVALID_OPERATION: operation failed because it was requested in a wrong state.
int     ERROR_NO_INIT: operation failed due to bad object initialization.
int     ERROR_NO_MEMORY: operation failed due to lack of memory.
String  EXTRA_AUDIO_SESSION: contains the ID of the audio session the effect applies to.
String  EXTRA_CONTENT_TYPE: indicates the type of content the application is playing.
String  EXTRA_PACKAGE_NAME: contains the package name of the calling application.
int     SUCCESS: successful operation.
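  For example, the ACTION and EXTRA constants let an application hand effect control over to the platform's effect panel, if one is installed. A sketch, assumed to run inside an Activity that already holds a prepared MediaPlayer named player; REQUEST_EFFECT_PANEL is an arbitrary request code, and the usual Intent/AudioEffect imports are assumed:

Intent panel = new Intent(AudioEffect.ACTION_DISPLAY_AUDIO_EFFECT_CONTROL_PANEL);
panel.putExtra(AudioEffect.EXTRA_AUDIO_SESSION, player.getAudioSessionId());
panel.putExtra(AudioEffect.EXTRA_PACKAGE_NAME, getPackageName());
panel.putExtra(AudioEffect.EXTRA_CONTENT_TYPE, AudioEffect.CONTENT_TYPE_MUSIC);
if (panel.resolveActivity(getPackageManager()) != null) {
    // Only launch the panel when some application on the device provides one.
    startActivityForResult(panel, REQUEST_EFFECT_PANEL);
}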

 

  IV. Public Methods

 

AudioEffect.Descriptor  getDescriptor()
    Gets the effect descriptor.
boolean  getEnabled()
    Returns the effect's enabled state.
int  getId()
    Returns the effect's identifier.
boolean  hasControl()
    Checks whether this AudioEffect object owns control of the effect engine; returns true if it does.
static Descriptor[]  queryEffects()
    Queries all effects available on the platform.
void  release()
    Releases the native AudioEffect resources.
void  setControlStatusListener(AudioEffect.OnControlStatusChangeListener listener)
    Registers a control status listener; the AudioEffect notifies it when control of the effect engine changes.
void  setEnableStatusListener(AudioEffect.OnEnableStatusChangeListener listener)
    Sets an enable status listener; the AudioEffect notifies it when the effect's enable state changes.
int  setEnabled(boolean enabled)
    Enables or disables the effect.
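  A short sketch of queryEffects(), which is useful for checking what the device actually provides before creating an effect (TAG is a placeholder log tag; android.util.Log and android.media.audiofx.AudioEffect imports are assumed):

for (AudioEffect.Descriptor descriptor : AudioEffect.queryEffects()) {
    // Each descriptor exposes the effect's name, implementor and connection mode.
    Log.d(TAG, "Available effect: " + descriptor.name
            + ", implementor: " + descriptor.implementor
            + ", connect mode: " + descriptor.connectMode);
}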

 

  V. Sample Application (taken from the SDK samples)

  1. Create a new project

  

  You may have noticed that a 2.3 project contains one configuration file that a 2.2 project does not: proguard.cfg, which holds the ProGuard script used for code obfuscation.

  

  2. Open AndroidManifest.xml

  Add the "android.permission.RECORD_AUDIO" permission.
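  In the manifest this amounts to a single declaration, roughly as follows (the Visualizer used by the demo needs this permission to capture audio data):

<uses-permission android:name="android.permission.RECORD_AUDIO" />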

  

  3. The visualizer view class, VisualizerView.java (its full source is listed after Main.java below)

  

  

  [Figure: screenshot of the result]

  

  4. Main.java

  

package com.wjq.audiofx;

import android.app.Activity;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.audiofx.Equalizer;
import android.media.audiofx.Visualizer;
import android.os.Bundle;
import android.util.Log;
import android.view.Gravity;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.SeekBar;
import android.widget.TextView;

public class Main extends Activity {
    private static final String TAG = "AudioFxDemo";

    private static final float VISUALIZER_HEIGHT_DIP = 50f;

    private MediaPlayer mMediaPlayer;
    private Visualizer mVisualizer;
    private Equalizer mEqualizer;

    private LinearLayout mLinearLayout;
    private VisualizerView mVisualizerView;
    private TextView mStatusTextView;

    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        setVolumeControlStream(AudioManager.STREAM_MUSIC);

        mStatusTextView = new TextView(this);

        mLinearLayout = new LinearLayout(this);
        mLinearLayout.setOrientation(LinearLayout.VERTICAL);
        mLinearLayout.addView(mStatusTextView);

        setContentView(mLinearLayout);

        // Create the MediaPlayer
        mMediaPlayer = MediaPlayer.create(this, R.raw.test_cbr);
        Log.d(TAG, "MediaPlayer audio session ID: " + mMediaPlayer.getAudioSessionId());

        setupVisualizerFxAndUI();
        setupEqualizerFxAndUI();

        // Make sure the visualizer is enabled only when you actually want to receive data, and
        // when it makes sense to receive data.
        mVisualizer.setEnabled(true);

        // When the stream ends, we don't need to collect any more data. We don't do this in
        // setupVisualizerFxAndUI because we likely want to have more, non-Visualizer related code
        // in this callback.
        mMediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
            public void onCompletion(MediaPlayer mediaPlayer) {
                mVisualizer.setEnabled(false);
            }
        });

        mMediaPlayer.start();
        mStatusTextView.setText("Playing audio...");
    }

    private void setupEqualizerFxAndUI() {
        // Create the Equalizer object (an AudioEffect subclass) and attach it to our media player,
        // with a default priority (0).
        mEqualizer = new Equalizer(0, mMediaPlayer.getAudioSessionId());
        mEqualizer.setEnabled(true);

        TextView eqTextView = new TextView(this);
        eqTextView.setText("Equalizer:");
        mLinearLayout.addView(eqTextView);

        short bands = mEqualizer.getNumberOfBands();

        final short minEQLevel = mEqualizer.getBandLevelRange()[0];
        final short maxEQLevel = mEqualizer.getBandLevelRange()[1];

        for (short i = 0; i < bands; i++) {
            final short band = i;

            TextView freqTextView = new TextView(this);
            freqTextView.setLayoutParams(new ViewGroup.LayoutParams(
                    ViewGroup.LayoutParams.FILL_PARENT,
                    ViewGroup.LayoutParams.WRAP_CONTENT));
            freqTextView.setGravity(Gravity.CENTER_HORIZONTAL);
            freqTextView.setText((mEqualizer.getCenterFreq(band) / 1000) + " Hz");
            mLinearLayout.addView(freqTextView);

            LinearLayout row = new LinearLayout(this);
            row.setOrientation(LinearLayout.HORIZONTAL);

            TextView minDbTextView = new TextView(this);
            minDbTextView.setLayoutParams(new ViewGroup.LayoutParams(
                    ViewGroup.LayoutParams.WRAP_CONTENT,
                    ViewGroup.LayoutParams.WRAP_CONTENT));
            minDbTextView.setText((minEQLevel / 100) + " dB");

            TextView maxDbTextView = new TextView(this);
            maxDbTextView.setLayoutParams(new ViewGroup.LayoutParams(
                    ViewGroup.LayoutParams.WRAP_CONTENT,
                    ViewGroup.LayoutParams.WRAP_CONTENT));
            maxDbTextView.setText((maxEQLevel / 100) + " dB");

            LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(
                    ViewGroup.LayoutParams.FILL_PARENT,
                    ViewGroup.LayoutParams.WRAP_CONTENT);
            layoutParams.weight = 1;
            SeekBar bar = new SeekBar(this);
            bar.setLayoutParams(layoutParams);
            bar.setMax(maxEQLevel - minEQLevel);
            bar.setProgress(mEqualizer.getBandLevel(band));

            bar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
                public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                    mEqualizer.setBandLevel(band, (short) (progress + minEQLevel));
                }

                public void onStartTrackingTouch(SeekBar seekBar) {}
                public void onStopTrackingTouch(SeekBar seekBar) {}
            });

            row.addView(minDbTextView);
            row.addView(bar);
            row.addView(maxDbTextView);

            mLinearLayout.addView(row);
        }
    }

    private void setupVisualizerFxAndUI() {
        // Create a VisualizerView (defined below), which will render the simplified audio
        // wave form to a Canvas.
        mVisualizerView = new VisualizerView(this);
        mVisualizerView.setLayoutParams(new ViewGroup.LayoutParams(
                ViewGroup.LayoutParams.FILL_PARENT,
                (int) (VISUALIZER_HEIGHT_DIP * getResources().getDisplayMetrics().density)));
        mLinearLayout.addView(mVisualizerView);

        // Create the Visualizer object and attach it to our media player.
        mVisualizer = new Visualizer(mMediaPlayer.getAudioSessionId());
        mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
        mVisualizer.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
            public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {
                mVisualizerView.updateVisualizer(bytes);
            }

            public void onFftDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {}
        }, Visualizer.getMaxCaptureRate() / 2, true, false);
    }

    @Override
    protected void onPause() {
        super.onPause();

        if (isFinishing() && mMediaPlayer != null) {
            mVisualizer.release();
            mEqualizer.release();
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
    }
}

 

 

VisualizerView.java:
package com.wjq.audiofx;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.view.View;

public class VisualizerView extends View {

    private byte[] mBytes;
    private float[] mPoints;
    private Rect mRect = new Rect();

    private Paint mForePaint = new Paint();

    public VisualizerView(Context context) {
        super(context);
        init();
    }

    private void init() {
        mBytes = null;

        mForePaint.setStrokeWidth(1f);
        mForePaint.setAntiAlias(true);
        mForePaint.setColor(Color.rgb(0, 128, 255));
    }

    public void updateVisualizer(byte[] bytes) {
        mBytes = bytes;
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        if (mBytes == null) {
            return;
        }

        if (mPoints == null || mPoints.length < mBytes.length * 4) {
            mPoints = new float[mBytes.length * 4];
        }

        mRect.set(0, 0, getWidth(), getHeight());

        for (int i = 0; i < mBytes.length - 1; i++) {
            mPoints[i * 4] = mRect.width() * i / (mBytes.length - 1);
            mPoints[i * 4 + 1] = mRect.height() / 2
                    + ((byte) (mBytes[i] + 128)) * (mRect.height() / 2) / 128;
            mPoints[i * 4 + 2] = mRect.width() * (i + 1) / (mBytes.length - 1);
            mPoints[i * 4 + 3] = mRect.height() / 2
                    + ((byte) (mBytes[i + 1] + 128)) * (mRect.height() / 2) / 128;
        }

        canvas.drawLines(mPoints, mForePaint);
    }

}
