A Recording Program Built on a Handler Architecture


My app recently needed a recording feature, so I searched around and found an article describing one.

In that article, communication between the recording thread and the main thread is done by having the inner (recording) class poll a member variable of the outer class:

while (isRecord == true) { // isRecord is a member variable of the outer class
    readsize = audioRecord.read(audiodata, 0, bufferSizeInBytes);
    if (AudioRecord.ERROR_INVALID_OPERATION != readsize) {
        try {
            fos.write(audiodata);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}


For one thing I am not very familiar with Java, and for another this approach does not extend well, so I decided to use a Handler to communicate with the main thread instead.

But for a child thread to receive messages from the main thread, it has to call Looper.loop(). That method is an infinite loop and takes no arguments, so there is nowhere to hook in a callback:

    public void run() {
        super.run();
        Looper.prepare();
        Looper.loop(); // once this call is entered, it never returns
    }

The child thread cannot run both Looper.loop() and its own while loop, so I came up with a workaround.

Move the body of the while loop into a message handler: after each unit of work is done, the handler posts the same message back to itself, and this repeats until a STOP message arrives from the main thread (see the sketch below).
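In outline the pattern looks like this; this is only a minimal sketch with illustrative names (UnitWorker, MSG_UNIT, MSG_STOP, doOneUnit), not the real constants and methods from the full code further down:

import android.os.Handler;
import android.os.Looper;
import android.os.Message;

// Minimal sketch of the self-posting pattern (illustrative names).
class UnitWorker extends Thread {
    static final int MSG_UNIT = 1;   // "run one iteration of the old while loop"
    static final int MSG_STOP = 2;   // sent by the main thread to end the loop
    private boolean running = true;
    Handler handler;                 // valid only after run() has prepared the Looper

    @Override
    public void run() {
        Looper.prepare();            // this thread now owns a message queue
        handler = new Handler() {    // bound to this thread's Looper
            @Override
            public void handleMessage(Message msg) {
                if (msg.what == MSG_STOP) {
                    running = false;
                } else if (msg.what == MSG_UNIT && running) {
                    doOneUnit();                 // one chunk of work (e.g. one blocking read)
                    sendEmptyMessage(MSG_UNIT);  // re-post to ourselves: the "next iteration"
                }
            }
        };
        handler.sendEmptyMessage(MSG_UNIT);      // kick off the first unit
        Looper.loop();                           // dispatches the messages above
    }

    private void doOneUnit() { /* read a buffer and write it to a file, etc. */ }
}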

This turned out to work, and unlike a Timer-based scheme it wastes no time between units: the next read starts as soon as the previous one finishes.
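For comparison, a Timer-based version (hypothetical, not something I actually wrote; the class, period, and readOneUnit placeholder below are only illustrative) has to pick a fixed tick period, and whatever time is left over after a read finishes simply goes idle:

import java.util.Timer;
import java.util.TimerTask;

// Hypothetical Timer-based alternative, shown only for comparison.
// The tick period has to be tuned against the duration of one read; picking it
// conservatively leaves the thread idle between reads, which is exactly the
// waste the self-posting Handler approach avoids.
class TimerRecorder {
    static final long UNIT_PERIOD_MS = 200;  // assumed period, at least one unit's duration
    private final Timer timer = new Timer();

    void start() {
        timer.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                readOneUnit();   // one AudioRecord.read() plus a file write would go here
            }
        }, 0, UNIT_PERIOD_MS);
    }

    void stop() {
        timer.cancel();
    }

    private void readOneUnit() { /* placeholder */ }
}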

Here is the code:

package com.hp.speechclient;

/**
 * Created by Administrator on 15-7-16.
 */

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;

import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;

public class RecordThread extends Thread {
    private static final String TAG = RecordThread.class.getSimpleName();

    private Context mUiCtx;
    private Handler mUiHandler;          // the UI thread's handler, used to report REC_FIN
    private boolean bRecording = false;
    private FileOutputStream mSpeechStream = null;
    private AudioRecord mAudioRecord;
    private int mUnitBufSize;
    private Handler mHandler;            // created in run(), bound to this thread's Looper

    public RecordThread(Context ctx, Handler handler) {
        mUiCtx = ctx;
        mUiHandler = handler;
        mUnitBufSize = AudioRecord.getMinBufferSize(16000,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 16000,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, mUnitBufSize);
    }

    @Override
    public void run() {
        Looper.prepare();
        // The handler must be created here, after Looper.prepare(), so that it binds to
        // this thread's Looper; a field initializer would bind it to the thread that
        // constructed the object (usually the UI thread) and the work would run there.
        mHandler = new Handler() {
            @Override
            public void handleMessage(Message msg) {
                switch (msg.what) {
                    case CommonMsg.REC_STOP:
                        bRecording = false;
                        break;
                    case CommonMsg.REC_START:
                        bRecording = true;
                        startRecord();
                        break;
                    case CommonMsg.UNIT_REC_FIN:
                        if (bRecording) {
                            // keep looping: ask ourselves to record the next unit
                            mHandler.sendMessage(mHandler.obtainMessage(CommonMsg.UNIT_REC_START));
                        } else {
                            // a STOP arrived while the last unit was being recorded
                            stopRecord();
                            mUiHandler.sendMessage(mUiHandler.obtainMessage(CommonMsg.REC_FIN));
                        }
                        break;
                    case CommonMsg.UNIT_REC_START:
                        recordUnit();
                        break;
                    default:
                        assert false;
                        break;
                }
            }
        };
        Looper.loop();   // dispatch the messages above; never returns unless the Looper quits
    }

    private void startRecord() {
        mAudioRecord.startRecording();
        try {
            // open a file to hold the raw PCM bytes
            mSpeechStream = new FileOutputStream(CommonMsg.SPEECH_PATH + ".raw");
        } catch (Exception e) {
            assert false;
        }
        // record the first small unit
        recordUnit();
    }

    private void stopRecord() {
        mAudioRecord.stop();
        try {
            mSpeechStream.close();   // close the output stream
            mSpeechStream = null;
        } catch (IOException e) {
            assert false;
        }
        convertToWav(CommonMsg.SPEECH_PATH + ".raw", CommonMsg.SPEECH_PATH + ".wav");
    }

    private void recordUnit() {
        // buffer for one unit of audio, sized to the minimum record buffer
        byte[] audiodata = new byte[mUnitBufSize];
        int readsize = mAudioRecord.read(audiodata, 0, mUnitBufSize);
        if (readsize > 0) {   // negative values are error codes such as ERROR_INVALID_OPERATION
            try {
                mSpeechStream.write(audiodata, 0, readsize);   // write only the bytes actually read
            } catch (IOException e) {
                assert false;
            }
        }
        // tell ourselves this unit is done; handleMessage decides whether to continue
        mHandler.sendMessage(mHandler.obtainMessage(CommonMsg.UNIT_REC_FIN));
    }

    @Override
    protected void finalize() {
        try {
            super.finalize();
            mAudioRecord.release();   // release the native recording resources
            mAudioRecord = null;
        } catch (Throwable e) {
            assert false;
        }
    }

    // Returns null until run() has prepared the Looper and created the handler.
    public Handler getHandler() {
        return mHandler;
    }

    private void convertToWav(String inFilename, String outFilename) {
        FileInputStream in = null;
        FileOutputStream out = null;
        long totalAudioLen = 0;
        long totalDataLen = totalAudioLen + 36;
        int sampleRate = 16000;
        int channels = 1;
        byte bitDepth = 16;
        byte[] data = new byte[mUnitBufSize];
        try {
            in = new FileInputStream(inFilename);
            out = new FileOutputStream(outFilename);
            totalAudioLen = in.getChannel().size();
            totalDataLen = totalAudioLen + 36;
            WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
                    sampleRate, channels, bitDepth);
            int len;
            while ((len = in.read(data)) != -1) {
                out.write(data, 0, len);   // copy only the bytes actually read
            }
            in.close();
            out.close();
        } catch (FileNotFoundException e) {
            assert false;
        } catch (IOException e) {
            assert false;
        }
    }

    private void WriteWaveFileHeader(FileOutputStream out, long totalAudioLen,
                                     long totalDataLen, int sampleRate, int channels, byte bitDepth)
            throws IOException {
        int byteRate = bitDepth * sampleRate * channels / 8;
        byte[] header = new byte[44];
        header[0] = 'R'; // RIFF/WAVE header
        header[1] = 'I';
        header[2] = 'F';
        header[3] = 'F';
        header[4] = (byte) (totalDataLen & 0xff);
        header[5] = (byte) ((totalDataLen >> 8) & 0xff);
        header[6] = (byte) ((totalDataLen >> 16) & 0xff);
        header[7] = (byte) ((totalDataLen >> 24) & 0xff);
        header[8] = 'W';
        header[9] = 'A';
        header[10] = 'V';
        header[11] = 'E';
        header[12] = 'f'; // 'fmt ' chunk
        header[13] = 'm';
        header[14] = 't';
        header[15] = ' ';
        header[16] = 16; // 4 bytes: size of 'fmt ' chunk
        header[17] = 0;
        header[18] = 0;
        header[19] = 0;
        header[20] = 1; // format = 1 (PCM)
        header[21] = 0;
        header[22] = (byte) channels;
        header[23] = 0;
        header[24] = (byte) (sampleRate & 0xff);
        header[25] = (byte) ((sampleRate >> 8) & 0xff);
        header[26] = (byte) ((sampleRate >> 16) & 0xff);
        header[27] = (byte) ((sampleRate >> 24) & 0xff);
        header[28] = (byte) (byteRate & 0xff);
        header[29] = (byte) ((byteRate >> 8) & 0xff);
        header[30] = (byte) ((byteRate >> 16) & 0xff);
        header[31] = (byte) ((byteRate >> 24) & 0xff);
        header[32] = (byte) (channels * bitDepth / 8); // block align
        header[33] = 0;
        header[34] = 16; // bits per sample
        header[35] = 0;
        header[36] = 'd';
        header[37] = 'a';
        header[38] = 't';
        header[39] = 'a';
        header[40] = (byte) (totalAudioLen & 0xff);
        header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
        header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
        header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
        out.write(header, 0, 44);
    }
}
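CommonMsg is just a holder for the message IDs and the output path and is not shown above; a minimal version along these lines would do (the concrete values and the path here are placeholders, not the ones from my project):

public final class CommonMsg {
    public static final int REC_START      = 1;  // UI -> record thread: start recording
    public static final int REC_STOP       = 2;  // UI -> record thread: stop recording
    public static final int REC_FIN        = 3;  // record thread -> UI: the .wav file is ready
    public static final int UNIT_REC_START = 4;  // internal: record the next unit
    public static final int UNIT_REC_FIN   = 5;  // internal: one unit has finished
    public static final String SPEECH_PATH = "/sdcard/speech";  // placeholder output path
}

On the UI side, the thread is driven roughly like this (an illustrative sketch, not code lifted from my project; note that getHandler() returns null until run() has created the handler):

// Inside an Activity (illustrative).
Handler uiHandler = new Handler() {    // created on the UI thread, so bound to the main Looper
    @Override
    public void handleMessage(Message msg) {
        if (msg.what == CommonMsg.REC_FIN) {
            // the .wav file is ready; update the UI here
        }
    }
};

RecordThread recorder = new RecordThread(this, uiHandler);
recorder.start();   // run() prepares the Looper and creates the record thread's handler

// later, e.g. from button callbacks:
recorder.getHandler().sendEmptyMessage(CommonMsg.REC_START);
// ...
recorder.getHandler().sendEmptyMessage(CommonMsg.REC_STOP);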

