iOS: playing multiple live PCM audio streams for multi-party voice chat


I had been working with the AudioQueue code a predecessor left behind, but its setup behaved like a singleton, so multi-party voice chat was not achievable: with several speakers the audio stuttered, and instantiating two players made no difference, probably because underneath it was still just one singleton.


It turned out there are really only two ways to play raw audio streams on iOS, AudioQueue and OpenAL, so I tried OpenAL. With two instances, playback succeeded and the two voices did not interfere with each other.


Recording is still done with AudioQueue while playback uses OpenAL; I will write up the complete version later.
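The recording side is not included in this post. For reference, here is a minimal sketch of my own (not the original code) of what the AudioQueue capture setup could look like, assuming 16-bit mono PCM at 8 kHz; the names RecorderState, HandleInputBuffer and StartRecording are made up, and the spot where each captured buffer is handed to the network layer is only a hypothetical hook.

#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>

static const int kNumRecordBuffers = 3;

typedef struct {
    AudioQueueRef queue;
} RecorderState;

// AudioQueue input callback: called each time a buffer of captured PCM is ready.
static void HandleInputBuffer(void *inUserData,
                              AudioQueueRef inAQ,
                              AudioQueueBufferRef inBuffer,
                              const AudioTimeStamp *inStartTime,
                              UInt32 inNumPackets,
                              const AudioStreamPacketDescription *inPacketDesc)
{
    if (inBuffer->mAudioDataByteSize > 0) {
        NSData *pcm = [NSData dataWithBytes:inBuffer->mAudioData
                                     length:inBuffer->mAudioDataByteSize];
        // ... hand `pcm` to the network layer here (hypothetical hook) ...
        (void)pcm;
    }
    // Re-enqueue the buffer so capture keeps running.
    AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
}

static RecorderState gRecorder;

static void StartRecording(void)
{
    // 16-bit signed mono linear PCM at 8000 Hz (an assumption; match kDefaultSampleRate).
    AudioStreamBasicDescription fmt = {0};
    fmt.mSampleRate       = 8000.0;
    fmt.mFormatID         = kAudioFormatLinearPCM;
    fmt.mFormatFlags      = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
    fmt.mChannelsPerFrame = 1;
    fmt.mBitsPerChannel   = 16;
    fmt.mBytesPerFrame    = 2;
    fmt.mFramesPerPacket  = 1;
    fmt.mBytesPerPacket   = 2;

    AudioQueueNewInput(&fmt, HandleInputBuffer, &gRecorder,
                       NULL, kCFRunLoopCommonModes, 0, &gRecorder.queue);

    // Roughly 100 ms per buffer: 8000 frames/s * 0.1 s * 2 bytes/frame.
    UInt32 bufferBytes = 1600;
    for (int i = 0; i < kNumRecordBuffers; i++) {
        AudioQueueBufferRef buffer;
        AudioQueueAllocateBuffer(gRecorder.queue, bufferBytes, &buffer);
        AudioQueueEnqueueBuffer(gRecorder.queue, buffer, 0, NULL);
    }
    AudioQueueStart(gRecorder.queue, NULL);
}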


Playback code, the .h file:

#import <Foundation/Foundation.h>
#import <OpenAL/al.h>
#import <OpenAL/alc.h>
#import <OpenAL/oalMacOSX_OALExtensions.h>

@interface LXOpenAlPlay2 : NSObject
{
    ALCcontext *mContext;
    ALCdevice  *mDevice;
    ALuint      outSourceId;          // the single streaming source owned by this player
    NSMutableDictionary *soundDictionary;
    NSMutableArray *bufferStorageArray;
    ALuint      buff;
    NSTimer    *updateBufferTimer;
}

@property (nonatomic) ALCcontext *mContext;
@property (nonatomic) ALCdevice *mDevice;
@property (nonatomic, retain) NSMutableDictionary *soundDictionary;
@property (nonatomic, retain) NSMutableArray *bufferStorageArray;

- (void)initOpenAL;
- (void)openAudioFromQueue:(uint8_t *)data dataSize:(UInt32)dataSize;
- (void)openAudioFromQueue:(NSData *)tmpData;   // queue one block of raw PCM for playback
- (BOOL)updataQueueBuffer;
- (void)playSound;
- (void)stopSound;
- (void)cleanUpOpenAL;

@end


The .m file:


#import "LXOpenAlPlay2.h"#import "audioHeader.h"@implementation LXOpenAlPlay2@synthesize mDevice,mContext,soundDictionary,bufferStorageArray;#pragma make - openal function-(void)initOpenAL{    NSLog(@"=======initOpenAl===");    mDevice=alcOpenDevice(NULL);    if (mDevice)    {        mContext=alcCreateContext(mDevice, NULL);        alcMakeContextCurrent(mContext);    }        alGenSources(1, &outSourceId);    alSpeedOfSound(1.0);    alDopplerVelocity(1.0);    alDopplerFactor(1.0);    alSourcef(outSourceId, AL_PITCH, 1.0f);    alSourcef(outSourceId, AL_GAIN, 1.0f);    alSourcei(outSourceId, AL_LOOPING, AL_FALSE);    alSourcef(outSourceId, AL_SOURCE_TYPE, AL_STREAMING);    }- (void) openAudioFromQueue:(NSData *)tmpData//(unsigned char*)data dataSize:(UInt32)dataSize{    NSCondition* ticketCondition= [[NSCondition alloc] init];    [ticketCondition lock];        ALuint bufferID = 0;    alGenBuffers(1, &bufferID);    // NSLog(@"bufferID = %d",bufferID);//    NSData * tmpData = [NSData dataWithBytes:data length:dataSize];//    VideoFrameExtrator *temp = [[(AppDelegate *)[[UIApplication sharedApplication] delegate] viewController] video];    int aSampleRate,aBit,aChannel;    aSampleRate = kDefaultSampleRate;//temp->sampleRates;    aBit = 16;//temp->aBits;    aChannel = 1;//temp->Channels;    // NSLog(@"%d,%d,%d",aSampleRate,aBit,aChannel);    ALenum format = 0;        if (aBit ==8)    {        if (aChannel ==1)            format = AL_FORMAT_MONO8;        elseif(aChannel == 2)            format = AL_FORMAT_STEREO8;        elseif( alIsExtensionPresent( "AL_EXT_MCFORMATS" ) )        {            if( aChannel ==4 )            {                format = alGetEnumValue( "AL_FORMAT_QUAD8" );            }            if( aChannel ==6 )            {                format = alGetEnumValue( "AL_FORMAT_51CHN8" );            }        }    }        if( aBit ==16 )    {        if( aChannel ==1 )        {            format = AL_FORMAT_MONO16;        }        if( aChannel ==2 )        {            // NSLog(@"achhenl= 2!!!!!");            format = AL_FORMAT_STEREO16;        }        if( alIsExtensionPresent("AL_EXT_MCFORMATS" ) )        {            if( aChannel ==4 )            {                format = alGetEnumValue( "AL_FORMAT_QUAD16" );            }            if( aChannel ==6 )            {                NSLog(@"achannel = 6!!!!!!");                format = alGetEnumValue( "AL_FORMAT_51CHN16" );            }        }    }    //  NSLog(@"%d",format);    alBufferData(bufferID, format, (char*)[tmpData bytes], (ALsizei)[tmpData length],aSampleRate);    alSourceQueueBuffers(outSourceId, 1, &bufferID);        [self updataQueueBuffer];        ALint stateVaue;    alGetSourcei(outSourceId, AL_SOURCE_STATE, &stateVaue);        [ticketCondition unlock];    ticketCondition = nil;    }- (BOOL)updataQueueBuffer{    ALint stateVaue;    int processed, queued;        alGetSourcei(outSourceId, AL_BUFFERS_PROCESSED, &processed);    alGetSourcei(outSourceId, AL_BUFFERS_QUEUED, &queued);        //NSLog(@"Processed = %d\n", processed);    //NSLog(@"Queued = %d\n", queued);        alGetSourcei(outSourceId, AL_SOURCE_STATE, &stateVaue);        if (stateVaue == AL_STOPPED ||        stateVaue == AL_PAUSED ||        stateVaue == AL_INITIAL)    {        if (queued < processed || queued ==0 ||(queued == 1 && processed ==1)) {            NSLog(@"Audio Stop");            [self stopSound];            [self cleanUpOpenAL];        }                // NSLog(@"===statevaue ========================%d",stateVaue);        [self playSound];        
returnNO;    }        while(processed--)    {        // NSLog(@"queue = %d",queued);        alSourceUnqueueBuffers(outSourceId, 1, &buff);        alDeleteBuffers(1, &buff);    }    //NSLog(@"queue = %d",queued);    returnYES;}#pragma make - play/stop/clean function-(void)playSound{    alSourcePlay(outSourceId);}-(void)stopSound{    alSourceStop(outSourceId);}-(void)cleanUpOpenAL{    [updateBufferTimer invalidate];    updateBufferTimer = nil;    alDeleteSources(1, &outSourceId);    alDeleteBuffers(1, &buff);    alcDestroyContext(mContext);    alcCloseDevice(mDevicde);}-(void)dealloc{    NSLog(@"openal sound dealloc");}@end
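The .m above references kDefaultSampleRate from audioHeader.h, which the post does not include; something like #define kDefaultSampleRate 8000 (16-bit mono voice PCM) is assumed below. The following is a small usage sketch of my own, not from the original post: the class VoiceChatPlayback and the callback didReceivePCM:fromUser: are made-up names standing in for whatever receives PCM from the network. It follows the post's approach of one LXOpenAlPlay2 instance per remote speaker, each fed its own stream.

#import <Foundation/Foundation.h>
#import "LXOpenAlPlay2.h"

@interface VoiceChatPlayback : NSObject
{
    LXOpenAlPlay2 *playerA;   // remote speaker A
    LXOpenAlPlay2 *playerB;   // remote speaker B
}
- (void)didReceivePCM:(NSData *)pcm fromUser:(NSInteger)userIndex;
@end

@implementation VoiceChatPlayback

- (instancetype)init
{
    if ((self = [super init])) {
        playerA = [[LXOpenAlPlay2 alloc] init];
        [playerA initOpenAL];
        playerB = [[LXOpenAlPlay2 alloc] init];
        [playerB initOpenAL];
    }
    return self;
}

// Hypothetical network callback: userIndex identifies which speaker the PCM block belongs to.
- (void)didReceivePCM:(NSData *)pcm fromUser:(NSInteger)userIndex
{
    LXOpenAlPlay2 *player = (userIndex == 0) ? playerA : playerB;
    [player openAudioFromQueue:pcm];   // queues the block and keeps the source playing
}

@end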





