iOS Speech Recognition Wrapper


Our project uses iFlytek (讯飞) speech recognition, and I found the iFlytek framework a bit awkward to call directly, so over an afternoon break I wrapped it a second time to make it easier for the rest of the team to use. Feel free to take it if you need it (just remember to replace the appId with the key you registered yourself). Below is a quick walkthrough of the wrapper. The code can be downloaded at: http://download.csdn.net/download/u010670117/9365611

(1):

First, the IATConfig class from the iFlytek demo is reused to configure the required parameters:

@property (nonatomic, copy) NSString *appID;

This appID property is the one I added; in the .m file, replace it with the appId you registered.

The remaining parameters come straight from the demo, and each one is documented in detail inside IATConfig (a short configuration sketch follows the property list below):

/** Maximum recording length */
@property (nonatomic, strong) NSString *speechTimeout;
/** Back endpoint: trailing silence, in ms, that ends the recording */
@property (nonatomic, strong) NSString *vadEos;
/** Front endpoint: silence, in ms, allowed before speech begins */
@property (nonatomic, strong) NSString *vadBos;
/** Recognition language */
@property (nonatomic, strong) NSString *language;
/** Recognition dialect; defaults to Mandarin */
@property (nonatomic, strong) NSString *accent;
/** Whether to return punctuation in the result */
@property (nonatomic, strong) NSString *dot;
/** Audio sample rate */
@property (nonatomic, strong) NSString *sampleRate;
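For reference, here is a minimal sketch of how these parameters might be filled in before starting a recognition session. It assumes the sharedInstance singleton from the demo's IATConfig; the timeout, accent, and sample-rate values shown are only illustrative, not required:

// Minimal configuration sketch (assumes the demo's IATConfig singleton; values are illustrative).
IATConfig *config = [IATConfig sharedInstance];
config.appID         = @"your_app_id";      // the appid you registered with iFlytek
config.speechTimeout = @"30000";            // maximum recording length, in ms
config.vadBos        = @"5000";             // front endpoint: silence allowed before speech, in ms
config.vadEos        = @"3000";             // back endpoint: trailing silence that ends recording, in ms
config.language      = [IATConfig chinese]; // or [IATConfig english]
config.accent        = @"mandarin";         // dialect, only meaningful for Chinese
config.dot           = @"1";                // "1" = return punctuation, "0" = strip it
config.sampleRate    = @"16000";            // 16 kHz is the recommended rate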

(2):

Once the parameters are in place, the next step calls the framework's recognition class. It is wrapped in RecoganizionOfVoice, which hands the dictation result back through a block (a usage sketch follows the implementation below).


#import <Foundation/Foundation.h>
#import <iflyMSC/iflyMSC.h>

typedef void (^recoganizonOfVoiceBlock)(NSString *result, BOOL successResolution);

@interface RecoganizionOfVoice : NSObject <IFlyRecognizerViewDelegate>

+ (RecoganizionOfVoice *)createSingleInstance;
- (void)establishObject;

@property (nonatomic, copy) recoganizonOfVoiceBlock recoganizonOfVoiceBlock;
@property (nonatomic, copy) NSString *resultString;
/** Recognizer with a built-in UI */
@property (nonatomic, strong) IFlyRecognizerView *recongnizerView;
@property (nonatomic, strong) IFlyDataUploader *dataUploader;
/** Path of the recorded audio file */
@property (nonatomic, copy) NSString *filePathOfVoice;

@end


#import "RecoganizionOfVoice.h"

#import "IATConfig.h"


//#define APPIDOfVoice @"564dab94"


static RecoganizionOfVoice *reconganizionOfVoice =nil;

@implementation RecoganizionOfVoice

+(RecoganizionOfVoice *)createSingleInstance

{

   staticdispatch_once_t onceToken;

   dispatch_once(&onceToken, ^{

        reconganizionOfVoice = [[RecoganizionOfVoicealloc]init];

    });

    returnreconganizionOfVoice;

}

-(instancetype)init

{

   self = [superinit];

   if (self) {

//        [self establishObject];

    }

    return self;

}

- (void)establishObject
{
    // Configure the APPID and SDK logging
    [IFlySetting setLogFile:LVL_ALL];
    [IFlySetting showLogcat:YES];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *cachePath = [paths objectAtIndex:0];
    [IFlySetting setLogFilePath:cachePath];
    NSString *appIdString = [NSString stringWithFormat:@"appid=%@", [IATConfig sharedInstance].appID];
    [IFlySpeechUtility createUtility:appIdString];

    // Create the recognizer (with its built-in UI)
    if ([IATConfig sharedInstance].haveView == YES) {

        if (self.recongnizerView) {
            [self.recongnizerView cancel];
        } else {
            self.recongnizerView = [[IFlyRecognizerView alloc] initWithCenter:CGPointMake([UIScreen mainScreen].bounds.size.width/2, [UIScreen mainScreen].bounds.size.height/2)];
        }
        self.resultString = @"";
        [self.recongnizerView setParameter:@"" forKey:[IFlySpeechConstant PARAMS]];
        [self.recongnizerView setParameter:@"iat" forKey:[IFlySpeechConstant IFLY_DOMAIN]];
        self.recongnizerView.delegate = self;
        IATConfig *iatConfig = [IATConfig sharedInstance];
        // Maximum recording length
        [self.recongnizerView setParameter:iatConfig.speechTimeout forKey:[IFlySpeechConstant SPEECH_TIMEOUT]];
        // Back endpoint
        [self.recongnizerView setParameter:iatConfig.vadEos forKey:[IFlySpeechConstant VAD_EOS]];
        // Front endpoint
        [self.recongnizerView setParameter:iatConfig.vadBos forKey:[IFlySpeechConstant VAD_BOS]];
        // Network timeout
        [self.recongnizerView setParameter:@"60000" forKey:[IFlySpeechConstant NET_TIMEOUT]];
        // Sample rate; 16K is recommended
        [self.recongnizerView setParameter:iatConfig.sampleRate forKey:[IFlySpeechConstant SAMPLE_RATE]];
        if ([iatConfig.language isEqualToString:[IATConfig chinese]]) {
            // Language
            [self.recongnizerView setParameter:iatConfig.language forKey:[IFlySpeechConstant LANGUAGE]];
            // Dialect
            [self.recongnizerView setParameter:iatConfig.accent forKey:[IFlySpeechConstant ACCENT]];
        } else if ([iatConfig.language isEqualToString:[IATConfig english]]) {
            // Language
            [self.recongnizerView setParameter:iatConfig.language forKey:[IFlySpeechConstant LANGUAGE]];
        }
        // Whether to return punctuation
        [self.recongnizerView setParameter:iatConfig.dot forKey:[IFlySpeechConstant ASR_PTT]];
    }

    // Use the microphone as the audio source
    [self.recongnizerView setParameter:IFLY_AUDIO_SOURCE_MIC forKey:@"audio_source"];
    // Return plain-text dictation results
    [self.recongnizerView setParameter:@"plain" forKey:[IFlySpeechConstant RESULT_TYPE]];
    // Save the recorded audio
    [self.recongnizerView setParameter:@"asr.pcm" forKey:[IFlySpeechConstant ASR_AUDIO_PATH]];
    [self.recongnizerView start];
}

// Recognition error callback
- (void)onError:(IFlySpeechError *)error
{
    if ([error errorCode] != 0 && self.recoganizonOfVoiceBlock) {
        self.recoganizonOfVoiceBlock(@"Recognition failed", NO);
    }
}

// Dictation result callback
- (void)onResult:(NSArray *)resultArray isLast:(BOOL)isLast
{
    if (isLast) {
        //        NSLog(@"resultArray1%@", resultArray);
    } else {
        NSMutableString *result = [[NSMutableString alloc] init];
        NSDictionary *dic = [resultArray objectAtIndex:0];
        [result appendString:self.resultString];
        // The recognized text comes back as the dictionary keys
        for (NSString *key in dic) {
            [result appendFormat:@"%@", key];
        }
        // Strip commas and spaces from the accumulated result in place
        [result replaceOccurrencesOfString:@"," withString:@"" options:NSLiteralSearch range:NSMakeRange(0, result.length)];
        [result replaceOccurrencesOfString:@" " withString:@"" options:NSLiteralSearch range:NSMakeRange(0, result.length)];
        self.resultString = result;
        if (self.recoganizonOfVoiceBlock) {
            self.recoganizonOfVoiceBlock(self.resultString, YES);
        }
    }
}

@end
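Before the third layer is added, RecoganizionOfVoice can already be used on its own. A usage sketch (the logging inside the block is just an example):

RecoganizionOfVoice *voice = [RecoganizionOfVoice createSingleInstance];
voice.recoganizonOfVoiceBlock = ^(NSString *result, BOOL successResolution) {
    if (successResolution) {
        NSLog(@"dictation result: %@", result);
    } else {
        NSLog(@"recognition failed: %@", result);
    }
};
[voice establishObject];   // configures the SDK and brings up the recognizer view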


(3): Even with the two layers above, the class is still a little awkward for other people to call, so a third layer was added. To use it, just call this one class method:

 [TheToolOfvoicerecognition getTextOfVoice:^(id dictionary) {

 } resolvingFailure:^(id dictionary) {

 }];
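The implementation of TheToolOfvoicerecognition is left to the downloadable project, but a method with the signature called above could be a thin layer over RecoganizionOfVoice. A hypothetical sketch, assuming it only forwards the block result to a success or a failure callback:

@implementation TheToolOfvoicerecognition

// Hypothetical third-layer wrapper: forwards the dictation result to the two callbacks.
+ (void)getTextOfVoice:(void (^)(id dictionary))success
      resolvingFailure:(void (^)(id dictionary))failure
{
    RecoganizionOfVoice *voice = [RecoganizionOfVoice createSingleInstance];
    voice.recoganizonOfVoiceBlock = ^(NSString *result, BOOL successResolution) {
        if (successResolution && success) {
            success(result);
        } else if (!successResolution && failure) {
            failure(result);
        }
    };
    [voice establishObject];
}

@end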

If you later hit an iFlytek pitfall you cannot get past, leave a comment; if I have run into it before, I will help you work through it promptly.



