iOS: writing audio and video (AAC + H.264) to a file with FFmpeg

The code below uses the old FFmpeg API (av_register_all, st->codec, CODEC_FLAG_GLOBAL_HEADER and so on) to set up an AAC audio stream and an optional H.264/MPEG-4 video stream, and to mux already-encoded buffers into an output file on iOS.

#import <Foundation/Foundation.h>
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"

#define STREAM_DURATION   5.0
#define STREAM_FRAME_RATE 25 /* 25 images/s */
#define STREAM_NB_FRAMES  ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
#define STREAM_PIX_FMT    PIX_FMT_YUV420P /* default pix_fmt */

#undef  NO_DATA
#define NO_DATA  0
#define HAS_DATA 1

float t, tincr, tincr2;
int16_t *samples;
uint8_t *audio_outbuf;
int audio_outbuf_size;
int audio_input_frame_size;

AVFormatContext *oc;
AVStream *audio_st0, *video_st0;

/* add an AAC audio output stream */
static AVStream *add_audio_stream(AVFormatContext *oc, int codec_id)
{
    AVCodecContext *c;
    AVStream *st;
    AVCodec *codec;

    codec = avcodec_find_encoder(AV_CODEC_ID_AAC);
    if (NULL == codec) {
        printf("could not find a suitable encoder!\n");
    }

    st = avformat_new_stream(oc, codec);
    if (st == NULL) {
        printf("could not allocate the output stream!\n");
    }

    c = st->codec;
    c->codec          = codec;
    c->codec_id       = AV_CODEC_ID_AAC;
    c->codec_type     = AVMEDIA_TYPE_AUDIO;
    c->sample_fmt     = AV_SAMPLE_FMT_S16;
    c->sample_rate    = 44100;
    c->channel_layout = AV_CH_LAYOUT_STEREO;
    c->channels       = /*av_get_channel_layout_nb_channels(audioCodecCtx->channel_layout)*/2;
    c->bit_rate       = 16000;
    /* the native AAC encoder is still experimental in this FFmpeg version */
    c->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
    c->profile = FF_PROFILE_AAC_MAIN;  /* note: make_dsi() below describes AAC-LC (object type 2) */
    return st;
}

/* build the 2-byte AAC AudioSpecificConfig (decoder specific info) */
static void make_dsi(unsigned int sampling_frequency_index, unsigned int channel_configuration, char *dsi)
{
    unsigned int object_type = 2; // AAC LC by default
    dsi[0] = (object_type << 3) | (sampling_frequency_index >> 1);
    dsi[1] = ((sampling_frequency_index & 1) << 7) | (channel_configuration << 3);
}

/* map a sampling rate to its MPEG-4 sampling frequency index */
static int get_sr_index(unsigned int sampling_frequency)
{
    switch (sampling_frequency) {
        case 96000: return 0;
        case 88200: return 1;
        case 64000: return 2;
        case 48000: return 3;
        case 44100: return 4;
        case 32000: return 5;
        case 24000: return 6;
        case 22050: return 7;
        case 16000: return 8;
        case 12000: return 9;
        case 11025: return 10;
        case 8000:  return 11;
        case 7350:  return 12;
        default:    return 0;
    }
}

static void open_audio(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c;

    c = st->codec;
//    char dsi[2] = {0};
//    make_dsi((unsigned int)get_sr_index((unsigned int)44100), (unsigned int)2, dsi);
//    c->extradata = (uint8_t *)dsi;
//    c->extradata_size = 2;
//    if (oc->oformat->flags & AVFMT_GLOBALHEADER)
//        c->flags |= CODEC_FLAG_GLOBAL_HEADER;

    /* open it */
    if (avcodec_open2(c, st->codec->codec, NULL) < 0) {
        fprintf(stderr, "could not open audio codec\n");
    }
}

/* write one already-encoded AAC buffer as a packet */
static void write_audio_frame(AVFormatContext *oc, AVStream *st, void *audioBuffer, int bufferSize, double pts)
{
    AVCodecContext *c;
    AVPacket pkt;
    av_init_packet(&pkt);

    c = st->codec;

    if (c->coded_frame && c->coded_frame->pts != AV_NOPTS_VALUE)
        pkt.pts = av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);

    // if (c->coded_frame->key_frame)
        pkt.flags |= AV_PKT_FLAG_KEY;
    pkt.stream_index = st->index;
    pkt.data = audioBuffer;
    pkt.size = bufferSize;

    /* write the compressed frame in the media file */
    if (av_write_frame(oc, &pkt) != 0) {  // av_interleaved_write_frame
        fprintf(stderr, "Error while writing audio frame\n");
    } else {
        NSLog(@"write_audio_frame");
    }
}

static void close_audio(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);
}
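The block commented out inside open_audio() points c->extradata at a two-byte array on the stack, which dangles as soon as the function returns. If the extradata is actually needed (for example when the same code targets an MP4/MOV output instead of the raw .aac file used below), a safer variant heap-allocates it; set_aac_extradata below is only a sketch under that assumption, not part of the original post.

/* Sketch (assumption, not from the original post): attach the AudioSpecificConfig
 * built by make_dsi() as heap-allocated extradata, but only when the container
 * needs global headers; raw ADTS .aac carries this information in-band. */
static void set_aac_extradata(AVFormatContext *oc, AVCodecContext *c,
                              int sample_rate, int channels)
{
    if (!(oc->oformat->flags & AVFMT_GLOBALHEADER))
        return;

    uint8_t *dsi = av_mallocz(2 + FF_INPUT_BUFFER_PADDING_SIZE);
    if (!dsi)
        return;

    make_dsi(get_sr_index(sample_rate), channels, (char *)dsi);
    c->extradata      = dsi;  /* now owned by the codec context */
    c->extradata_size = 2;
    c->flags         |= CODEC_FLAG_GLOBAL_HEADER;
}

open_audio() could call set_aac_extradata(oc, c, 44100, 2) right before avcodec_open2() instead of the commented-out stack version.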
/**************************************************************/
/* video output */

AVFrame *picture, *tmp_picture;
uint8_t *video_outbuf;
int frame_count, video_outbuf_size;

/* add a video output stream */
static AVStream *add_video_stream(AVFormatContext *oc, int codec_id)
{
    AVCodecContext *c;
    AVStream *st;
    AVCodec *codec;

    codec = avcodec_find_encoder(codec_id);
    if (NULL == codec) {
        NSLog(@"could not find video encoder");
    }

    st = avformat_new_stream(oc, codec);
    c = st->codec;
    c->codec     = codec;
    c->me_range  = 5;   // 16
    c->max_qdiff = 1;
    c->qmin      = 10;
    c->qmax      = 51;
    c->qcompress = 0.5f;

    c->codec_id   = codec_id;
    c->codec_type = AVMEDIA_TYPE_VIDEO;
    c->bit_rate   = 512000;
    c->width      = 480;
    c->height     = 640;

    // videoCodecCtx->gop_size = 100;
    // gop_size is for inter-frame compression; e.g. 12 means one intra frame every 12 pictures
    if (codec_id == AV_CODEC_ID_MPEG4) {
        c->pix_fmt = PIX_FMT_YUV420P;   /* pixel format */
    } else {
        c->pix_fmt = PIX_FMT_YUVJ420P;
        // videoCodecCtx->pix_fmt = PIX_FMT_YUV420P;
    }
    c->time_base.den = 15;
    c->time_base.num = 1;
    c->max_b_frames  = 0;
    c->idct_algo     = FF_IDCT_ARM;

    if (!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov") || !strcmp(oc->oformat->name, "3gp"))
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;
    return st;
}

static void open_video(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c;

    c = st->codec;

    /* open the codec */
    if (avcodec_open2(c, st->codec->codec, NULL) < 0) {
        fprintf(stderr, "could not open video codec\n");
    }
}

double timeStamp = 0;

/* write one already-encoded video buffer as a packet */
static void write_video_frame(AVFormatContext *oc, AVStream *st, void *videoBuffer, int bufferSize, int pts)
{
    int ret;
    AVCodecContext *c;

    c = st->codec;
    AVPacket pkt;
    av_init_packet(&pkt);

//    if (c->coded_frame->pts != AV_NOPTS_VALUE)
//        pkt.pts = av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);

    pkt.pts = (double)st->pts.val * (1.0f / 15.0f) + pts;  // output video timestamp
//    pkt.dts = 0;
    // if (c->coded_frame->key_frame)
        pkt.flags |= AV_PKT_FLAG_KEY;

    pkt.stream_index = st->index;
    pkt.data = videoBuffer;
    pkt.size = bufferSize;

    /* write the compressed frame in the media file */
    ret = av_write_frame(oc, &pkt);
    if (ret != 0) {
        fprintf(stderr, "Error while writing video frame\n");
    } else {
        NSLog(@"write_video_frame");
    }
}

static void close_video(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);
}
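write_video_frame() above mixes units when it builds the timestamp: pkt.pts and pkt.dts are expected in the stream's time_base, while (double)st->pts.val * (1.0f / 15.0f) + pts is a value in seconds. A more conventional approach, assuming frames are numbered 0, 1, 2, ... at the 15 fps configured in add_video_stream(), is to rescale the frame index into the stream time base; make_video_pts below is an illustrative helper, not part of the original post.

/* Sketch (assumption): convert a running frame number captured at 15 fps into
 * the muxer's time base, which is the unit av_write_frame() expects pts/dts in. */
static int64_t make_video_pts(AVStream *st, int64_t frame_index)
{
    AVRational input_time_base = { 1, 15 };   /* matches c->time_base in add_video_stream() */
    return av_rescale_q(frame_index, input_time_base, st->time_base);
}

Inside write_video_frame() this would replace the floating-point expression with something like pkt.pts = pkt.dts = make_video_pts(st, frame_count++), using the frame_count global declared above.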
static void startR()
{
    const char *filename;
    AVOutputFormat *fmt;

    /* initialize libavcodec, and register all codecs and formats */
//    avcodec_register_all();
    av_register_all();

    NSString *videoName = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    /* the output path below is set up when recording starts */
    NSString *str = [videoName stringByAppendingPathComponent:@"videos"];
    videoName = [str stringByAppendingPathComponent:@"test.aac"];
    filename = [videoName UTF8String];

    /* auto detect the output format from the name. default is mp4. */
    fmt = av_guess_format(NULL, filename, NULL);

    /* allocate the output media context */
    oc = avformat_alloc_context();
    if (!oc) {
        fprintf(stderr, "Memory error\n");
        exit(1);
    }
    oc->oformat = fmt;
    fmt->audio_codec = AV_CODEC_ID_AAC;
//    fmt->video_codec = AV_CODEC_ID_H264;
//    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    /* add the audio and video streams using the default format codecs
       and initialize the codecs */
    video_st0 = NULL;
    audio_st0 = NULL;
    if (fmt->video_codec != CODEC_ID_NONE) {
        video_st0 = add_video_stream(oc, fmt->video_codec);
    }
    if (fmt->audio_codec != CODEC_ID_NONE) {
        audio_st0 = add_audio_stream(oc, fmt->audio_codec);
    }

    av_dump_format(oc, 0, filename, 1);

    /* now that all the parameters are set, we can open the audio and
       video codecs and allocate the necessary encode buffers */
    if (video_st0)
        open_video(oc, video_st0);
    if (audio_st0)
        open_audio(oc, audio_st0);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&oc->pb, filename, AVIO_FLAG_WRITE) < 0) {
            NSLog(@"error opening output file");
            return;
        }
    }

    /* write the stream header, if any */
//    avformat_write_header(oc, NULL);
//
//    for (;;) {
//        /* compute current audio and video time */
//        if (audio_st)
//            audio_pts = (double)audio_st->pts.val * audio_st->time_base.num / audio_st->time_base.den;
//        else
//            audio_pts = 0.0;
//
//        if (video_st)
//            video_pts = (double)video_st->pts.val * video_st->time_base.num / video_st->time_base.den;
//        else
//            video_pts = 0.0;
//
//        /* write interleaved audio and video frames */
//        if (!video_st || (video_st && audio_st && audio_pts < video_pts)) {
//            // write_audio_frame(oc, audio_st);
//        } else {
//            // write_video_frame(oc, video_st);
//        }
//    }
//
//    /* write the trailer, if any.  the trailer must be written
//     * before you close the CodecContexts open when you wrote the
//     * header; otherwise write_trailer may try to use memory that
//     * was freed on av_codec_close() */
//    av_write_trailer(oc);
//
//    /* close each codec */
//    if (video_st)
//        close_video(oc, video_st);
//    if (audio_st)
//        close_audio(oc, audio_st);
//
//    /* free the streams */
//    for (i = 0; i < oc->nb_streams; i++) {
//        av_freep(&oc->streams[i]->codec);
//        av_freep(&oc->streams[i]);
//    }
//
//    av_free(oc);
}
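startR() stops where the original comments begin: the stream header is never written, and nothing writes the trailer or releases the contexts, so the packets produced by write_audio_frame()/write_video_frame() never end up in a finished, playable file. A minimal sketch of the missing pieces is shown below, assuming the two write functions are called from capture/encoder callbacks elsewhere in the app; stopR is a hypothetical name, not something from the original post.

/* Sketch (assumption): after avio_open() succeeds at the end of startR(),
 * write the container header so av_write_frame() has something to append to:
 *
 *     avformat_write_header(oc, NULL);
 *
 * and tear everything down once recording stops. */
static void stopR(void)
{
    /* the trailer must be written before the codec contexts are closed */
    av_write_trailer(oc);

    if (video_st0)
        close_video(oc, video_st0);
    if (audio_st0)
        close_audio(oc, audio_st0);

    /* free the streams */
    for (unsigned i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]->codec);
        av_freep(&oc->streams[i]);
    }

    if (!(oc->oformat->flags & AVFMT_NOFILE))
        avio_close(oc->pb);  /* close the file opened with avio_open() */

    av_free(oc);
    oc = NULL;
}

The overall flow would then be startR(), avformat_write_header(oc, NULL), repeated write_audio_frame()/write_video_frame() calls while recording, and finally stopR().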
