来源于 原创 分类 ios 5.0 ffmpeg decode h264 stream

来源:互联网 发布:淘宝代销和分销的区别 编辑:程序博客网 时间:2024/06/06 13:17
I ran into a problem when using FFmpeg to decode an H.264 stream. I searched nearly every website, but unfortunately found nothing useful.
Some people have solved this frustrating problem, but they were unwilling to share their experience with the rest of us.
I finally fixed it myself. I hope this tutorial can help you. That's all.

如何在ios sdk5.0的环境下编译ffmpeg,请看我的这个帖子:http://www.cocoachina.com/bbs/read.php?tid=98563

友情提示,用真机,我测试是没问题的,iphone4/4s,ipad1/2都可以看视频,本人亲测!不能用的东西发出来死全家啊!
我在网上搜ffmpeg decode h264 stream,tmd居然搜不到,有的人试成功了,但是不把代码贴出来,太tmd可恶了!!!
好不容易搜到一个有点靠谱的帖子,大家可以看一下:http://bbs.chinavideo.org/viewthread.php?tid=12977


用到的头文件:

#include "libavformat/avformat.h"  //0407add
#include "libswscale/swscale.h"
#include "libavcodec/avcodec.h"

定义一下数据类型:
    //0407 for ffmpeg
    AVCodec            *pCodec;
    AVPacket           packet;
    AVCodecContext     *pCodecCtx;
    AVFrame            *pFrame;
    AVPicture          picture;
    struct SwsContext  *img_convert_ctx;

添加的函数:
//0407
-(void)initFFMPEG;
-(void)releaseFFMPEG;
-(void)convertFrameToRGB;
-(void)setupScaler;

方法的实现:
#pragma mark - ffmpeg functions

-(void)initFFMPEG
{
    // Register all formats/codecs and set up an H.264 decoder plus a reusable
    // decode frame. Uses the legacy pre-avcodec-54 API (avcodec_alloc_context /
    // avcodec_open / avcodec_alloc_frame) to match the FFmpeg build this
    // project links against — do not modernize without rebuilding the library.
    av_register_all();
    av_init_packet(&packet);

    // Find the decoder for H.264
    pCodec = avcodec_find_decoder(CODEC_ID_H264);
    if (pCodec == NULL)
        goto initError; // Codec not found

    pCodecCtx = avcodec_alloc_context();
    if (pCodecCtx == NULL)
        goto initError; // Out of memory

    // Open codec
    if (avcodec_open(pCodecCtx, pCodec) < 0)
        goto initError; // Could not open codec

    // Allocate the YUV frame the decoder writes into (reused every call)
    pFrame = avcodec_alloc_frame();
    if (pFrame == NULL)
        goto initError; // Out of memory

    NSLog(@"init success");
    return;

initError:
    // Release anything allocated before the failure so a retry starts clean.
    // (avcodec_open cleans up its own partial state on failure, so freeing
    // the context here is sufficient.)
    if (pCodecCtx) {
        av_free(pCodecCtx);
        pCodecCtx = NULL;
    }
    NSLog(@"init failed");
    return;
}

-(void)releaseFFMPEG
{
    // Tear down everything initFFMPEG / setupScaler created.
    // Every pointer is NULLed afterwards so a second call — or a later
    // setupScaler — cannot double-free.

    // Free scaler
    if (img_convert_ctx) {
        sws_freeContext(img_convert_ctx);
        img_convert_ctx = NULL;
    }

    // Free RGB picture buffer (safe even if never allocated: avpicture_free
    // just av_free()s picture.data[0], and free(NULL) is a no-op)
    avpicture_free(&picture);
    memset(&picture, 0, sizeof(picture));

    // Free the YUV frame
    if (pFrame) {
        av_free(pFrame);
        pFrame = NULL;
    }

    // Close AND free the codec context — avcodec_close() releases the
    // codec's internal state but not the context allocation itself, so the
    // original version leaked pCodecCtx.
    if (pCodecCtx) {
        avcodec_close(pCodecCtx);
        av_free(pCodecCtx);
        pCodecCtx = NULL;
    }
}

-(void)setupScaler {
    // (Re)create the RGB24 output buffer and the YUV->RGB scaler.
    //
    // convertFrameToRGB calls this once per decoded frame; the original
    // implementation re-allocated both the picture and the SwsContext every
    // time. We now rebuild only when the decoded frame size actually changes.
    // NOTE(review): the static size cache assumes a single decoder instance
    // per process (true for this player) — move to ivars if that changes.
    static int lastWidth  = 0;
    static int lastHeight = 0;
    if (img_convert_ctx != NULL &&
        lastWidth  == pCodecCtx->width &&
        lastHeight == pCodecCtx->height) {
        return; // still valid for the current frame size
    }

    // Release old picture and scaler (both calls are NULL-safe)
    avpicture_free(&picture);
    sws_freeContext(img_convert_ctx);
    img_convert_ctx = NULL;

    // Allocate RGB picture sized to the current decoded frame
    avpicture_alloc(&picture, PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);

    // Setup scaler: same input/output dimensions, pixel-format conversion only
    static int sws_flags = SWS_FAST_BILINEAR;
    img_convert_ctx = sws_getContext(pCodecCtx->width,
                                     pCodecCtx->height,
                                     pCodecCtx->pix_fmt,
                                     pCodecCtx->width,
                                     pCodecCtx->height,
                                     PIX_FMT_RGB24,
                                     sws_flags, NULL, NULL, NULL);
    if (img_convert_ctx == NULL) {
        NSLog(@"setupScaler: sws_getContext failed (%dx%d, pix_fmt %d)",
              pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt);
        return;
    }

    lastWidth  = pCodecCtx->width;
    lastHeight = pCodecCtx->height;
}

-(void)convertFrameToRGB
{
    // Make sure the scaler and RGB buffer match the current frame size,
    // then convert the decoded YUV frame (pFrame) into the RGB24 `picture`.
    [self setupScaler];
    if (img_convert_ctx == NULL) {
        // Scaler creation failed — calling sws_scale with a NULL context
        // would crash, so drop this frame instead.
        NSLog(@"convertFrameToRGB: no scaler context, skipping frame");
        return;
    }
    sws_scale(img_convert_ctx,
              pFrame->data, pFrame->linesize,
              0, pCodecCtx->height,
              picture.data, picture.linesize);
}


下面是解码的函数,你只要把你收到的一桢桢的视频数据放在一个buf就可以了,也不反对你弄一个很大的buffer存多桢,控制好读写就行。

-(void)decodeAndShow : (char*) buf length:(int)len andTimeStamp:(unsigned long)ulTime
{
    // Decode one H.264 access unit from `buf`/`len`, convert it to RGB24,
    // wrap it in a CGImage/UIImage and hand it to the main thread for display.
    // `ulTime` is the frame timestamp used by timeIntervalControl for pacing.
    if (m_bPlayStop)
    {
        return;
    }

    // Feed the raw bytes to the decoder (packet is reused across calls)
    packet.size = len;
    packet.data = (unsigned char *)buf;
    int got_picture_ptr = 0;
    int nImageSize;  // bytes of input consumed, or negative on decode error
    nImageSize = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture_ptr, &packet);
    NSLog(@"nImageSize:%d--got_picture_ptr:%d", nImageSize, got_picture_ptr);

    if (nImageSize < 0)
    {
        // Corrupt/partial input — report it instead of silently ignoring
        NSLog(@"decodeAndShow: avcodec_decode_video2 failed (%d)", nImageSize);
        m_decodeFinish = YES;
        return;
    }

    // A positive return only means input was consumed; got_picture_ptr is
    // the decoder's actual "a complete frame is ready" signal.
    if (got_picture_ptr && pFrame->data[0])
    {
        [self convertFrameToRGB];
        nWidth = pCodecCtx->width;
        nHeight = pCodecCtx->height;
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
        // NOTE(review): assumes picture.linesize[0] == nWidth*3 (no row
        // padding), which holds for avpicture_alloc'd RGB24 here — verify if
        // the allocation strategy ever changes.
        CFDataRef data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault,
                                                     picture.data[0],
                                                     nWidth * nHeight * 3,
                                                     kCFAllocatorNull);
        CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

        CGImageRef cgImage = CGImageCreate(nWidth,
                                           nHeight,
                                           8,          // bits per component
                                           24,         // bits per pixel (RGB24)
                                           nWidth * 3, // bytes per row
                                           colorSpace,
                                           bitmapInfo,
                                           provider,
                                           NULL,
                                           YES,
                                           kCGRenderingIntentDefault);
        CGColorSpaceRelease(colorSpace);
        // Manual retain/release: this file predates ARC.
        UIImage *image = [[UIImage alloc] initWithCGImage:cgImage];
        CGImageRelease(cgImage);
        CGDataProviderRelease(provider);
        CFRelease(data);
        [self timeIntervalControl:ulTime]; // frame pacing
        // waitUntilDone:YES keeps `picture`'s pixel buffer alive while the
        // main thread draws it (the CFData wraps it without copying).
        [self performSelectorOnMainThread:@selector(updateView:)
                               withObject:image
                            waitUntilDone:YES];
        [image release];
    }
    m_decodeFinish = YES;
    return;
}

原创粉丝点击