ARM6410: FFmpeg + x264 software encoding, hardware encoding, and sending with jrtplib


I started building things back in my junior year, but I was never good at writing things up, so I kept relearning the same material. After reading a few excellent blogs I realized that writing really does reinforce learning, so here is a summary of a project I recently did with a classmate. My skills are limited, so please point out any mistakes. The project is essentially my classmate's graduation design; after reviewing what others have done before us, I think it could be taken further, for example toward automated monitoring or image recognition, but that is for later. Since time is short, some of the code lacks detailed comments or explanations; I will add them gradually.


Here is the source code for the whole project; a single make builds it.

That assumes, of course, that the FFmpeg, x264, and jrtplib libraries are already built.

For how to (cross-)compile these libraries, a quick web search turns up plenty of material.

http://download.csdn.net/detail/wisha1989/5101634

(1) Image capture:

      Image capture uses the V4L2 interface, which embedded Linux already provides. There is nothing difficult here; the code is adapted from examples found online and wrapped in two interface functions:

      

#ifndef V4L2CAPTURE_H
#define V4L2CAPTURE_H

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <getopt.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h>
#include <linux/videodev2.h>

extern "C"
{
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libswscale/swscale.h"
    #include "libavdevice/avdevice.h"
    #include "libavutil/avutil.h"
    #include "libavfilter/avfilter.h"
}

#define CLEAR(x) memset(&(x), 0, sizeof(x))

struct buffer
{
    void   *start;
    size_t  length;
};

// Initialize capture: frame width, frame height, device path
int v4l2capture_init(int width, int height, const char *dev);
// Grab one frame; the data is returned in FFmpeg's AVPacket structure
int v4l2capture(AVPacket *avpkt);

#endif // V4L2CAPTURE_H

That is the header for the capture module (not very tidy; should the function declarations themselves be wrapped in extern "C"?). The implementations of the two functions follow; there is nothing novel here, this kind of code is everywhere.
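To answer my own question: the guard only matters if the header might also be included from plain C code. A minimal sketch of the usual pattern (a hypothetical variant of v4l2capture.h, not the header the project actually uses; the FFmpeg includes above it stay unchanged):

// Sketch: the usual language-linkage guard, so the same declarations can be
// consumed from both C and C++ translation units. Hypothetical, not part of
// the project as-is.
#ifdef __cplusplus
extern "C" {
#endif

int v4l2capture_init(int width, int height, const char *dev);
int v4l2capture(AVPacket *avpkt);

#ifdef __cplusplus
}
#endif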

One note: my camera delivers packed YUV422 (YUYV), but the encoder as configured here expects planar YUV420P, so a pixel-format conversion is needed (covered later).

static int              fd        = -1;
struct buffer          *buffers   = NULL;
static unsigned int     n_buffers = 0;
static unsigned long    file_length;
static unsigned char   *file_name;

void v4l2Capture_release();

static int read_frame(AVPacket *avpkt)
{
    struct v4l2_buffer buf;
    unsigned int i;
    CLEAR(buf);
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    int ff = ioctl(fd, VIDIOC_DQBUF, &buf);   // dequeue a filled buffer
    if (ff < 0)
        fprintf(stderr, "failure = %d\n", ff);
    assert(buf.index < n_buffers);
    fprintf(stderr, "capture frame size: %d\n\n", buffers[buf.index].length);

    // Copy the frame out so the driver buffer can be requeued immediately
    uint8_t *tbuf = (uint8_t*)malloc(buffers[buf.index].length);
    memcpy(tbuf, buffers[buf.index].start, buffers[buf.index].length);
    avpkt->data = tbuf;
    avpkt->size = buffers[buf.index].length;

    ff = ioctl(fd, VIDIOC_QBUF, &buf);        // requeue the buffer
    if (ff < 0)
        printf("failure VIDIOC_QBUF\n");
    return 1;
}

int v4l2capture(AVPacket *avpkt)
{
    read_frame(avpkt);
    return 0;
}

int v4l2capture_init(int width, int height, const char *dev)
{
    struct v4l2_capability cap;
    struct v4l2_format fmt;
    unsigned int i;
    enum v4l2_buf_type type;

    //////////////////////////////
    ///////open video dev ////////
    //////////////////////////////
    fd = open(dev, O_RDWR);   // open the device in blocking mode
    if (fd == -1)
        fprintf(stderr, "open dev failed!\n");
    else
        fprintf(stderr, "open dev success! dev:%s\n\n", dev);

    int ff = ioctl(fd, VIDIOC_QUERYCAP, &cap);   // query device capabilities
    if (ff < 0)
        fprintf(stderr, "failure VIDIOC_QUERYCAP\n");

    struct v4l2_fmtdesc fmt1;
    int ret;
    memset(&fmt1, 0, sizeof(fmt1));
    fmt1.index = 0;
    fmt1.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    //////////////////////////////
    //////enum the supported /////
    //////video    type     /////
    //////////////////////////////
    while ((ret = ioctl(fd, VIDIOC_ENUM_FMT, &fmt1)) == 0)
    {
        fmt1.index++;
        fprintf(stderr, "{ pixelformat = '%c%c%c%c', description = '%s' }\n",
                fmt1.pixelformat & 0xFF, (fmt1.pixelformat >> 8) & 0xFF,
                (fmt1.pixelformat >> 16) & 0xFF, (fmt1.pixelformat >> 24) & 0xFF,
                fmt1.description);
    }

    //////////////////////////////
    //////set the video type/////
    //////////////////////////////
    CLEAR(fmt);
    fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = width;
    fmt.fmt.pix.height      = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;   // V4L2_PIX_FMT_YVU420; // V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;
    ff = ioctl(fd, VIDIOC_S_FMT, &fmt);            // set the capture format
    if (ff < 0)
        fprintf(stderr, "failure VIDIOC_S_FMT\n");
    file_length = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;   // frame size in bytes

    struct v4l2_requestbuffers req;
    CLEAR(req);
    req.count  = 5;
    req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    ff = ioctl(fd, VIDIOC_REQBUFS, &req);          // request `count` driver buffers
    if (ff < 0)
        fprintf(stderr, "failure VIDIOC_REQBUFS\n");
    if (req.count < 1)
        fprintf(stderr, "Insufficient buffer memory\n");

    buffers = (buffer*)calloc(req.count, sizeof(*buffers));   // bookkeeping for the mmapped buffers

    for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
    {
        struct v4l2_buffer buf;   // one driver buffer
        CLEAR(buf);
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = n_buffers;
        if (-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf))   // query the buffer so it can be mapped
            printf("VIDIOC_QUERYBUF error\n");
        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start  = mmap(NULL /* start anywhere */,   // map it into user space
                                         buf.length,
                                         PROT_READ | PROT_WRITE /* required */,
                                         MAP_SHARED /* recommended */,
                                         fd, buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start)
            fprintf(stderr, "mmap failed\n");
    }

    for (i = 0; i < n_buffers; ++i)
    {
        struct v4l2_buffer buf;
        CLEAR(buf);
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index  = i;
        if (-1 == ioctl(fd, VIDIOC_QBUF, &buf))   // queue every buffer before streaming
            fprintf(stderr, "VIDIOC_QBUF failed\n");
    }

    enum v4l2_buf_type v4l2type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int fret = ioctl(fd, VIDIOC_STREAMON, &v4l2type);   // start streaming
    return fret;
}

That covers the capture part.
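For reference, here is a minimal sketch of how the two functions are meant to be used on their own. The standalone loop is hypothetical; the 320x240 size and /dev/video2 device path are the values used later in the project, and in the real code the loop lives in the capture thread of section (2).

// Hypothetical standalone use of the capture module: initialize the device,
// then repeatedly grab raw packed-YUYV frames.
#include "v4l2capture.h"

int capture_demo(void)
{
    if (v4l2capture_init(320, 240, "/dev/video2") < 0)   // width, height, device node
        return -1;

    for (int i = 0; i < 100; i++)
    {
        AVPacket pkt;
        v4l2capture(&pkt);      // fills pkt.data with one packed YUYV frame
        /* ... hand pkt.data / pkt.size to the converter and encoder here ... */
        free(pkt.data);         // read_frame() malloc'ed the copy, so the caller frees it
        pkt.size = 0;
    }
    return 0;
}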


(2) Encoding (software encoding)

   The encoder comes in two flavors: software and hardware. The software path uses FFmpeg with x264. I have only ever used FFmpeg and am not familiar with x264 itself, so I drive the encoder through FFmpeg. That created a problem of its own: changing parameters in FFmpeg's AVCodecContext does not always take effect, because when FFmpeg hands the parameters to x264 it validates them first, and if the check fails it silently falls back to defaults. That is a real headache. If you want to understand H.264 properly, I recommend digging into the x264 parameters themselves. Since I was racing to finish the project, I simply patched parts of the x264 source to force the settings I wanted (a bit brutal, I admit).
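A gentler alternative to patching x264, assuming a newer FFmpeg than the one used here (one whose libx264 wrapper exposes its settings as private AVOptions such as "preset", "tune" and "x264opts"), is to pass x264 settings through the encoder's priv_data instead of fighting the AVCodecContext fields. A sketch only, not what this project does:

// Sketch: setting libx264 options via AVOptions instead of patching the x264
// source. Assumes an FFmpeg build new enough to expose these private options;
// the exact option names depend on the FFmpeg version.
extern "C" {
    #include "libavcodec/avcodec.h"
    #include "libavutil/opt.h"
}

static void configure_x264(AVCodecContext *c)
{
    av_opt_set(c->priv_data, "preset", "ultrafast", 0);    // trade quality for speed
    av_opt_set(c->priv_data, "tune",   "zerolatency", 0);  // drop lookahead/B-frame latency
    // Anything else can be forced with a raw x264 option string:
    av_opt_set(c->priv_data, "x264opts", "rc-lookahead=0:keyint=10", 0);
}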

The code structure is not great, so the software-encoding part is a bit messy. It breaks down into the following functions:

#ifndef CAPTUREANDCOMPRESS_H
#define CAPTUREANDCOMPRESS_H

extern "C"
{
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libswscale/swscale.h"
    #include "libavdevice/avdevice.h"
    #include "libavutil/avutil.h"
    #include "libavfilter/avfilter.h"
}

void  codec_init();
void *capture_video(void*);     // capture thread function
void  captureAndCompress();     // sets up the capture and encode threads
void *compress_video(void*);    // encode thread function

#endif // CAPTUREANDCOMPRESS_H


Below is the implementation of the functions above:


#include "captureAndcompress.h"
#include "sys/stat.h"
#include "rtpsend.h"
#include "v4l2capture.h"
#include "mfcapi.h"
//#include <sys/wait.h>
#include "time.h"

extern "C"
{
    #include "pthread.h"
    #include "semaphore.h"
    #include "unistd.h"
}

#define PICWIDTH   320
#define PICHEIGHT  240
#define buf_size   5

clock_t switch_time_t1, switch_time_t2;

AVCodec           *codec;
AVCodecContext    *c = NULL;
AVFrame           *picture;
uint8_t           *outbuf, *picture_buf;
uint32_t           outbuf_size = 0;
FILE              *f = NULL;
FILE              *fdebug = NULL;
sem_t              emptyFrameSpace;
sem_t              fullFrameSpace;
pthread_mutex_t    mutex;
struct SwsContext *color_convert_ctx;
static void       *enc_handle;
static int         enc_frame_cnt = 0;
unsigned char    **yuv422BUf;

void thread_debug(char *content, int d)
{
    if (!fdebug)
        fdebug = fopen("debugdata.txt", "wb");
    fprintf(fdebug, "%s%d\n", content, d);
}

// Ring of raw YUYV frames shared between the capture and encode threads
static void yuv422BUf_init()
{
    yuv422BUf = (unsigned char**)malloc(buf_size * sizeof(unsigned char*));
    int i;
    for (i = 0; i < buf_size; i++)
    {
        yuv422BUf[i] = (unsigned char*)malloc(PICWIDTH * PICHEIGHT * 3 * sizeof(unsigned char));
    }
}

// Convert one packed YUYV422 frame into the planar YUV420P layout the encoder expects
static void YUV422_to_I420(unsigned char *yuv422, unsigned char *I420[],
                           unsigned int width, unsigned int height)
{
    unsigned char *data[2];
    int ylinesize[4];
    int ilinesize[4];
    clock_t t1, t2;
    t1 = clock();

    ylinesize[0] = width * 2;     // packed YUYV: 2 bytes per pixel, single plane
    ylinesize[1] = width / 2;
    ylinesize[2] = height / 2;
    ilinesize[0] = width;         // planar 4:2:0 strides: Y, U, V
    ilinesize[1] = width / 2;
    ilinesize[2] = width / 2;
    data[0] = yuv422;

    if (color_convert_ctx)
    {
        sws_scale(color_convert_ctx, data, ylinesize, 0, height, I420, ilinesize);
        // data[1] = I420[2];
        // I420[2] = I420[1];
        // I420[1] = data[1];
    }
    else
    {
        fprintf(stderr, "yuv422 to yuv420 failed!\n\n");
    }

    t2 = clock();
    fprintf(stderr, "yuv422 to yuv420 time cost: %d ms\n\n", (int)(t2 - t1));
}

void codec_init()
{
    // thread_debug("HAHA", 10);
    rtp_params_init();
    avcodec_init();
    av_register_all();

    /********** ffmpeg init ***********/
    codec   = avcodec_find_encoder(CODEC_ID_H264);
    c       = avcodec_alloc_context();
    picture = avcodec_alloc_frame();

    int br = 2 * 1000;
    c->rc_lookahead = 0;   // cannot actually be forced through here (see text)
    c->bit_rate     = br;
    c->rc_min_rate  = br;
    c->rc_max_rate  = br;
    c->bit_rate_tolerance = br;
    // c->rc_buffer_size = br;
    c->rc_initial_buffer_occupancy = c->rc_buffer_size * 3 / 4;
    c->rc_buffer_aggressivity = (float)1.0;
    c->rc_initial_cplx = 0.5;
    c->codec_type   = AVMEDIA_TYPE_VIDEO;
    c->dct_algo     = 0;
    c->me_pre_cmp   = 2;
    c->cqp          = 30;
    c->me_method    = 7;
    c->qmin         = 3;
    c->qmax         = 31;
    c->max_qdiff    = 3;
    c->qcompress    = 0.3;   // important param
    c->qblur        = 0.3;
    c->nsse_weight  = 8;
    c->i_quant_factor = (float)0.8;
    c->b_quant_factor = 1.25;
    c->b_quant_offset = 1.25;
    // c->bit_rate_tolerance = 9000*1000;
    c->width  = PICWIDTH;
    c->height = PICHEIGHT;
    // c->max_b_frames = 0;
    c->time_base.num = 1;
    c->time_base.den = 10;
    // c->flags2 = c->flags2 & !CODEC_FLAG2_MBTREE;
    c->gop_size     = 10;
    c->max_b_frames = 0;
    c->pix_fmt      = PIX_FMT_YUV420P;

    if (avcodec_open(c, codec) < 0)
    {
        fprintf(stderr, "open encoder failed!\n");
        exit(1);
    }

    enc_handle = mfc_encoder_init(PICWIDTH, PICHEIGHT, 25, 1000, 20);   // hardware encoder (section 4)
    v4l2capture_init(PICWIDTH, PICHEIGHT, "/dev/video2");
    yuv422BUf_init();

    outbuf_size = 10000000;
    outbuf      = (uint8_t*)av_malloc(outbuf_size);

    int size    = c->width * c->height;
    picture_buf = (uint8_t*)av_malloc((size * 3) / 2);
    picture->data[0] = picture_buf;
    picture->data[1] = picture->data[0] + size;
    picture->data[2] = picture->data[1] + size / 4;
    picture->linesize[0] = c->width;
    picture->linesize[1] = c->width / 2;
    picture->linesize[2] = c->width / 2;

    color_convert_ctx = sws_getContext(PICWIDTH, PICHEIGHT, PIX_FMT_YUYV422,
                                       PICWIDTH, PICHEIGHT, PIX_FMT_YUV420P,
                                       SWS_FAST_BILINEAR, NULL, NULL, NULL);

    sem_init(&emptyFrameSpace, 0, buf_size);
    sem_init(&fullFrameSpace, 0, 0);
    pthread_mutex_init(&mutex, NULL);
}

// Capture thread: grab a raw frame and push it into the ring buffer
void *capture_video(void *)
{
    int i = 0;
    while (1)
    {
        sem_wait(&emptyFrameSpace);
        pthread_mutex_lock(&mutex);

        clock_t pre;
        switch_time_t1 = pre = clock();
        fprintf(stderr, "switch time cost:= %d us\n", abs(switch_time_t1 - switch_time_t2));

        AVPacket avpkt;
        v4l2capture(&avpkt);
        i = i % buf_size;
        memcpy(yuv422BUf[i], avpkt.data, avpkt.size);
        free(avpkt.data);
        avpkt.size = 0;
        i++;

        clock_t aft = clock();
        fprintf(stderr, "capture time cost:%d\n", (int)((aft - pre) / 1000));

        pthread_mutex_unlock(&mutex);
        sem_post(&fullFrameSpace);
    }
}

// Encode thread: convert, encode with ffmpeg/x264, then hand the bitstream to RTP
void *compress_video(void *)
{
    int i = 0;
    int out_size;
    clock_t t1, t2, t3;

    while (1)
    {
        sem_wait(&fullFrameSpace);
        pthread_mutex_lock(&mutex);

        switch_time_t2 = t1 = clock();
        fprintf(stderr, "switch time cost:= %d us\n", abs(switch_time_t1 - switch_time_t2));

        i = i % buf_size;
        YUV422_to_I420(yuv422BUf[i], picture->data, PICWIDTH, PICHEIGHT);   // packed YUV422 -> planar YUV420
        out_size = avcodec_encode_video(c, outbuf, outbuf_size, picture);   // ffmpeg encode call
        // fprintf(stderr, "encode frame size: %d\n", out_size);
        picture->pts++;

        t2 = clock();
        fprintf(stderr, "encode time cost: %d\n\n", (int)((t2 - t1) / 1000));

        nal_rtp_send((unsigned char*)outbuf, out_size);   // send the encoded NAL units over RTP
        i++;

        t3 = clock();
        fprintf(stderr, "send time cost:%d\n", (int)((t3 - t2) / 1000));

        pthread_mutex_unlock(&mutex);
        sem_post(&emptyFrameSpace);
    }
}

// Create the capture and encode threads and wait for them
void captureAndCompress()
{
    pthread_t id1, id2;
    codec_init();
    if (pthread_create(&id1, NULL, capture_video, NULL))
    {
        fprintf(stderr, "create capture thread failed!\n");
    }
    if (pthread_create(&id2, NULL, compress_video, NULL))
    {
        fprintf(stderr, "create compress thread failed!\n");
    }
    pthread_join(id1, NULL);
    pthread_join(id2, NULL);
}


That is the software-encoding part. The encode thread already calls the RTP send function, nal_rtp_send((unsigned char*)outbuf, out_size); so the code above is effectively the main flow of the whole program. The structure is a bit messy...
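Since captureAndCompress() drives everything, the program entry point is trivial. The actual main() is in the downloadable project; this is just the assumed shape:

// Hypothetical entry point: captureAndCompress() initializes the codecs,
// starts the capture and encode/send threads, and joins them (never returns
// in normal operation).
#include "captureAndcompress.h"

int main()
{
    captureAndCompress();
    return 0;
}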


(3) RTP sending

RTP sending uses the open-source jrtplib (3.9.1) library. It is written in C++ and built with CMake, so cross-compiling it is a little fiddly, but it is quite easy to use.


#ifndef RTPSEND_H
#define RTPSEND_H

#include "jrtplib3/rtpsession.h"
#include "jrtplib3/rtpudpv4transmitter.h"
#include "jrtplib3/rtpipv4address.h"
#include "jrtplib3/rtpsessionparams.h"
#include "jrtplib3/rtperrors.h"
#include <time.h>
#include <stdlib.h>
#include <stdio.h>
#include <iostream>
#include <string>

#define H264                96      // RTP payload type used for H.264
#define MAX_RTP_PKT_LENGTH  1350    // max payload bytes per RTP packet

using namespace jrtplib;

int  rtp_params_init();
void rtp_send(unsigned char *buf, int len);
void nal_rtp_send(unsigned char *buf, int len);

#endif // RTPSEND_H



Below is the implementation:


#include "rtpsend.h"

static RTPSession                 sess;
static RTPUDPv4TransmissionParams transparams;
static RTPSessionParams           sessparams;
static uint                       portbase, destport;
static uint                       destip;
static std::string                ipstr;
static int                        status, i, num;
static int                        timeStampInc = 90000 / 20;   // 90 kHz RTP clock at 20 fps
static long long                  currentTime;
static int                        sendFrameCount = 0;

static void checkerror(int err);
unsigned char *nalBuffer;
unsigned char *sendBuffer;
static int  get_next_nalu(unsigned char *buf, int buf_len, unsigned char *outbuf);
static bool find_start_code(unsigned char *buf);

int rtp_params_init()
{
    ipstr    = "192.168.0.1";
    destport = 9000;
    portbase = 8000;
    num      = 0;

    destip = inet_addr(ipstr.c_str());
    if (destip == INADDR_NONE)
    {
        std::cerr << "Bad IP address specified" << std::endl;
        return -1;
    }
    destip = ntohl(destip);

    sessparams.SetOwnTimestampUnit((double)(1.0f / 90000.0f));   // 90 kHz timestamp unit
    // sessparams.SetAcceptOwnPackets(true);
    transparams.SetPortbase(portbase);
    fprintf(stderr, "SetPortbase\n");

    status = sess.Create(sessparams, &transparams);
    fprintf(stderr, "Create session\n");
    checkerror(status);

    RTPIPv4Address addr(destip, destport);
    status = sess.AddDestination(addr);
    fprintf(stderr, "Add to Destination\n");
    checkerror(status);

    currentTime = clock();
    sendBuffer = (unsigned char*)malloc(1000 * 1000);
    nalBuffer  = (unsigned char*)malloc(1000 * 1000);
    return 0;
}

// Send one NAL unit: directly if it fits in a packet, otherwise as FU-A fragments
void rtp_send(unsigned char *buf, int len)
{
    int n    = len / MAX_RTP_PKT_LENGTH;
    int last = len % MAX_RTP_PKT_LENGTH;
    if (last > 0) n++;
    int timeInc;
    // currentTime += timeStampInc;
    // fprintf(stderr, "buf len = \n");
    if (len > 0) fprintf(stderr, "send count:\n");

    char nalHead = buf[0];
    if (len < MAX_RTP_PKT_LENGTH)
    {
        // Single NAL unit packet
        status = sess.SendPacket((void*)buf, len, H264, true, timeStampInc);
    }
    else
    {
        // FU-A fragmentation: FU indicator + FU header in front of each fragment
        for (int i = 0; i < n; i++)
        {
            sendBuffer[0] = (nalHead & 0x60) | 28;   // FU indicator: NRI bits + type 28 (FU-A)
            sendBuffer[1] = (nalHead & 0x1f);        // FU header: original NAL unit type
            if (0 == i)
            {
                timeInc = timeStampInc;
                sendBuffer[1] |= 0x80;               // S bit: first fragment
                memcpy(&sendBuffer[2], &buf[i * MAX_RTP_PKT_LENGTH], MAX_RTP_PKT_LENGTH);
                status = sess.SendPacket((void*)sendBuffer, MAX_RTP_PKT_LENGTH + 2, H264, false, timeInc);
            }
            else if (i == n - 1)   // send the last fragment
            {
                timeInc = 0;
                sendBuffer[1] |= 0x40;               // E bit: last fragment, marker set
                memcpy(&sendBuffer[2], &buf[i * MAX_RTP_PKT_LENGTH], last);
                status = sess.SendPacket((void*)sendBuffer, last + 2, H264, true, timeInc);
            }
            else
            {
                timeInc = 0;
                memcpy(&sendBuffer[2], &buf[i * MAX_RTP_PKT_LENGTH], MAX_RTP_PKT_LENGTH);
                status = sess.SendPacket((void*)sendBuffer, MAX_RTP_PKT_LENGTH + 2, H264, false, timeInc);
            }
            checkerror(status);
            // sess.OnPollThreadStart();
            // free(tempBuf);
            status = sess.Poll();
            // checkerror(status);
        }
    }
}

static void checkerror(int err)
{
    if (err < 0)
    {
        const char *errstr = RTPGetErrorString(err).c_str();
        printf("Error:%s\n", errstr);
        exit(-1);
    }
}

// Split the encoder output into NAL units and send each one
void nal_rtp_send(unsigned char *buf, int len)
{
    int pos = 0;
    while (pos < len)
    {
        int nalLen = get_next_nalu(&buf[pos], len - pos, nalBuffer);
        rtp_send(nalBuffer + 4, nalLen - 4);   // drop the start code 00 00 00 01
        pos += nalLen;
    }
}

// Copy the next NAL unit (including its start code) into outbuf and return its length
static int get_next_nalu(unsigned char *buf, int buf_len, unsigned char *outbuf)
{
    int pos = 0;
    bool findStartcode;

    if (buf_len > 4)
        findStartcode = find_start_code(&buf[pos]);
    else
        return 0;

    if (findStartcode)
    {
        do
        {
            outbuf[pos] = buf[pos];
            pos++;
            findStartcode = find_start_code(&buf[pos]);
        } while (!findStartcode && (pos + 4 < buf_len));
    }

    if (findStartcode)
    {
        return pos;
    }
    else if (pos + 4 >= buf_len)
    {
        // Last NAL unit in the buffer: copy the remaining bytes
        do { outbuf[pos] = buf[pos]; pos++; } while (pos < buf_len);
        return buf_len;
    }
    else
        return 0;
}

// Check for the 4-byte Annex B start code 00 00 00 01
static bool find_start_code(unsigned char *buf)
{
    if ((buf[0] == 0) &&
        (buf[1] == 0) &&
        (buf[2] == 0) &&
        (buf[3] == 1))
    {
        return true;
    }
    else
        return false;
}


(4) Hardware encoding (trimmed from Samsung's sample code):

The hardware-encoding part comes from Samsung's reference material, which provides encode/decode APIs for various formats. I extracted just the H.264 encoding part and added it to my project.


In essence it boils down to the following functions:


#ifndef __SAMSUNG_SYSLSI_APDEV_CAM_ENC_DEC_TEST_H__
#define __SAMSUNG_SYSLSI_APDEV_CAM_ENC_DEC_TEST_H__

#ifdef __cplusplus
extern "C" {
#endif

void *mfc_encoder_init(int width, int height, int frame_rate, int bitrate, int gop_num);
void *mfc_encoder_exe(void *handle, unsigned char *yuv_buf, int frame_size, int first_frame, long *size);
void  mfc_encoder_free(void *handle);

#ifdef __cplusplus
}
#endif

#endif /* __SAMSUNG_SYSLSI_APDEV_CAM_ENC_DEC_TEST_H__ */


Below is the implementation:

#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <ctype.h>
#include <errno.h>
#include <signal.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <sys/ioctl.h>
#include <pthread.h>
#include <linux/videodev2.h>
#include <semaphore.h>

#include "s3c_pp.h"
#include "SsbSipH264Encode.h"
#include "LogMsg.h"
#include "performance.h"
#include "MfcDriver.h"
#include "mfcapi.h"

/*
#define PP_DEV_NAME      "/dev/s3c-pp"
#define FB1_WIDTH        400
#define FB1_HEIGHT       480
#define FB1_BPP          16
#define FB1_COLOR_SPACE  RGB16
#define SAMSUNG_UXGA_S5K3BA
*/

/***************** MFC *******************/
//static void *enc_handle, *dec_handle;
//static int  enc_frame_cnt, dec_frame_cnt;

/* MFC functions */
//static void *mfc_encoder_init(int width, int height, int frame_rate, int bitrate, int gop_num);
//static void *mfc_encoder_exe(void *handle, unsigned char *yuv_buf, int frame_size, int first_frame, long *size);
//static void  mfc_encoder_free(void *handle);

/***************** etc *******************/
//#define SHARED_BUF_NUM                    5
//#define MFC_LINE_BUF_SIZE_PER_INSTANCE    (204800)

/***************** MFC driver function *****************/
void *mfc_encoder_init(int width, int height, int frame_rate, int bitrate, int gop_num)
{
    int   frame_size;
    void *handle;
    int   ret;

    frame_size = (width * height * 3) >> 1;   // YUV420: 1.5 bytes per pixel

    handle = SsbSipH264EncodeInit(width, height, frame_rate, bitrate, gop_num);
    if (handle == NULL) {
        LOG_MSG(LOG_ERROR, "Test_Encoder", "SsbSipH264EncodeInit Failed\n");
        return NULL;
    }

    ret = SsbSipH264EncodeExe(handle);
    return handle;
}

void *mfc_encoder_exe(void *handle, unsigned char *yuv_buf, int frame_size, int first_frame, long *size)
{
    unsigned char *p_inbuf, *p_outbuf;
    int hdr_size;
    int ret;

    p_inbuf = SsbSipH264EncodeGetInBuf(handle, 0);
    memcpy(p_inbuf, yuv_buf, frame_size);

    ret = SsbSipH264EncodeExe(handle);

    if (first_frame) {
        SsbSipH264EncodeGetConfig(handle, H264_ENC_GETCONF_HEADER_SIZE, &hdr_size);
        //printf("Header Size : %d\n", hdr_size);
    }

    p_outbuf = SsbSipH264EncodeGetOutBuf(handle, size);
    return p_outbuf;
}

void mfc_encoder_free(void *handle)
{
    SsbSipH264EncodeDeInit(handle);
}
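To make the intended call sequence explicit, here is a minimal sketch of the init / per-frame / free lifecycle of this wrapper. The parameter values are the ones used in the project; the standalone function itself is hypothetical, and the real call site is shown just below.

// Hypothetical standalone use of the trimmed MFC wrapper: one init, one encode
// call per YUV420 frame, one free. In the project the per-frame call sits
// inside compress_video() and the handle is never freed.
#include "mfcapi.h"

static void mfc_demo(unsigned char *yuv420_frame)   // 320x240 planar YUV420 input
{
    long  out_size = 0;
    void *h = mfc_encoder_init(320, 240, 25, 1000, 20);   // width, height, fps, bitrate, GOP
    if (!h)
        return;

    // first_frame = 1 makes the wrapper also query the header (SPS/PPS) size
    unsigned char *bitstream =
        (unsigned char*)mfc_encoder_exe(h, yuv420_frame, 320 * 240 * 3 / 2, 1, &out_size);
    /* ... hand (bitstream, out_size) to nal_rtp_send() ... */
    (void)bitstream;

    mfc_encoder_free(h);
}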
The rest is Samsung's own material; look it up if you are interested.
When calling this API (in the encode-thread function from the software-encoding section above, with the ffmpeg encode call replaced):
void *compress_video(void *)
{
    int i = 0;
    int out_size;
    clock_t t1, t2, t3;

    while (1)
    {
        sem_wait(&fullFrameSpace);
        pthread_mutex_lock(&mutex);

        switch_time_t2 = t1 = clock();
        fprintf(stderr, "switch time cost:= %d us\n", abs(switch_time_t1 - switch_time_t2));

        i = i % buf_size;
        YUV422_to_I420(yuv422BUf[i], picture->data, PICWIDTH, PICHEIGHT);

        // out_size = avcodec_encode_video(c, outbuf, outbuf_size, picture);
        long oubuf_size_hard = 0;
        if (enc_frame_cnt % 20 == 0)
        {
            // Every 20th frame, pass first_frame = 1 so the wrapper re-queries the header info
            outbuf = (uint8_t*)mfc_encoder_exe(enc_handle, picture->data[0],
                                               PICWIDTH * PICHEIGHT * 3 / 2, 1, &oubuf_size_hard);
        }
        else
        {
            outbuf = (uint8_t*)mfc_encoder_exe(enc_handle, picture->data[0],
                                               PICWIDTH * PICHEIGHT * 3 / 2, 0, &oubuf_size_hard);
        }
        enc_frame_cnt++;
        // fprintf(stderr, "encode frame size: %d\n", out_size);
        picture->pts++;

        t2 = clock();
        fprintf(stderr, "encode time cost: %d\n\n", (int)((t2 - t1) / 1000));

        nal_rtp_send((unsigned char*)outbuf, oubuf_size_hard);
        i++;

        t3 = clock();
        fprintf(stderr, "send time cost:%d\n", (int)((t3 - t2) / 1000));

        pthread_mutex_unlock(&mutex);
        sem_post(&emptyFrameSpace);
        usleep(1000 * 10);
    }
}

That is it for now; I will flesh it out in more detail when I have time.





