FFMPEG SDK 快速截取I帧
来源:互联网 发布:java实现base64加密 编辑:程序博客网 时间:2024/06/03 23:46
主要流程 :
根据输入的时间,向前seek到最近的I帧,读取I帧,解码,将得到的I帧写为JPEG保存。
主要接口:
av_seek_frame();
av_read_frame();
avcodec_decode_video2();
注意问题:
1.输入的时间戳单位是秒,需要转换成ffmpeg的时间戳,输入到av_seek_frame(),主要使用av_rescale()实现转换。
2.解码器内部存在帧延迟(B帧重排序、codec delay),avcodec_decode_video2()首次调用可能不会立即输出解码帧;此时需要向解码器继续送入后续包或送入空包(data=NULL, size=0)进行冲刷(flush),直到取得解码结果。代码中的while循环即为此目的。
3.解码得到的frame需要先转换成YUVJ420P(全范围色彩,与JPEG一致),利用sws_scale()实现;若转换成YUV420P(有限范围)则存在color_range不匹配的问题,会导致输出的JPEG图像与原图像存在对比度差异。
//// main.cpp// test_keyframe//// Created by shiyao.xsy on 16/1/26.// Copyright © 2016年 rq. All rights reserved.//#include <iostream>#include <stdio.h>extern "C" {#include "libavformat/avformat.h"#include "libswscale/swscale.h"#include "libavcodec/avcodec.h"#include "libavutil/time.h"}int SavetoJPEG(AVFrame *pFrameYUV,AVStream *pVStream,char *filepath,int width, int height){ AVFormatContext* pFormatCtx; AVOutputFormat* fmt; AVStream* video_st; AVCodecContext* pCodecCtx; AVCodec* pCodec; uint8_t* picture_buf; AVFrame* picture; AVPacket pkt; int y_size; int got_picture=0; int size; int ret=0; int in_w=width,in_h=height; //YUV's width and height char* out_file = filepath; //Output file //Method 1 pFormatCtx = avformat_alloc_context(); //Guess format fmt = av_guess_format("mjpeg", NULL, NULL); pFormatCtx->oformat = fmt; //Output URL if (avio_open(&pFormatCtx->pb,out_file, AVIO_FLAG_READ_WRITE) < 0){ printf("Couldn't open output file."); return -1; } //Method 2. More simple //avformat_alloc_output_context2(&pFormatCtx, NULL, NULL, out_file); //fmt = pFormatCtx->oformat; video_st = avformat_new_stream(pFormatCtx, 0); video_st->time_base.num = pVStream->time_base.num; video_st->time_base.den = pVStream->time_base.den; if (video_st==NULL){ return -1; } pCodecCtx = video_st->codec; pCodecCtx->codec_id = fmt->video_codec; pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO; pCodecCtx->pix_fmt = PIX_FMT_YUVJ420P; pCodecCtx->width = in_w; pCodecCtx->height = in_h; pCodecCtx->time_base.num = pVStream->codec->time_base.num; pCodecCtx->time_base.den = pVStream->codec->time_base.den; //Output some information av_dump_format(pFormatCtx, 0, out_file, 1); pCodec = avcodec_find_encoder(pCodecCtx->codec_id); if (!pCodec){ printf("Codec not found."); return -1; } if (avcodec_open2(pCodecCtx, pCodec,NULL) < 0){ printf("Could not open codec."); return -1; } pCodecCtx->qmin = pCodecCtx->qmax = 3; pCodecCtx->flags|=CODEC_FLAG_QSCALE; picture = av_frame_alloc(); size = 
avpicture_get_size(pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height); picture_buf = (uint8_t *)av_malloc(size); if (!picture_buf) { return -1; } avpicture_fill((AVPicture *)picture, picture_buf, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height); //Write Header avformat_write_header(pFormatCtx,NULL); y_size = pCodecCtx->width * pCodecCtx->height; av_new_packet(&pkt,y_size*3); picture->data[0] = pFrameYUV->data[0];// Y picture->data[1] = pFrameYUV->data[1];// U picture->data[2] = pFrameYUV->data[2];// V picture->width = in_w; picture->height = in_h; picture->format = PIX_FMT_YUVJ420P; picture->pts = 0; //Encode ret = avcodec_encode_video2(pCodecCtx, &pkt,picture, &got_picture); if(ret < 0){ printf("Encode Error.\n"); return -1; } if (got_picture==1){ pkt.stream_index = video_st->index; ret = av_write_frame(pFormatCtx, &pkt); } av_packet_unref(&pkt); //Write Trailer av_write_trailer(pFormatCtx); printf("Encode Successful.\n"); if (video_st){ avcodec_close(video_st->codec); av_free(picture); av_free(picture_buf); } avio_close(pFormatCtx->pb); avformat_free_context(pFormatCtx); return 0;}//void SaveYUV(AVFrame *pFrame, int width, int height, int iFrame) {// FILE *pFile;// char szFilename[32];// int y;//// // Open file// sprintf(szFilename, "/Users/shiyaoxsy/frame%d.yuv", iFrame);// pFile=fopen(szFilename, "wb+");// if(pFile==NULL)// return;// // Write data// int y_size = width * height;// fwrite(pFrame->data[0], 1, y_size, pFile);// fwrite(pFrame->data[1], 1, y_size/4, pFile);// fwrite(pFrame->data[2], 1, y_size/4, pFile);//// // Close file// fclose(pFile);//}int main(int argc, char * argv[]) { if(argc != 4){ fprintf(stderr, "usage: %s timestamp input_file video_output_file \n",argv[0]); return -1; } AVFormatContext *pFormatCtx; int i, videoindex,PictureSize; AVCodecContext *pCodecCtx; AVCodec *pCodec; AVFrame *pFrame,*pFrameRGB; AVPacket packet; int ret, got_picture; char *filepath = argv[2]; int64_t time_,time_total; struct SwsContext *pSwsCtx; uint8_t 
*outBuff; time_total = av_gettime_relative(); av_register_all(); avformat_network_init(); pFormatCtx = avformat_alloc_context(); time_ = av_gettime_relative(); if(avformat_open_input(&pFormatCtx,filepath,NULL,NULL)!=0){ printf("Couldn't open input stream.\n"); return -1; } if(avformat_find_stream_info(pFormatCtx,NULL)<0){ printf("Couldn't find stream information.\n"); return -1; } videoindex=-1; for(i=0; i<pFormatCtx->nb_streams; i++){ if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){ videoindex=i; break; } } if(videoindex == -1){ printf("Didn't find a video stream.\n"); return -1; } pCodecCtx=pFormatCtx->streams[videoindex]->codec; pCodec=avcodec_find_decoder(pCodecCtx->codec_id); if(pCodec==NULL){ printf("Codec not found.\n"); return -1; } if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){ printf("Could not open codec.\n"); return -1; } pFrame=av_frame_alloc(); pFrameRGB=av_frame_alloc(); if( pFrame == NULL || pFrameRGB == NULL) { printf("avframe malloc failed!\n"); return -1; } PictureSize = avpicture_get_size(PIX_FMT_YUVJ420P, pCodecCtx->width, pCodecCtx->height); outBuff = (uint8_t*)av_malloc(PictureSize*sizeof(uint8_t)); if(outBuff == NULL){ printf("av malloc failed!\n"); return -1; } avpicture_fill((AVPicture *)pFrameRGB, outBuff, PIX_FMT_YUVJ420P, pCodecCtx->width, pCodecCtx->height); pSwsCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUVJ420P, SWS_BICUBIC, NULL, NULL, NULL); time_ = av_gettime_relative() - time_; printf("open file and find info, cost time: %0.3fs\n",time_/1000000.0); time_ = av_gettime_relative(); int64_t timestamp = atoi(argv[1]); timestamp = av_rescale(timestamp, pFormatCtx->streams[videoindex]->time_base.den, (int64_t)pFormatCtx->streams[videoindex]->time_base.num); av_seek_frame(pFormatCtx, videoindex, timestamp, AVSEEK_FLAG_BACKWARD); avcodec_flush_buffers(pFormatCtx->streams[videoindex]->codec); time_ = av_gettime_relative()-time_; printf("seek 
frame, costs time: %0.3fs\n",time_ / 1000000.0); AVPacket avpkt; av_init_packet(&avpkt); avpkt.data = NULL; avpkt.size = 0; time_ = av_gettime_relative(); while (av_read_frame(pFormatCtx, &packet) >=0 ) { if(packet.stream_index == videoindex ){ if(packet.flags){ if((ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet))<0){ printf("Decode Error!\n"); return -1; } while(got_picture==0){ sleep(0.01); if((ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &avpkt))<0){ printf("Decode Error!\n"); return -1; } } if(got_picture){ time_ = av_gettime_relative()-time_; printf("read and decode frame, costs time: %0.3fs\n",time_/1000000.0); time_ = av_gettime_relative(); sws_scale(pSwsCtx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize); if (SavetoJPEG(pFrameRGB,pFormatCtx->streams[videoindex],argv[3],pCodecCtx->width,pCodecCtx->height)<0) { printf("Write Image Error!\n"); return -1; } time_ = av_gettime_relative()-time_; printf("write frame, costs time: %0.3fs\n",time_/1000000.0); av_packet_unref(&packet); break; } } } av_packet_unref(&packet); } sws_freeContext(pSwsCtx); av_packet_unref(&avpkt); av_free(outBuff); av_free(pFrameRGB); av_free(pFrame); avcodec_close(pCodecCtx); avformat_close_input(&pFormatCtx); time_total = av_gettime_relative()-time_total; printf("all done, costs time: %0.3fs\n",time_total/1000000.0); return 0;}
0 0
- FFMPEG SDK 快速截取I帧
- FFMPEG SDK 快速截取I帧
- 基于ffmpeg截取视频帧画面
- 基于ffmpeg截取视频帧画面
- 基于ffmpeg截取视频帧画面
- FFmpeg 入门(1):截取视频帧
- 使用ffmpeg生成I帧
- Windows下FFmpeg快速入门sdk下载以及ffmpeg库开发
- Windows下FFmpeg快速入门sdk下载以及ffmpeg库开发
- FFmpeg截取视频
- ffmpeg 截取视频
- 运用ffmpeg截取音频
- 使用ffmpeg从视频中截取图像帧
- 截取视频第一帧——使用ffmpeg
- 使用ffmpeg从视频中截取图像帧
- ffmpeg开发--ffmpeg sdk下载
- ffmpeg 多张图片转视频,视频转gif,截取视频,截取视频里的帧
- 使用 ffmpeg 截取视频文件图片
- 【CodeVS】1073 家族
- java后台框架 springmvc mybatis(oracle 和 mysql数据库)
- 递归栈帧分析
- 四种常见的 POST 提交数据方式
- 使用js实现“别踩白块儿”游戏
- FFMPEG SDK 快速截取I帧
- 三角形测试用例
- yum---Shell前端软件包管理器
- 新的开始!
- 返回日期相关的
- Android 实时监听网络状态
- Sass的安装与使用经验
- TotoiseSVN的基本使用方法
- spring+springMVC+mybatis整合框架增删改查demo(包含资源下载链接)