iOS 硬件 GPU 解码
来源:互联网 发布:alphago用的什么算法 编辑:程序博客网 时间:2024/05/17 08:39
导入框架和头文件:#import <VideoToolbox/VideoToolbox.h>
{
    // Cached SPS/PPS NAL payloads (Annex-B start code stripped), captured from the stream.
    NSData *spsData;
    NSData *ppsData;
    // Scratch buffer used to repackage one Annex-B NAL into AVCC (4-byte length prefix).
    uint8_t pFrameData[BUFFER_SIZE];
    // Format description built from SPS/PPS; required to create the decode session.
    CMVideoFormatDescriptionRef videoFormatDescription;
    VTDecompressionSessionRef decompressionSession;
    // Callback invoked by VideoToolbox with each decoded frame.
    VTDecompressionOutputCallback decompressionSessionDecodeFrameCallback;
}

/// Creates (or recreates) the VTDecompressionSession from the current
/// videoFormatDescription. Output is requested as NV12
/// (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), the format the Apple
/// hardware decoder emits directly.
- (void)createDecompSession {
    // Tear down any stale session before building a new one so the old one
    // does not leak when SPS/PPS change mid-stream.
    if (decompressionSession) {
        VTDecompressionSessionInvalidate(decompressionSession);
        CFRelease(decompressionSession);
        decompressionSession = NULL;
    }

    VTDecompressionOutputCallbackRecord callBackRecord;
    callBackRecord.decompressionOutputCallback = decompressionSessionDecodeFrameCallback;
    callBackRecord.decompressionOutputRefCon = (__bridge void *)self;

    // Destination image-buffer attributes: request NV12 output.
    SInt32 pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
    CFNumberRef pixelFormatNumber = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &pixelFormat);
    const void *keys[] = { kCVPixelBufferPixelFormatTypeKey };
    const void *values[] = { pixelFormatNumber };
    // Use the standard CF callbacks so the dictionary retains/releases its
    // contents (the original passed NULL callbacks and leaked the CFNumber).
    CFDictionaryRef attrs = CFDictionaryCreate(kCFAllocatorDefault,
                                               keys,
                                               values,
                                               1,
                                               &kCFTypeDictionaryKeyCallBacks,
                                               &kCFTypeDictionaryValueCallBacks);
    CFRelease(pixelFormatNumber);  // retained by the dictionary

    OSStatus status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                                   videoFormatDescription,
                                                   NULL,  // decoder specification: let VideoToolbox choose
                                                   attrs,
                                                   &callBackRecord,
                                                   &decompressionSession);
    if (status != noErr) {
        NSLog(@"VTDecompressionSessionCreate failed: %d", (int)status);
    }
    CFRelease(attrs);  // fixes a dictionary leak in the original
}

/// Feeds one Annex-B NAL unit (assumed 4-byte 00 00 00 01 start code) to the
/// hardware decoder. SPS (7) / PPS (8) are cached to build the format
/// description; IDR (5) and non-IDR (1) slices are repackaged to AVCC and
/// submitted for synchronous decode. The decoded CVPixelBufferRef is delivered
/// through the session's output callback.
- (void)decodeData:(char *)pFromeData Leng:(int)length {
    // Too short to contain a start code plus a NAL header.
    if (pFromeData == NULL || length <= 4) {
        return;
    }

    // NAL type = low 5 bits of the byte following the 4-byte start code.
    int naluType = ((uint8_t)pFromeData[4] & 0x1f);

    // Cache SPS/PPS payloads with the start code stripped.
    if (naluType == 7) {
        spsData = [NSData dataWithBytes:pFromeData + 4 length:length - 4];
    } else if (naluType == 8) {
        ppsData = [NSData dataWithBytes:pFromeData + 4 length:length - 4];
    }

    // Build the format description once both parameter sets are known.
    // Only build when we do not already have one: the original re-created a
    // CMVideoFormatDescriptionRef on every call, leaking one per frame.
    if (spsData != nil && ppsData != nil && videoFormatDescription == NULL) {
        const uint8_t *const parameterSetPointers[2] = {
            (const uint8_t *)spsData.bytes,
            (const uint8_t *)ppsData.bytes
        };
        const size_t parameterSetSizes[2] = { spsData.length, ppsData.length };
        CMVideoFormatDescriptionRef formatDesc = NULL;
        OSStatus formatCreateResult =
            CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
                                                                2,  // parameter-set count
                                                                parameterSetPointers,
                                                                parameterSetSizes,
                                                                4,  // AVCC NAL length-header size
                                                                &formatDesc);
        if (formatCreateResult == noErr) {
            videoFormatDescription = formatDesc;
            if (decompressionSession == NULL ||
                !VTDecompressionSessionCanAcceptFormatDescription(decompressionSession, formatDesc)) {
                [self createDecompSession];
            }
        }
    }

    // Decode slice NALs once the session is ready.
    if ((naluType == 1 || naluType == 5) && videoFormatDescription && decompressionSession) {
        if (length > BUFFER_SIZE) {
            NSLog(@"NAL unit of %d bytes exceeds BUFFER_SIZE, dropping", length);
            return;
        }

        // Repackage Annex-B -> AVCC: replace the start code with a big-endian
        // 4-byte payload length. The original wrote only the length header and
        // never copied the payload, so stale buffer bytes were decoded.
        uint32_t dataLength32 = htonl(length - 4);
        memcpy(pFrameData, &dataLength32, sizeof(uint32_t));
        memcpy(pFrameData + sizeof(uint32_t), pFromeData + 4, length - 4);

        CMBlockBufferRef blockBuffer = NULL;
        OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault,
                                                             pFrameData,
                                                             length,
                                                             kCFAllocatorNull,  // do not free pFrameData
                                                             NULL,
                                                             0,
                                                             length,
                                                             kCMBlockBufferAlwaysCopyDataFlag,
                                                             &blockBuffer);
        if (status != kCMBlockBufferNoErr) {
            return;
        }

        const size_t sampleSize = CMBlockBufferGetDataLength(blockBuffer);
        CMSampleBufferRef sampBuf = NULL;
        status = CMSampleBufferCreateReady(kCFAllocatorDefault,
                                           blockBuffer,
                                           videoFormatDescription,
                                           1,        // sample count
                                           0, NULL,  // no timing info
                                           1, &sampleSize,
                                           &sampBuf);
        if (status == noErr) {
            VTDecodeFrameFlags flags = 0;  // synchronous decode on this thread
            VTDecodeInfoFlags flagOut = 0;
            // NOTE(review): the 4th argument is only an opaque sourceFrameRefCon;
            // the output callback must assign the decoded buffer through it for
            // outputPixelBuffer to be non-NULL here — confirm against the callback.
            CVPixelBufferRef outputPixelBuffer = NULL;
            OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(decompressionSession,
                                                                      sampBuf,
                                                                      flags,
                                                                      &outputPixelBuffer,
                                                                      &flagOut);
            if (decodeStatus == kVTInvalidSessionErr) {
                NSLog(@"IOS8VT: Invalid session, reset decoder session");
            } else if (decodeStatus == kVTVideoDecoderBadDataErr) {
                NSLog(@"IOS8VT: decode failed status=%d(Bad data)", (int)decodeStatus);
            } else if (decodeStatus != noErr) {
                NSLog(@"IOS8VT: decode failed status=%d ", (int)decodeStatus);
            } else if (outputPixelBuffer != NULL) {
                // Guarded against NULL: the original converted unconditionally
                // and would crash when the callback had not filled the ref-con.
                CIImage *ciImage = [CIImage imageWithCVPixelBuffer:outputPixelBuffer];
                CIContext *temporaryContext = [CIContext contextWithOptions:nil];
                CGRect extent = CGRectMake(0, 0,
                                           CVPixelBufferGetWidth(outputPixelBuffer),
                                           CVPixelBufferGetHeight(outputPixelBuffer));
                CGImageRef videoImage = [temporaryContext createCGImage:ciImage fromRect:extent];
                UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
                (void)uiImage;  // TODO: hand the image to the display layer/view
                CGImageRelease(videoImage);
            }
            CFRelease(sampBuf);  // fixes a sample-buffer leak in the original
        }
        CFRelease(blockBuffer);  // fixes a block-buffer leak in the original
    }
}
阅读全文
0 0
- IOS 硬件GPU解码
- IOS硬件解码VTDecompressionSession失效
- iOS 硬件解码H.264
- IOS硬件解码VTDecompressionSession失效
- 树莓派raspberry pi3硬件解码H264 GPU OMX
- webrc ios 打开h264 硬件编解码
- 硬件解码
- 硬件解码
- iOS硬件编解码库VideoToolBox的调用
- iOS中H.264视频流硬件解码
- iOS系统H264视频硬件编解码说明
- GPU硬解码---CUVID
- GPU硬解码---DXVA
- intel gpu 解码
- GPU硬件虚拟化
- GPU硬件结构--CUDA
- GPU的硬件结构
- [转]GPU硬件结构
- 委内瑞拉总统宣布将推出由石油支持的加密货币
- 莫斯科政府的开源区块链投票工具
- 美国政府新的网络部门首次提起“ICO诈骗”诉讼
- android从放弃到精通 第一天 重拾项目开发
- 数字金额转大写
- IOS 硬件GPU解码
- guice和spring的整合
- 解决Studio3.0 Dagger2注入Error:android-apt plugin不兼容的问题
- maven项目pom文件中有红叉:Missing artifact jar
- docker on marathon&mesos示例
- poj-3468 A Simple Problem with Integers(线段树,树状数组区间求和)
- 如何优雅的抄袭代码!
- redis集群安装部署
- centos7虚拟机编译openwrt填坑