Getting Camera Data Directly on iOS
Frameworks to add: CoreMedia, CoreVideo, QuartzCore, AVFoundation.
The sample builds an AVCaptureSession with an AVCaptureVideoDataOutput, receives every frame as a BGRA pixel buffer in the sample-buffer delegate, and displays it three ways: as the contents of a plain CALayer, as a UIImage in a UIImageView, and through an AVCaptureVideoPreviewLayer.
MyAVController.h:
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
@interface MyAVController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVCaptureSession *_captureSession;
    UIImageView *_imageView;
    CALayer *_customLayer;
    AVCaptureVideoPreviewLayer *_prevLayer;
}
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
- (void)initCapture;
@end
MyAVController.m:
#import "MyAVController.h"
@implementation MyAVController
@synthesize captureSession = _captureSession;
@synthesize imageView = _imageView;
@synthesize customLayer = _customLayer;
@synthesize prevLayer = _prevLayer;
#pragma mark -
#pragma mark Initialization
- (id)init {
    self = [super init];
    if (self) {
        self.imageView = nil;
        self.prevLayer = nil;
        self.customLayer = nil;
    }
    return self;
}
- (void)viewDidLoad {
    [super viewDidLoad];
    [self initCapture];
}
- (void)initCapture {
    // Camera input: the default video device (the back camera on most devices).
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
        deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                        error:nil];

    // Video data output: delivers every frame to the sample-buffer delegate below.
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    //captureOutput.minFrameDuration = CMTimeMake(1, 10); // deprecated; set the frame duration on the AVCaptureConnection instead

    // Deliver frames on a private serial queue so the main thread is not blocked.
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Ask for BGRA pixel buffers, which map directly onto a CGBitmapContext.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    // Assemble and start the session; the alloc'ed objects are autoreleased/released
    // so the retain properties do not leak under MRC.
    self.captureSession = [[[AVCaptureSession alloc] init] autorelease];
    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    [captureOutput release];
    [self.captureSession startRunning];

    // Display path 1: a plain CALayer whose contents are set to each frame's CGImage.
    self.customLayer = [CALayer layer];
    self.customLayer.frame = self.view.bounds;
    self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];

    // Display path 2: a UIImageView fed with a UIImage built from each frame.
    self.imageView = [[[UIImageView alloc] init] autorelease];
    self.imageView.frame = CGRectMake(0, 0, 100, 100);
    [self.view addSubview:self.imageView];

    // Display path 3: the standard AVCaptureVideoPreviewLayer.
    self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.prevLayer.frame = CGRectMake(100, 0, 100, 100);
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.prevLayer];
}
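// Optional (not in the original post): a session preset can be set before startRunning
// to control the size of the frames the delegate receives, for example:
//     if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
//         self.captureSession.sessionPreset = AVCaptureSessionPreset640x480;
//     }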
#pragma mark -
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // This callback runs on the capture queue, so it needs its own autorelease pool (MRC).
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    // Lock the pixel buffer and grab the raw BGRA bytes.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Wrap the bytes in a bitmap context and copy them out as a CGImage.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
        colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    // UI updates must happen on the main thread; waitUntilDone:YES keeps newImage alive
    // until the layer has taken ownership of it.
    [self.customLayer performSelectorOnMainThread:@selector(setContents:)
                                       withObject:(id)newImage
                                    waitUntilDone:YES];

    // Also wrap the CGImage in a UIImage for the UIImageView; the Right orientation
    // compensates for the sensor's native landscape orientation.
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0
                                   orientation:UIImageOrientationRight];
    CGImageRelease(newImage);
    [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                     withObject:image
                                  waitUntilDone:YES];

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    [pool drain];
}
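// Sketch, not part of the original post: because the output is configured for
// kCVPixelFormatType_32BGRA, the locked baseAddress in the delegate above can also be
// read directly; that is the raw camera data the title refers to. A hypothetical helper
// that samples one pixel (call it between the lock/unlock calls):
static void MYSampleBGRAPixel(uint8_t *baseAddress, size_t bytesPerRow,
                              size_t x, size_t y,
                              uint8_t *outBlue, uint8_t *outGreen, uint8_t *outRed) {
    uint8_t *pixel = baseAddress + y * bytesPerRow + x * 4; // 4 bytes per pixel: B, G, R, A
    *outBlue  = pixel[0];
    *outGreen = pixel[1];
    *outRed   = pixel[2];
}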
#pragma mark -
#pragma mark Memory management
- (void)viewDidUnload {
    [super viewDidUnload];
    self.imageView = nil;
    self.customLayer = nil;
    self.prevLayer = nil;
}
- (void)dealloc {
    [_captureSession release];
    [_imageView release];
    [_customLayer release];
    [_prevLayer release];
    [super dealloc];
}
@end