iOS: Capturing Camera Video


#import "ViewController.h"

#import <AVFoundation/AVFoundation.h>

#import <CoreGraphics/CoreGraphics.h>

#import <CoreVideo/CoreVideo.h>

#import <CoreMedia/CoreMedia.h>

@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureConnection *videoConnection;
@property (nonatomic, strong) UIImageView *imageView;
@property (nonatomic, strong) CALayer *customLayer;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *prevLayer;

@end

@implementation ViewController


#pragma mark -

#pragma mark init

- (instancetype)init
{
    self = [super init];
    if (self) {
        self.imageView = nil;
        self.prevLayer = nil;
        self.customLayer = nil;
    }
    return self;
}


- (void)viewDidLoad {
    [super viewDidLoad];
    [self initCapture];
}
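// Note: on iOS 10 and later the app must declare NSCameraUsageDescription in
// Info.plist or it will crash when the session starts, and it is good practice
// to ask for camera access first. A minimal sketch, assuming you want to defer
// -initCapture until access is granted:
//
//     [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
//                              completionHandler:^(BOOL granted) {
//         if (granted) {
//             // back to the main queue for UI and session setup
//             dispatch_async(dispatch_get_main_queue(), ^{ [self initCapture]; });
//         }
//     }];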


- (void)initCapture {
    // Configure the capture input source: prefer the front camera, and fall
    // back to the default video device if it is unavailable
    AVCaptureDevice *videoDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionFront];
    if (!videoDevice) {
        videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    // Wrap the device in a capture input object
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
//    captureOutput.minFrameDuration = CMTimeMake(1, 10);
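    // Note: minFrameDuration on AVCaptureVideoDataOutput (commented out above)
    // is long deprecated. A minimal sketch of the replacement, setting the
    // limit on the device instead, assuming its active format supports a
    // 10 fps frame duration:
    //
    //     NSError *frameRateError = nil;
    //     if ([videoDevice lockForConfiguration:&frameRateError]) {
    //         videoDevice.activeVideoMinFrameDuration = CMTimeMake(1, 10); // caps capture at 10 fps
    //         [videoDevice unlockForConfiguration];
    //     }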

    // Configure the capture output, i.e. the interface through which we
    // receive the video frames
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    // Configure the output pixel format
    NSString *key = (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    _captureSession = [[AVCaptureSession alloc] init];
    [_captureSession addInput:captureInput];
    [_captureSession addOutput:captureOutput];
    // Save the connection so the sample buffer delegate can tell whether a
    // buffer came from the video or the audio output
    _videoConnection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
    [_captureSession startRunning];

    _customLayer = [CALayer layer];
    _customLayer.frame = self.view.bounds;
    _customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    _customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:_customLayer];

    _imageView = [[UIImageView alloc] init];
    _imageView.frame = CGRectMake(0, 0, 100, 100);
    [self.view addSubview:_imageView];

//    _prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
//    _prevLayer.frame = CGRectMake(100, 0, 100, 100);
//    _prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
//    [self.view.layer addSublayer:self.prevLayer];
}


/**
 *  Get the camera device at the specified position
 *
 *  @param position camera position
 *
 *  @return the matching camera device, or nil if none exists
 */

- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    return nil;
}
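// Note: +devicesWithMediaType: is deprecated as of iOS 10. A minimal sketch of
// the replacement using AVCaptureDeviceDiscoverySession; the method name and
// the single wide-angle device type here are assumptions, so extend the type
// list as your app requires:
- (AVCaptureDevice *)discoverCameraWithPosition:(AVCaptureDevicePosition)position {
    AVCaptureDeviceDiscoverySession *discoverySession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:position];
    // The discovery session returns only devices matching the requested
    // media type and position, so the first result (if any) is what we want
    return discoverySession.devices.firstObject;
}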


#pragma mark -

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // sampleBuffer holds the captured data; whether it is video or audio is
    // determined by the connection it arrived on
    if (connection == _videoConnection) {  // Video
        /*
         // Query the current video dimensions
         CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
         size_t width = CVPixelBufferGetWidth(pixelBuffer);
         size_t height = CVPixelBufferGetHeight(pixelBuffer);
         NSLog(@"video width: %zu  height: %zu", width, height);
         */
        NSLog(@"Got a video sampleBuffer here; process it further (e.g. encode to H.264)");
    }
//    if (connection == _audioConnection) {  // Audio
//        NSLog(@"Got an audio sampleBuffer here; process it further (e.g. encode to AAC)");
//    }

    // Convert the pixel buffer into a CGImage and display it
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    // Push the frame to the UI on the main thread
    [_customLayer performSelectorOnMainThread:@selector(setContents:) withObject:(__bridge id)newImage waitUntilDone:YES];
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(newImage);
    [_imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
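// Note: an alternative sketch for the buffer-to-image conversion inside the
// delegate method above, via Core Image (add #import <CoreImage/CoreImage.h>);
// this avoids the manual CGBitmapContext setup and works with the 32BGRA
// format configured earlier:
//
//     CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
//     UIImage *uiImage = [UIImage imageWithCIImage:ciImage
//                                            scale:1.0
//                                      orientation:UIImageOrientationRight];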


#pragma mark -

#pragma mark Memory management

//- (void)viewDidUnload {
//    _imageView = nil;
//    _customLayer = nil;
//    _prevLayer = nil;
//}


- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
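// Note: this sample never stops the capture session. A minimal sketch, under
// the assumption that the camera should be released when the view disappears
// (the hook chosen here is a suggestion, not part of the original post):
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [self.captureSession stopRunning];
}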



@end


