Getting a real-time video stream from the camera on iOS (work in progress)


I recommend first reading the blog post at http://www.cnblogs.com/kenshincui/p/4186022.html for a better understanding of recording and playing back video and audio.

First, the demo below captures the camera's video stream and turns each frame into an image (a UIImage; a sketch of encoding it as JPEG follows the listing).

//
//  ViewController.m
//  Real-time video demo
//
//  Created by 程磊 on 15/4/11.
//  Copyright (c) 2015 nightGroup. All rights reserved.
//

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

// Keep a strong reference to the session; without it, the session created in
// setupCaptureSession would be deallocated under ARC as soon as the method returns.
@property (nonatomic, strong) AVCaptureSession *session;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    [self setupCaptureSession];
}

- (void)setupCaptureSession
{
    NSError *error = nil;

    // Create the session; it coordinates the flow of data from the inputs to the outputs.
    AVCaptureSession *session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower resolution video frames, if your
    // processing algorithm can cope. We'll specify medium quality for the
    // chosen device.
    session.sessionPreset = AVCaptureSessionPresetMedium;

    // Find a suitable AVCaptureDevice. This returns the back camera by default;
    // see the sketch after the listing for selecting the front camera instead.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    if (!input) {
        // Handle the error appropriately; there is no camera on the simulator, for example.
        NSLog(@"Failed to create device input: %@", error);
        return;
    }
    [session addInput:input];

    // Create a video data output and add it to the session.
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    [session addOutput:output];

    // Configure the output: sample buffers are delivered to the delegate on a serial queue.
    dispatch_queue_t queue = dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:queue];

    // Specify the pixel format. On iOS the pixel format type is the only supported
    // videoSettings key; the frame size is governed by the session preset.
    output.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };

    // Preview layer that shows what the camera is capturing.
    AVCaptureVideoPreviewLayer *preLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
    preLayer.frame = CGRectMake(0, 0, 320, 240);
    preLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:preLayer];

    // If you wish to cap the frame rate to a known value, such as 15 fps, set the
    // device's minimum frame duration. (AVCaptureVideoDataOutput.minFrameDuration,
    // used in the original Apple sample, has been deprecated since iOS 5.)
    if ([device lockForConfiguration:&error]) {
        device.activeVideoMinFrameDuration = CMTimeMake(1, 15);
        [device unlockForConfiguration];
    }

    // Start the session running to start the flow of data.
    [session startRunning];

    // Assign the session to a property so it stays alive.
    self.session = session;
}

// Delegate routine that is called when a sample buffer was written.
// Note: this is called on the queue passed to setSampleBufferDelegate:queue:,
// not on the main thread.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Create a UIImage from the sample buffer data.
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    // Do something with the image here (see the JPEG sketch below).
}

// Create a UIImage from sample buffer data.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row for the pixel buffer.
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height.
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data (32BGRA layout).
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap graphics context.
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space.
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image.
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image.
    CGImageRelease(quartzImage);

    return image;
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
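The intro mentions JPEG, but the delegate above stops at a UIImage. Below is a minimal sketch of how the delegate callback could encode each frame as JPEG data; what you do with the resulting NSData (save it, send it over the network) is up to you, and the 0.8 compression quality is just an example value.

// Delegate callback extended to encode each frame as JPEG.
// Note: this runs on the "myQueue" dispatch queue, not the main thread.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    @autoreleasepool {
        UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
        // Encode the frame as JPEG; 0.8 is an arbitrary compression quality.
        NSData *jpegData = UIImageJPEGRepresentation(image, 0.8);
        // Hand the data off here (save it, send it, etc.). For now, just log its size.
        NSLog(@"Captured frame: %lu bytes of JPEG", (unsigned long)jpegData.length);
    }
}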
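The setup uses defaultDeviceWithMediaType:, which returns the back camera. A minimal sketch of picking the front camera instead, using the devicesWithMediaType: enumeration from this (pre-iOS 10) era of AVFoundation; the frontCamera method name is just illustrative:

// Pick the front camera if one exists, otherwise fall back to the default (back) camera.
- (AVCaptureDevice *)frontCamera
{
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == AVCaptureDevicePositionFront) {
            return device;
        }
    }
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}

In setupCaptureSession you would then create the device input from [self frontCamera] instead of the default device.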

