iOS RTMP Live Video Streaming Development Notes (1) ----- Capturing Camera Frames


This post is a quick walkthrough of camera capture on iOS.

First, initialize an AVCaptureSession. (Speaking of sessions, did AVAudioSession come to mind?)
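A minimal sketch of that initialization (the 640x480 preset here is my assumption; pick whatever resolution your stream needs):

AVCaptureSession *session = [[AVCaptureSession alloc] init];
// Optionally pick a capture resolution (this preset is an assumption)
if ([session canSetSessionPreset:AVCaptureSessionPreset640x480]) {
    session.sessionPreset = AVCaptureSessionPreset640x480;
}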

Next, set up the Video and Audio capture. The two are configured separately, which means you can capture video only if you want.
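Video and audio each come in through their own AVCaptureDeviceInput, added to the session independently. A sketch (error handling omitted; `session` is the one created above):

// Video input
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
if ([session canAddInput:videoInput]) {
    [session addInput:videoInput];
}
// Audio input -- optional; leave it out to capture video only
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];
if ([session canAddInput:audioInput]) {
    [session addInput:audioInput];
}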

Implement the sample buffer delegate. For video this is AVCaptureVideoDataOutputSampleBufferDelegate (its audio counterpart is AVCaptureAudioDataOutputSampleBufferDelegate):
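Video and audio deliveries share one callback selector; this is the method you implement (the full version appears in the code further down):

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Each video frame / audio buffer arrives here, on the dispatch
    // queue passed to -setSampleBufferDelegate:queue:.
}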

Real-time encoding of these buffers to H.264 and AAC are two more topics in their own right; I'll cover them later.

With configuration done, start the session:
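Using the `session` from the sketches above:

[session startRunning];
// Note: -startRunning is a blocking call, so production code usually
// invokes it on a background queue to keep the main thread responsive.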

1.1 Bonus task: display the captured video frames on screen

This part is simple: the sender just displays the feed with AVFoundation's own AVCaptureVideoPreviewLayer, so easy.

Then add that layer to your view hierarchy and the preview shows up.
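A sketch, again assuming the `session` from earlier and running inside a view controller:

AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
previewLayer.frame = self.view.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:previewLayer];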

Full implementation code:

#import "MyAVController.h"

#import <AVFoundation/AVFoundation.h>

#import <CoreGraphics/CoreGraphics.h>

#import <CoreVideo/CoreVideo.h>

#import <CoreMedia/CoreMedia.h>


@interface MyAVController()<AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic,retain) AVCaptureSession *captureSession;

@property (nonatomic,retain) UIImageView *imageView;

@property (nonatomic,retain) CALayer *customLayer;

@property (nonatomic,retain) AVCaptureVideoPreviewLayer *prevLayer;

- (void)initCapture;


@end


#import "MyAVController.h"


@implementation MyAVController
{
    AVCaptureSession *_captureSession;
    UIImageView *_imageView;
    CALayer *_customLayer;
    AVCaptureVideoPreviewLayer *_prevLayer;
    AVCaptureConnection *_videoConnection;
    AVCaptureConnection *_audioConnection;
}

#pragma mark -
#pragma mark Initialization

- (id)init {
    self = [super init];
    if (self) {
        self.imageView = nil;
        self.prevLayer = nil;
        self.customLayer = nil;
    }
    return self;
}


- (void)viewDidLoad {
    [super viewDidLoad];
    [self initCapture];
}


- (void)initCapture {
    // Configure the capture input source (the camera)
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Wrap the device in a capture input object
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
    AVCaptureVideoDataOutput *captureOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    //captureOutput.minFrameDuration = CMTimeMake(1, 10);

    // Configure the capture output -- the interface through which we receive video frames
    dispatch_queue_t queue;
    queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Configure the output pixel format
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    self.captureSession = [[[AVCaptureSession alloc] init] autorelease]; // autorelease balances alloc under MRC
    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    [self.captureSession startRunning];

    // Save the connection so the sample buffer delegate can tell whether a
    // buffer carries video or audio. (Note: this example adds no audio
    // input/output, so _audioConnection stays nil here.)
    _videoConnection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];

    // view
    self.customLayer = [CALayer layer];
    self.customLayer.frame = self.view.bounds;
    self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];

    self.imageView = [[[UIImageView alloc] init] autorelease]; // autorelease balances alloc under MRC
    self.imageView.frame = CGRectMake(0, 0, 100, 100);
    [self.view addSubview:self.imageView];

    self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.prevLayer.frame = CGRectMake(100, 0, 100, 100);
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.prevLayer];
}


#pragma mark -
#pragma mark AVCaptureSession delegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // sampleBuffer is the captured data; whether it is video or audio
    // has to be determined from the connection it arrived on.
    if (connection == _videoConnection) {  // Video
        /*
         // Query the current video dimensions
         CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
         int width = CVPixelBufferGetWidth(pixelBuffer);
         int height = CVPixelBufferGetHeight(pixelBuffer);
         NSLog(@"video width: %d  height: %d", width, height);
         */
        NSLog(@"Got a video sampleBuffer here; process it further (e.g. encode to H.264)");
    } else if (connection == _audioConnection) {  // Audio
        NSLog(@"Got an audio sampleBuffer here; process it further (e.g. encode to AAC)");
    }

    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    // Render the frame: lock the pixel buffer and wrap its BGRA bytes in a CGImage
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
                                                    width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);

    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    [self.customLayer performSelectorOnMainThread:@selector(setContents:)
                                       withObject:(__bridge id)newImage
                                    waitUntilDone:YES];

    UIImage *image = [UIImage imageWithCGImage:newImage
                                         scale:1.0
                                   orientation:UIImageOrientationRight];

    CGImageRelease(newImage);

    [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                     withObject:image
                                  waitUntilDone:YES];

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    [pool drain];
}


#pragma mark -
#pragma mark Memory management

- (void)viewDidUnload {
    [super viewDidUnload];
    self.imageView = nil;
    self.customLayer = nil;
    self.prevLayer = nil;
}

- (void)dealloc {
    [_captureSession stopRunning]; // stop capture before releasing the session
    [_captureSession release];
    [super dealloc];
}

@end



