开源zxing库加入xcode工程(二)

来源:互联网 发布:张伯伦命中率数据统计 编辑:程序博客网 时间:2024/05/13 11:15

上一篇文章介绍了搭建zxing在xcode的环境和测试的第一种方式。今天介绍下第二种方式。

第二种方式的主要流程:创建会话、连接设备,然后调用 startRunning 启动采集。之后在委托回调里就能拿到图片,把图片传入 Decoder 库解码,就能得到相应的二维码数据了。如下图:


相信大家能看懂意思。 

直接上代码。

.h 文件

#import <UIKit/UIKit.h>

#import <AVFoundation/AVFoundation.h>

#import <Decoder.h>

// Delegate protocol: notifies the adopter when a QR code has been scanned.
@protocol MyCustomViewControllerDelegate <NSObject>

// Called with the decoded text after a successful scan.
- (void)didScannResult:(NSString *)sResult;

@end

// View controller that scans QR codes with an AVFoundation capture session
// and decodes frames with the zxing Decoder library, reporting the decoded
// text through MyCustomViewControllerDelegate.
// NOTE(review): this class lists MyCustomViewControllerDelegate among its own
// conformances even though the callback is implemented by the presenting
// controller — confirm that conformance is intentional.
@interface MyCustomViewController : UIViewController <DecoderDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, MyCustomViewControllerDelegate> {

    BOOL isScanning;    // YES while the capture session is actively scanning

}

@property (nonatomic, strong) AVCaptureSession *session;             // 捕获会话 (capture session)
// Fixed: the original attribute list had a trailing comma — "(nonatomic,strong,)"
// — which does not compile.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *preview;   // 预览视图 (camera preview layer)

// Receiver of scan results. `assign` (non-retaining) avoids a retain cycle.
@property (nonatomic, assign) id<MyCustomViewControllerDelegate> delegate;

@end


/////.m 文件

#import "MyCustomViewController.h"

#import <AssetsLibrary/AssetsLibrary.h>

#import <QRCodeReader.h>

#import <TwoDDecoderResult.h>

// Private class extension (no additional private members declared).
@interface MyCustomViewController ()

@end

@implementation MyCustomViewController

// Back the delegate property with the delegate_ ivar, which the
// DecoderDelegate callbacks below access directly.
@synthesize delegate = delegate_;

// Designated initializer — no custom setup is required yet; the capture
// pipeline is built later in viewDidLoad.
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    if ((self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil])) {
        // Custom initialization
    }
    return self;
}

// Sets up the camera capture pipeline once the view hierarchy is loaded.
- (void)viewDidLoad
{
    // Fixed: the original read "[superviewDidLoad]" (missing space), which
    // does not compile.
    [super viewDidLoad];

    // Do any additional setup after loading the view.
    [self initCaputure];
}

- (void)didReceiveMemoryWarning
{
    // Fixed: the original read "[superdidReceiveMemoryWarning]" (missing
    // space), which does not compile.
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

// Builds the AVFoundation capture pipeline (session -> camera input ->
// video-data output -> preview layer) and starts it running.
// NOTE(review): the method name keeps the original "Caputure" misspelling
// because viewDidLoad calls it by that name.
- (void)initCaputure {
    NSError *error = nil;

    // 1. Create the capture session.
    self.session = [[AVCaptureSession alloc] init];

    // Choose a capture resolution by OS version.
    // Fixed: -systemVersion returns an NSString; the original cast the
    // pointer itself to int, which yields garbage — use -intValue instead.
    int nVersion = [[UIDevice currentDevice].systemVersion intValue];
    if (nVersion >= 5) { // iOS 5 and later support the iFrame preset
        self.session.sessionPreset = AVCaptureSessionPresetiFrame960x540;
    } else {
        self.session.sessionPreset = AVCaptureSessionPreset640x480;
    }

    // 2. Find a suitable capture device (the default camera).
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // 3. Create an input for the device and add it to the session.
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!captureInput) {
        NSLog(@"Error: %@", error);
        return;
    }
    [self.session addInput:captureInput];

    // 4. Create a video-data output and add it to the session.
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    // NOTE(review): delivering frames on the main queue keeps decoding on the
    // UI thread — consider a serial background queue for smoother UI.
    [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    // Request BGRA pixel buffers so frames convert cleanly to CGImage later.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];
    [self.session addOutput:captureOutput];

    // 5. Attach a full-screen preview layer and start capturing.
    self.preview = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.preview.frame = self.view.bounds;
    self.preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.preview];

    isScanning = YES;
    [self.session startRunning];
}

// Converts one BGRA camera sample buffer into a UIImage suitable for the
// zxing decoder. Returns nil if an RGB color space cannot be created.
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer while we read it.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Pixel-buffer geometry.
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    if (!colorSpace)
    {
        NSLog(@"CGColorSpaceCreateDeviceRGB failure");
        // Fixed: the original returned here with the pixel buffer still
        // locked; unlock before bailing out.
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return nil;
    }

    // Base address and contiguous data size of the pixel buffer.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer);

    // Wrap the raw pixels in a direct-access data provider (no copy).
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, baseAddress, bufferSize,
                                                              NULL);
    // Build a CGImage over the BGRA data (little-endian, skip-first alpha
    // matches kCVPixelFormatType_32BGRA requested in initCaputure).
    CGImageRef cgImage =
    CGImageCreate(width,
                  height,
                  8,            // bits per component
                  32,           // bits per pixel
                  bytesPerRow,
                  colorSpace,
                  kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little,
                  provider,
                  NULL,
                  true,
                  kCGRenderingIntentDefault);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    // Create and return an image object representing the Quartz image.
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return image;
}

// Runs the zxing QR-code reader over a single camera frame. Results arrive
// through the DecoderDelegate callbacks implemented below.
- (void)decodeImage:(UIImage *)image
{
    // Register just the QR-code reader with the decoder.
    // Fixed: the original had "[[NSMutableSetalloc]init]" etc. (missing
    // spaces), which does not compile.
    NSMutableSet *qrReader = [[NSMutableSet alloc] init];
    QRCodeReader *qrcoderReader = [[QRCodeReader alloc] init];
    [qrReader addObject:qrcoderReader];

    Decoder *decoder = [[Decoder alloc] init];
    decoder.delegate = self;
    decoder.readers = qrReader;
    [decoder decodeImage:image];
    // NOTE(review): under MRC these allocations leak; harmless under ARC —
    // confirm which memory model this target uses.
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

// Called for every captured video frame; converts it to a UIImage and hands
// it to the zxing decoder.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Fixed: skip frames still in flight after a successful decode has
    // stopped the session, so the delegate is not notified more than once.
    if (!isScanning) {
        return;
    }
    [self decodeImage:[self imageFromSampleBuffer:sampleBuffer]];
}

#pragma mark - DecoderDelegate

// Successful decode: stop capturing and forward the decoded text to the
// delegate (if it implements the optional-looking callback).
- (void)decoder:(Decoder *)decoder didDecodeImage:(UIImage *)image usingSubset:(UIImage *)subset withResult:(TwoDDecoderResult *)result
{
    isScanning = NO;
    // Fixed: "[self.sessionstopRunning]" and "[delegate_respondsToSelector:"
    // in the original were missing spaces and do not compile.
    [self.session stopRunning];
    if (delegate_ && [delegate_ respondsToSelector:@selector(didScannResult:)]) {
        [delegate_ didScannResult:result.text];
        NSLog(@"scanned ok..");
    }
}

// Decode failure callback. Failures are expected on most frames while
// scanning, so the reason is only logged once scanning has stopped.
- (void)decoder:(Decoder *)decoder failedToDecodeImage:(UIImage *)image usingSubset:(UIImage *)subset reason:(NSString *)reason
{
    if (isScanning) {
        return;
    }
    NSLog(@"failed...%@",reason);
}

// Caller side: presents the scanner and registers self for its result.
- (void)buttonAction:(id)sender {
    // Fixed: "[[MyCustomViewControlleralloc]initWithNibName:nilbundle:nil]"
    // in the original was missing spaces and does not compile.
    MyCustomViewController *custom = [[MyCustomViewController alloc] initWithNibName:nil bundle:nil];
    custom.view.frame = self.view.frame;
    custom.delegate = self;
    [self.view addSubview:custom.view];
    // NOTE(review): releasing the controller while its view is still on
    // screen leaves a dangling controller/delegate under MRC — keep it in an
    // ivar for the lifetime of the scan instead. (Remove this line under ARC.)
    [custom release];
}

// Fixed: "#pragma Mark" (capital M) is not recognized by Xcode; it must be
// lowercase "mark".
#pragma mark - MyCustomViewControllerDelegate

// Shows the scanned text in this controller's label.
- (void)didScannResult:(NSString *)sResult {
    self.textLabel.text = sResult;
}



原创粉丝点击