Utility classes: a recording class and a playback class for WeChat-style short videos

CameraViewController drives the recording UI: hold a finger on the round button to capture video (a CADisplayLink shrinks the progress bar over kDuration = 4 seconds), lift the finger or let the bar run out to stop, and the finished clip is pushed to PlayerViewController.

// CameraViewController.h
#import <UIKit/UIKit.h>

@interface CameraViewController : UIViewController
@end

// CameraViewController.m
#import "CameraViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "ViewController.h"
#import "PlayerViewController.h"

// Maximum recording length in seconds.
#define kDuration 4.0
// How much the progress bar shrinks per frame (~60 fps), so it empties over kDuration seconds.
#define kTrans (self.view.frame.size.width / kDuration / 60.0)

@interface CameraViewController () <AVCaptureFileOutputRecordingDelegate>

@property (weak, nonatomic) IBOutlet NSLayoutConstraint *progressViewHeight;
@property (weak, nonatomic) IBOutlet NSLayoutConstraint *progressViewWidth;
@property (weak, nonatomic) IBOutlet UIView *progressView;
@property (weak, nonatomic) IBOutlet UILabel *clickBtn;
@property (weak, nonatomic) IBOutlet UIView *cameraView;

@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureDevice *videoDevice;
@property (nonatomic, strong) AVCaptureDevice *audioDevice;
@property (nonatomic, strong) AVCaptureDeviceInput *videoInput;
@property (nonatomic, strong) AVCaptureDeviceInput *audioInput;
@property (nonatomic, strong) AVCaptureMovieFileOutput *movieOutput;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
@property (nonatomic, assign) BOOL canSave;
@property (nonatomic, strong) NSURL *outputURL;
@property (nonatomic, strong) CADisplayLink *link;

@end

@implementation CameraViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    [self setupUI];
    // Camera setup is driven by the authorization check, so the session
    // is only configured once access has been granted.
    [self getAuthorization];
}

- (void)setupUI {
    self.clickBtn.layer.borderWidth = 2.0f;
    self.clickBtn.layer.borderColor = [[UIColor greenColor] CGColor];
    self.clickBtn.layer.cornerRadius = 50.f;
    self.clickBtn.layer.masksToBounds = YES;
}

- (void)getAuthorization {
    switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]) {
        case AVAuthorizationStatusAuthorized:
            NSLog(@"camera access already granted");
            // Start the camera.
            [self addCamera];
            break;
        case AVAuthorizationStatusNotDetermined:
            NSLog(@"camera access not determined yet, requesting it");
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
                if (granted) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [self addCamera];
                    });
                }
            }];
            break;
        default:
            // Denied or restricted: nothing we can do here.
            break;
    }
}

- (void)addCamera {
    self.captureSession = [[AVCaptureSession alloc] init];
    if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        [self.captureSession setSessionPreset:AVCaptureSessionPreset640x480];
    }
    [self.captureSession beginConfiguration];

    /*================ videoDevice ================*/
    self.videoDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] firstObject];

    /*================ videoInput ================*/
    NSError *error;
    self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.videoDevice error:&error];
    if (error) {
        NSLog(@"failed to create the video input: %@", error);
        return;
    }
    if ([self.captureSession canAddInput:self.videoInput]) {
        [self.captureSession addInput:self.videoInput];
    }

    /*================ movieOutput ================*/
    self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([self.captureSession canAddOutput:self.movieOutput]) {
        [self.captureSession addOutput:self.movieOutput];
        AVCaptureConnection *captureConnection = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
        if ([captureConnection isVideoStabilizationSupported]) {
            captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
        }
        captureConnection.videoScaleAndCropFactor = captureConnection.videoMaxScaleAndCropFactor;
    }

    /*================ audioDevice ================*/
    self.audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

    /*================ audioInput ================*/
    NSError *audioError;
    self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.audioDevice error:&audioError];
    if (audioError) {
        NSLog(@"failed to create the audio input: %@", audioError);
        return;
    }
    if ([self.captureSession canAddInput:self.audioInput]) {
        [self.captureSession addInput:self.audioInput];
    }

    /*================ addPreviewLayer ================*/
    [self.view layoutIfNeeded];
    self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
    self.captureVideoPreviewLayer.frame = self.view.layer.bounds;
    self.captureVideoPreviewLayer.connection.videoOrientation = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation;
    self.captureVideoPreviewLayer.position = CGPointMake(self.view.frame.size.width * 0.5, self.cameraView.frame.size.height * 0.5);
    CALayer *layer = self.cameraView.layer;
    layer.masksToBounds = YES;
    [self.view layoutIfNeeded];
    [layer addSublayer:self.captureVideoPreviewLayer];

    /*================ commitConfiguration ================*/
    [self.captureSession commitConfiguration];
    [self.captureSession startRunning];
}

// Hit test against the record button's frame.
- (BOOL)isInButtonRect:(CGPoint)point {
    CGFloat x = point.x;
    CGFloat y = point.y;
    return (x > self.clickBtn.frame.origin.x && x <= self.clickBtn.frame.origin.x + self.clickBtn.frame.size.width)
        && (y > self.clickBtn.frame.origin.y && y <= self.clickBtn.frame.origin.y + self.clickBtn.frame.size.height);
}

- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    NSLog(@"touch");
    UITouch *touch = [touches anyObject];
    CGPoint point = [touch locationInView:self.view];
    if ([self isInButtonRect:point]) {
        NSLog(@"start recording");
        // Reset any previous display link; accessing self.link lazily
        // recreates it and starts a new recording.
        [self stopLink];
        [self.link addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
    }
}

- (void)touchesMoved:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
}

- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    [self recordComplete];
    [self stopRecord];
}

- (NSURL *)outputURL1 {
    self.outputURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mov"]];
    // AVCaptureMovieFileOutput will not overwrite an existing file,
    // so remove any leftover recording first.
    [[NSFileManager defaultManager] removeItemAtURL:self.outputURL error:nil];
    return self.outputURL;
}

- (void)startRecord {
    [self.movieOutput startRecordingToOutputFileURL:[self outputURL1] recordingDelegate:self];
}

- (void)stopRecord {
    // Stop capturing video.
    [self.movieOutput stopRecording];
}

- (void)recordComplete {
    self.canSave = YES;
}

// Lazily creating the display link also resets the progress bar and starts recording.
- (CADisplayLink *)link {
    if (_link == nil) {
        _link = [CADisplayLink displayLinkWithTarget:self selector:@selector(refresh:)];
        self.progressViewWidth.constant = self.view.frame.size.width;
        self.progressViewHeight.constant = 2;
        [self startRecord];
    }
    return _link;
}

- (void)stopLink {
    _link.paused = YES;
    [_link invalidate];
    _link = nil;
}

// Called every frame: shrink the progress bar and stop once it reaches zero.
- (void)refresh:(CADisplayLink *)link {
    if (self.progressViewWidth.constant <= 0) {
        self.progressViewWidth.constant = 0;
        [self recordComplete];
        [self stopRecord];
        return;
    }
    self.progressViewWidth.constant -= kTrans;
    NSLog(@"%f", self.progressViewWidth.constant);
}

- (void)showMsgWithTitle:(NSString *)title andContent:(NSString *)content {
    [[[UIAlertView alloc] initWithTitle:title message:content delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil] show];
}

- (void)pushToPlay:(NSURL *)url {
    PlayerViewController *VC = [[UIStoryboard storyboardWithName:@"Main" bundle:nil] instantiateViewControllerWithIdentifier:@"PlayerViewController"];
    VC.outputURL = url;
    [self.navigationController pushViewController:VC animated:YES];
}

#pragma mark - AVCaptureFileOutputRecordingDelegate

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
    NSLog(@"---- recording started ----");
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    NSLog(@"---- recording finished ---- %@", captureOutput.outputFileURL);
    // This callback is not documented to arrive on the main queue, so hop there before doing UI work.
    dispatch_async(dispatch_get_main_queue(), ^{
        if (outputFileURL.absoluteString.length == 0 && captureOutput.outputFileURL.absoluteString.length == 0) {
            [self showMsgWithTitle:@"Error" andContent:@"The recorded video's output URL is invalid"];
            return;
        }
        if (self.canSave) {
            [self pushToPlay:captureOutput.outputFileURL];
            self.canSave = NO;
        }
        [self.captureSession stopRunning];
    });
}

#pragma mark - Lazy loading

- (NSURL *)outputURL {
    if (_outputURL == nil) {
        _outputURL = [[NSURL alloc] init];
    }
    return _outputURL;
}

@end
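The recorder above only pushes the finished clip to the player. If you also want to keep the clip, a minimal sketch using the Photos framework might look like the following; the saveToPhotoLibrary: helper is my own invention (not part of the original class), and it assumes the app's Info.plist already declares NSPhotoLibraryUsageDescription (the recorder itself additionally needs NSCameraUsageDescription and NSMicrophoneUsageDescription on iOS 10+). You could call it from the didFinishRecording delegate callback alongside pushToPlay:.

// Sketch only, not part of the original class: save the recorded .mov to the photo library.
// Add at the top of CameraViewController.m:
#import <Photos/Photos.h>

// Hypothetical helper method.
- (void)saveToPhotoLibrary:(NSURL *)fileURL {
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        // Create a new video asset from the temporary movie file.
        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:fileURL];
    } completionHandler:^(BOOL success, NSError * _Nullable error) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (success) {
                [self showMsgWithTitle:@"Saved" andContent:@"The video was saved to the photo library"];
            } else {
                [self showMsgWithTitle:@"Error" andContent:error.localizedDescription ?: @"Could not save the video"];
            }
        });
    }];
}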
PlayerViewController takes the recorded file's URL, renders it in an AVPlayerLayer, and loops playback by listening for AVPlayerItemDidPlayToEndTimeNotification.

// PlayerViewController.h
#import <UIKit/UIKit.h>

@interface PlayerViewController : UIViewController
@property (nonatomic, strong) NSURL *outputURL;
@end

// PlayerViewController.m
#import "PlayerViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <AVKit/AVKit.h>

@interface PlayerViewController ()
@property (nonatomic, strong) AVPlayer *player;
@property (nonatomic, strong) AVPlayerItem *playerItem;
@property (nonatomic, strong) AVPlayerLayer *playerLayer;
@end

@implementation PlayerViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor whiteColor];
    [self createUI];
    // Observe the item created in createUI so only this player's end-of-playback triggers the loop.
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playbackFinished:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:self.playerItem];
}

- (void)createUI {
    self.playerItem = [AVPlayerItem playerItemWithURL:self.outputURL];
    self.player = [AVPlayer playerWithPlayerItem:self.playerItem];
    self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
    self.playerLayer.frame = CGRectMake(0, 64, self.view.frame.size.width, 200);
    self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.playerLayer];
    [self.player play];
}

- (void)playbackFinished:(NSNotification *)notification {
    // Loop: seek back to the start and play again.
    [_player seekToTime:CMTimeMake(0, 1)];
    [_player play];
}

- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

@end
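The notification-based loop above restarts playback with a brief hiccup at the boundary. On iOS 10 and later you could instead get gapless looping with AVFoundation's AVQueuePlayer and AVPlayerLooper. The sketch below is an alternative createUI under that assumption; it presumes two extra strong properties (queuePlayer and playerLooper, my own names) added to the class extension, and with this variant the AVPlayerItemDidPlayToEndTimeNotification observer and playbackFinished: are no longer needed.

// Sketch only: gapless looping variant of createUI (iOS 10+).
// Assumes these extra properties on the class extension:
//   @property (nonatomic, strong) AVQueuePlayer *queuePlayer;
//   @property (nonatomic, strong) AVPlayerLooper *playerLooper; // must stay alive while looping
- (void)createUI {
    AVPlayerItem *item = [AVPlayerItem playerItemWithURL:self.outputURL];
    self.queuePlayer = [AVQueuePlayer queuePlayerWithItems:@[item]];
    // AVPlayerLooper keeps re-enqueueing copies of the template item so playback never stops.
    self.playerLooper = [AVPlayerLooper playerLooperWithPlayer:self.queuePlayer templateItem:item];

    self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.queuePlayer];
    self.playerLayer.frame = CGRectMake(0, 64, self.view.frame.size.width, 200);
    self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.playerLayer];

    [self.queuePlayer play];
}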