// iOS自定义拍摄小视频压缩上传 (iOS custom short-video capture, compress & upload)
// 来源:互联网 编辑:程序博客网 时间:2024/05/10 02:27 (scraped article header, kept as a comment)
//
// LittleVideoViewController.h
// uploadVideoDemo
//
// Created by 欧阳荣 on 16/9/5.
// Copyright © 2016年 HengTaiXin. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Delegate protocol: notifies the receiver when a short-video capture finishes.
@protocol LittleVideoDelegate <NSObject>
/// Called when recording completes; `filePath` is the file URL of the captured movie.
- (void)finishLittleVideoViewControllerCapture:(NSURL *)filePath;
@end
/// View controller that records a short (max-duration, WeChat-style) video
/// with press-and-hold capture, tap-to-focus and front/back camera switching.
@interface LittleVideoViewController :UIViewController
// weak: the presenting controller owns us, avoid a retain cycle.
@property (nonatomic,weak)id<LittleVideoDelegate> delegate;
@end
//
// LittleVideoViewController.m
// uploadVideoDemo
//
// Created by 欧阳荣 on 16/9/5.
// Copyright © 2016年 HengTaiXin. All rights reserved.
//
#import "LittleVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "UIView+RMAdditions.h"
// Theme colors.
#define BLUECOLOR [UIColor colorWithRed:0/255.0 green:155/255.0 blue:225/255.0 alpha:1]
#define REDCOLOR  [UIColor colorWithRed:255/255.0 green:27/255.0 blue:86/255.0 alpha:1]
// Maximum recording duration, in seconds.
#define kDuration 8.0
// Width the progress bar shrinks per CADisplayLink tick (assumes ~60 ticks/s).
// FIX: wrapped in parentheses — the original unparenthesized expansion broke
// operator precedence when kTrans was used inside a larger expression.
#define kTrans (SCREEN_WIDTH / kDuration / 60.0)

/// Recording state of the controller.
typedef NS_ENUM(NSInteger, VideoStatus) {
    VideoStatusEnded = 0,   // idle / not recording
    VideoStatusStarted      // actively recording
};
/// Private state: the AVFoundation capture pipeline plus the capture UI.
@interface LittleVideoViewController () <AVCaptureFileOutputRecordingDelegate>
{
    // Capture pipeline.
    AVCaptureSession *_captureSession;
    AVCaptureDevice *_videoDevice;              // current camera (front or back)
    AVCaptureDevice *_audioDevice;
    AVCaptureDeviceInput *_videoInput;
    AVCaptureDeviceInput *_audioInput;
    AVCaptureMovieFileOutput *_movieOutput;
    AVCaptureVideoPreviewLayer *_captureVideoPreviewLayer;
}
// UI.
@property (nonatomic, strong) UIView *navView;                    // translucent top bar
@property (nonatomic, strong) UIButton *backBtn;                  // dismiss button
@property (nonatomic, strong) UIView *videoView;                  // full-screen preview host
@property (nonatomic, strong) UIView *bottomView;                 // bottom bar with record button
@property (nonatomic, strong) UILabel *tapBtn;                    // "press and hold" record label
@property (nonatomic, assign) VideoStatus status;                 // current recording state
@property (nonatomic, strong) NSLayoutConstraint *progressWidth;  // shrinks while recording
@property (nonatomic, strong) UIView *progressView;               // countdown progress bar
@property (nonatomic, strong) CADisplayLink *link;                // drives the progress bar
@property (nonatomic, assign) BOOL canSave;                       // YES once the take is long enough
@property (nonatomic, strong) UILabel *cancelTip;                 // "slide up to cancel" hint
@property (nonatomic, strong) UIView *focusCircle;                // tap-to-focus ring
@property (nonatomic, strong) UIButton *changeBtn;                // front/back camera switch
@property (nonatomic, strong) UIButton *flashModelBtn;            // flash/torch toggle
@end
@implementation LittleVideoViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Build the capture UI, request camera authorization and install gestures.
[self creatNavView];
}
#pragma mark - CreatUI
// Builds the whole capture UI (preview host, top bar, bottom bar, record
// button, progress bar), then requests authorization and installs gestures.
-(void)creatNavView{
    // Full-screen preview container.
    self.videoView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT)];
    [self.view addSubview:self.videoView];
    self.videoView.layer.masksToBounds = YES;

    // Translucent top bar with flash / camera-switch / back buttons.
    self.navView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, SCREEN_WIDTH, 64)];
    self.navView.backgroundColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:0.6];
    [self.view addSubview:self.navView];
    [self.navView addSubview:self.flashModelBtn];
    [self.navView addSubview:self.changeBtn];

    self.backBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    [_backBtn setImage:[UIImage imageNamed:@"WechatShortVideo_close"] forState:UIControlStateNormal];
    self.backBtn.frame = CGRectMake(15, 25, 25, 25);
    [self.backBtn addTarget:self action:@selector(backBtnClick) forControlEvents:UIControlEventTouchUpInside];
    [self.navView addSubview:self.backBtn];

    // Translucent bottom bar hosting the press-and-hold record label.
    self.bottomView = [[UIView alloc] initWithFrame:CGRectMake(0, SCREEN_HEIGHT - 170/2 - 4, SCREEN_WIDTH, 170/2 + 4)];
    self.bottomView.backgroundColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:0.6];
    [self.view addSubview:self.bottomView];

    self.tapBtn = [[UILabel alloc] initWithFrame:CGRectMake(SCREEN_WIDTH/2 - 60/2, 85/2 - 60/2, 60, 60)];
    self.tapBtn.text = @"按住拍";
    self.tapBtn.textColor = [UIColor whiteColor];
    [self.bottomView addSubview:_tapBtn];
    self.tapBtn.font = [UIFont systemFontOfSize:15];
    self.tapBtn.textAlignment = NSTextAlignmentCenter;
    _tapBtn.layer.borderWidth = 4;
    _tapBtn.layer.cornerRadius = 60/2;
    _tapBtn.layer.masksToBounds = YES;
    _tapBtn.layer.borderColor = BLUECOLOR.CGColor;

    // Countdown progress bar, laid out with Auto Layout so the recording timer
    // can animate it purely by changing the width constraint's constant.
    self.progressView = [[UIView alloc] init];
    _progressView.translatesAutoresizingMaskIntoConstraints = NO;
    _progressView.backgroundColor = BLUECOLOR;
    self.progressView.alpha = 0;
    [self.view addSubview:_progressView];

    // Horizontally centered in self.view.
    NSLayoutConstraint *centerX = [NSLayoutConstraint constraintWithItem:_progressView attribute:NSLayoutAttributeCenterX relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeCenterX multiplier:1.0 constant:0.0];
    // Pinned just above the bottom bar.
    NSLayoutConstraint *centerY = [NSLayoutConstraint constraintWithItem:_progressView attribute:NSLayoutAttributeCenterY relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeCenterY multiplier:1.0 constant:SCREEN_HEIGHT - 170/2 - 2 - SCREEN_HEIGHT/2];
    // FIX: width and height originally used NSLayoutRelationGreaterThanOrEqual,
    // which leaves the actual size ambiguous and stops the bar from tracking
    // the shrinking `constant` in -refresh:. They must pin the size exactly.
    self.progressWidth = [NSLayoutConstraint constraintWithItem:_progressView attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.0 constant:SCREEN_WIDTH];
    NSLayoutConstraint *height = [NSLayoutConstraint constraintWithItem:_progressView attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.0 constant:4];
    [self.view addConstraints:@[centerX, centerY, self.progressWidth, height]];

    [self getAuthorization];
    [self addGenstureRecognizer];
}
#pragma mark - Touches
// Press-and-hold on the record button starts recording.
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
    [super touchesBegan:touches withEvent:event]; // FIX: super call was missing
    UITouch *touch = [touches anyObject];
    CGPoint point = [touch locationInView:self.view];
    BOOL condition = [self isInBtnRect:point];
    if (condition) {
        [self isFitCondition:condition];
        [self startAnimation];
        // Hide the flash / camera-switch buttons while recording.
        self.changeBtn.hidden = self.flashModelBtn.hidden = YES;
    }
}
// Sliding off the button switches the UI into "release to cancel" mode.
- (void)touchesMoved:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
    [super touchesMoved:touches withEvent:event];
    UITouch *touch = [touches anyObject];
    CGPoint point = [touch locationInView:self.view];
    [self isFitCondition:[self isInBtnRect:point]];
}
- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
    [super touchesEnded:touches withEvent:event]; // FIX: super call was missing
    UITouch *touch = [touches anyObject];
    CGPoint point = [touch locationInView:self.view];
    BOOL condition = [self isInBtnRect:point];
    /*
     A take still counts as successful in two cases:
     1. the finger lifts inside the button after more than 1/3 of the max duration;
     2. the progress bar runs out while the finger is still down — touchesEnded
        is NOT called then, so -refresh: invokes recordComplete directly.
     */
    if (condition) {
        // progressWidth shrinks from SCREEN_WIDTH toward 0, so a constant below
        // ~0.67*width means more than a third of the duration has elapsed.
        if (self.progressWidth.constant < SCREEN_WIDTH * 0.67) {
            [self recordComplete];
        }
    }
    [self stopAnimation];
    self.changeBtn.hidden = self.flashModelBtn.hidden = NO;
}
// Hit-tests `point` (in self.view coordinates) against the record button,
// translating the button's frame out of bottomView's coordinate space.
- (BOOL)isInBtnRect:(CGPoint)point
{
    CGFloat minX = self.tapBtn.left;
    CGFloat maxX = self.tapBtn.right;
    CGFloat minY = self.tapBtn.top + self.bottomView.y;
    CGFloat maxY = self.tapBtn.bottom + self.bottomView.y;
    BOOL insideX = (point.x > minX) && (point.x <= maxX);
    BOOL insideY = (point.y > minY) && (point.y <= maxY);
    return insideX && insideY;
}
//po self.tapBtn.left 130 self.tapBtn.right 190 xCGFloat 146 yCGFloat 523
//self.tapBtn.top 12 self.bottomView.bottom 568
// Updates the hint label and progress-bar color depending on whether the
// finger is still inside the record button (YES = keep recording look,
// NO = red "release to cancel" look).
- (void)isFitCondition:(BOOL)condition
{
    if (condition) {
        self.cancelTip.text = @"↑上滑取消";
        self.cancelTip.backgroundColor = [UIColor clearColor];
        self.cancelTip.textColor = BLUECOLOR;
        self.progressView.backgroundColor = BLUECOLOR;
    } else {
        self.progressView.backgroundColor = REDCOLOR;
        self.cancelTip.text = @"松手取消";
        self.cancelTip.backgroundColor = REDCOLOR;
        self.cancelTip.textColor = [UIColor whiteColor];
    }
}
// Begin-recording transition: fade in the hint and progress bar, scale the
// record button away, then start the display link (which starts recording).
- (void)startAnimation
{
    if (self.status == VideoStatusEnded) {
        self.status = VideoStatusStarted;
        [UIView animateWithDuration:0.5 animations:^{
            self.cancelTip.alpha = self.progressView.alpha = 1.0;
            self.tapBtn.alpha = 0.0;
            self.tapBtn.transform = CGAffineTransformMakeScale(2.0, 2.0);
        } completion:^(BOOL finished) {
            // Recreate the link; the lazy getter also resets the bar and starts recording.
            [self stopLink];
            [self.link addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
        }];
    }
}
// End-recording transition: stop the link and the movie output, restore the UI.
- (void)stopAnimation{
    if (self.status == VideoStatusStarted) {
        self.status = VideoStatusEnded;
        [self stopLink];
        [self stopRecord];
        [UIView animateWithDuration:0.5 animations:^{
            self.cancelTip.alpha = self.progressView.alpha = 0.0;
            self.tapBtn.alpha = 1.0;
            self.tapBtn.transform = CGAffineTransformMakeScale(1.0, 1.0);
        } completion:^(BOOL finished) {
            // Reset the countdown for the next take.
            self.progressWidth.constant = SCREEN_WIDTH;
        }];
    }
}
// Lazy display-link getter.
// NOTE(review): creating the link also resets the progress bar and STARTS the
// recording as a side effect — startAnimation relies on this; keep in mind
// when reading call sites.
- (CADisplayLink *)link{
    if (!_link) {
        _link = [CADisplayLink displayLinkWithTarget:self selector:@selector(refresh:)];
        self.progressWidth.constant = SCREEN_WIDTH;
        [self startRecord];
    }
    return _link;
}
// Invalidate and drop the link (CADisplayLink retains its target until invalidated).
- (void)stopLink
{
    _link.paused = YES;
    [_link invalidate];
    _link = nil;
}
// Display-link tick: shrink the progress bar; when it reaches zero the maximum
// duration elapsed with the finger still down, so finish the recording here
// (touchesEnded will not fire in that case).
- (void)refresh:(CADisplayLink *)link
{
    if (self.progressWidth.constant <= 0) {
        self.progressWidth.constant = 0;
        [self recordComplete];
        [self stopAnimation];
        return;
    }
    self.progressWidth.constant -= kTrans;
}
#pragma mark - Recording
// Fixed destination for the raw capture in the temp directory
// (overwritten on every take).
- (NSURL *)outPutFileURL
{
    return [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"outPut.mov"]];
}
- (void)startRecord
{
    [_movieOutput startRecordingToOutputFileURL:[self outPutFileURL] recordingDelegate:self];
}
- (void)stopRecord
{
    // Stops (or cancels) the movie-file capture in progress.
    [_movieOutput stopRecording];
}
// Marks the current take as long enough to keep; checked in the
// didFinishRecording delegate callback.
- (void)recordComplete
{
    self.canSave = YES;
}
// Call when leaving the capture screen for good.
- (void)quit
{
    [_captureSession stopRunning];
}
#pragma mark - AVCaptureFileOutputRecordingDelegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
    NSLog(@"----开始录制 ----");
}
// Recording finished: hand the file to the delegate only when the take was
// long enough (canSave set by -recordComplete); short/cancelled takes are dropped.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    NSLog(@"----录制结束 outputFileURL---%@- captureOutput.outputFileURL - %@ ",outputFileURL,captureOutput.outputFileURL);
    if (outputFileURL.absoluteString.length == 0 && captureOutput.outputFileURL.absoluteString.length == 0) {
        [self showMsgWithTitle:@"出错了" andContent:@"录制视频保存地址出错"];
        return;
    }
    if (self.canSave) {
        [self pushToPlay:outputFileURL];
        self.canSave = NO;
    }
}
// Forward the captured file URL to the delegate and dismiss.
- (void)pushToPlay:(NSURL *)url
{
    if ([_delegate respondsToSelector:@selector(finishLittleVideoViewControllerCapture:)]) {
        [_delegate finishLittleVideoViewControllerCapture:url];
    }
    [self dismissViewControllerAnimated:YES completion:nil];
}
// Checks / requests camera authorization, then builds the capture pipeline.
// FIX: requestAccessForMediaType:'s completion handler runs on an arbitrary
// queue; pipeline + UI work is now dispatched back to the main queue.
- (void)getAuthorization
{
    /*
     AVAuthorizationStatusNotDetermined — the user has not chosen yet
     AVAuthorizationStatusRestricted   — denied and cannot change (parental controls)
     AVAuthorizationStatusDenied       — denied by the user
     AVAuthorizationStatusAuthorized   — granted
     */
    switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo])
    {
        case AVAuthorizationStatusAuthorized: // already granted
        {
            [self setupAVCaptureInfo];
            break;
        }
        case AVAuthorizationStatusNotDetermined: // ask now
        {
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (granted) {
                        [self setupAVCaptureInfo];
                    } else {
                        [self backBtnClick];
                        [self showMsgWithTitle:@"出错了" andContent:@"用户拒绝授权摄像头的使用权,返回上一页.请打开\n设置-->隐私/通用等权限设置"];
                    }
                });
            }];
            break;
        }
        default: // denied / restricted
        {
            [self backBtnClick];
            [self showMsgWithTitle:@"出错了" andContent:@"拒绝授权,返回上一页.请检查下\n设置-->隐私/通用等权限设置"];
            break;
        }
    }
}
// Assembles the capture pipeline inside a begin/commitConfiguration pair and
// starts the session. Starting the session shows the preview — it does NOT
// start recording (see -startRecord).
- (void)setupAVCaptureInfo
{
    [self addSession];
    [_captureSession beginConfiguration];
    [self addVideo];
    [self addAudio];
    [self addPreviewLayer];
    [_captureSession commitConfiguration];
    [_captureSession startRunning];
}
// Creates the session and applies the capture preset.
- (void)addSession
{
    _captureSession = [[AVCaptureSession alloc] init];
    // FIX: the original passed AVAssetExportPresetHighestQuality, which is an
    // AVAssetExportSession preset, not an AVCaptureSession preset, so
    // canSetSessionPreset: always answered NO and no preset was ever applied.
    // The preset chosen here affects the size of recorded photos/videos.
    if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        [_captureSession setSessionPreset:AVCaptureSessionPresetHigh];
    }
}
// Picks the back camera as the initial device and wires up input and output.
- (void)addVideo
{
    _videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
    [self addVideoInput];
    [self addMovieOutput];
}
// Creates the video AVCaptureDeviceInput and attaches it to the session.
- (void)addVideoInput
{
    NSError *videoError;
    _videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_videoDevice error:&videoError];
    // FIX: check the returned object, not the error pointer — the error may be
    // untouched on success and the input nil on failure.
    if (!_videoInput) {
        NSLog(@"----取得摄像头设备时出错 ------ %@",videoError);
        return;
    }
    if ([_captureSession canAddInput:_videoInput]) {
        [_captureSession addInput:_videoInput];
    }
}
// Creates the movie-file output and configures its video connection.
- (void)addMovieOutput
{
    _movieOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([_captureSession canAddOutput:_movieOutput]) {
        [_captureSession addOutput:_movieOutput];
        AVCaptureConnection *captureConnection = [_movieOutput connectionWithMediaType:AVMediaTypeVideo];
        // Enable automatic video stabilization when the device supports it.
        if ([captureConnection isVideoStabilizationSupported]) {
            captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
        }
        // NOTE(review): this pins the connection at its *maximum* zoom/crop
        // factor, which fully zooms the recording in. That looks unintentional
        // (1.0 is the usual value) — confirm before relying on it.
        captureConnection.videoScaleAndCropFactor = captureConnection.videoMaxScaleAndCropFactor;
    }
}
// Attaches the default microphone to the session.
- (void)addAudio
{
    NSError *audioError;
    _audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    _audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:_audioDevice error:&audioError];
    // FIX: check the returned object, not the error pointer.
    if (!_audioInput) {
        NSLog(@"取得录音设备时出错 ------ %@",audioError);
        return;
    }
    if ([_captureSession canAddInput:_audioInput]) {
        [_captureSession addInput:_audioInput];
    }
}
// Installs the live preview layer inside videoView.
- (void)addPreviewLayer
{
    [self.view layoutIfNeeded];
    _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    _captureVideoPreviewLayer.frame = self.view.layer.bounds;
    // Aspect-fill so the preview covers the whole screen (default is aspect-fit).
    _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // Keep the preview orientation in sync with the movie output's connection.
    _captureVideoPreviewLayer.connection.videoOrientation = [_movieOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation;
    _captureVideoPreviewLayer.position = CGPointMake(self.view.width * 0.5, self.videoView.height * 0.5);
    CALayer *layer = self.videoView.layer;
    layer.masksToBounds = YES;
    [self.view layoutIfNeeded];
    [layer addSublayer:_captureVideoPreviewLayer];
}
// Simple blocking alert.
// NOTE(review): UIAlertView is deprecated since iOS 9 — migrate to
// UIAlertController when the deployment target allows.
- (void)showMsgWithTitle:(NSString *)title andContent:(NSString *)content
{
    [[[UIAlertView alloc] initWithTitle:title message:content delegate:nil cancelButtonTitle:@"确定" otherButtonTitles:nil] show];
}
#pragma mark - Camera lookup (front/back)
// Returns the capture device at `position`, falling back to the first device
// of the requested media type when no device matches.
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
    AVCaptureDevice *captureDevice = devices.firstObject;
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) {
            captureDevice = device;
            break;
        }
    }
    return captureDevice;
}
#pragma mark - Interaction
// Toggles flash and torch together (the flash alone has no visible effect in
// the live preview, hence the torch).
- (void)changeFlashlight:(UIButton *)sender {
    BOOL hasTorch = [_videoDevice hasTorch];
    BOOL hasFlash = [_videoDevice hasFlash];
    if (hasTorch && hasFlash)
    {
        [self changeDevicePropertySafety:^(AVCaptureDevice *captureDevice) {
            // Use the locked `captureDevice` handed in by the helper
            // (same object as _videoDevice here).
            if (captureDevice.flashMode == AVCaptureFlashModeOn) {
                [captureDevice setFlashMode:AVCaptureFlashModeOff];
                [captureDevice setTorchMode:AVCaptureTorchModeOff];
            } else if (captureDevice.flashMode == AVCaptureFlashModeOff) {
                [captureDevice setFlashMode:AVCaptureFlashModeOn];
                [captureDevice setTorchMode:AVCaptureTorchModeOn];
            }
        }];
        sender.selected = !sender.isSelected;
    } else {
        NSLog(@"不能切换闪光模式");
    }
}
// Swaps the session's video input between the front and back cameras.
- (void)changeCamera{
    switch (_videoDevice.position) {
        case AVCaptureDevicePositionBack:
            _videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionFront];
            break;
        case AVCaptureDevicePositionFront:
            _videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
            break;
        default:
            return;
    }
    [self changeDevicePropertySafety:^(AVCaptureDevice *captureDevice) {
        NSError *error;
        AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_videoDevice error:&error];
        if (newVideoInput != nil) {
            // The old input must be removed before canAddInput: will answer YES.
            [_captureSession removeInput:_videoInput];
            if ([_captureSession canAddInput:newVideoInput]) {
                [_captureSession addInput:newVideoInput];
                _videoInput = newVideoInput;
            } else {
                // New input rejected: restore the previous one.
                [_captureSession addInput:_videoInput];
            }
        } else if (error) {
            NSLog(@"切换前/后摄像头失败, error = %@", error);
        }
    }];
}
// Installs tap gestures on the preview: single tap focuses, double tap zooms.
-(void)addGenstureRecognizer{
    UITapGestureRecognizer *singleTapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(singleTap:)];
    singleTapGesture.numberOfTapsRequired = 1;
    singleTapGesture.delaysTouchesBegan = YES;
    UITapGestureRecognizer *doubleTapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(doubleTap:)];
    doubleTapGesture.numberOfTapsRequired = 2;
    doubleTapGesture.delaysTouchesBegan = YES;
    // Give the double tap a chance to win before the single tap fires.
    [singleTapGesture requireGestureRecognizerToFail:doubleTapGesture];
    [self.videoView addGestureRecognizer:singleTapGesture];
    [self.videoView addGestureRecognizer:doubleTapGesture];
}
// Single tap: focus and expose at the tapped point, with a focus-ring animation.
-(void)singleTap:(UITapGestureRecognizer *)tapGesture{
    CGPoint point = [tapGesture locationInView:self.videoView];
    // Convert UI coordinates into the capture device's 0–1 point-of-interest space.
    CGPoint cameraPoint = [_captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point];
    [self setFocusCursorAnimationWithPoint:point];
    [self changeDevicePropertySafety:^(AVCaptureDevice *captureDevice) {
        // Continuous autofocus: refocus automatically whenever needed.
        if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
            [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
        } else {
            NSLog(@"聚焦模式修改失败");
        }
        if ([captureDevice isFocusPointOfInterestSupported]) {
            [captureDevice setFocusPointOfInterest:cameraPoint];
        }
        // AutoExpose: adjust once for the tapped point, then lock.
        if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
            [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
        } else {
            NSLog(@"曝光模式修改失败");
        }
        if ([captureDevice isExposurePointOfInterestSupported]) {
            [captureDevice setExposurePointOfInterest:cameraPoint];
        }
    }];
}
// Double tap: ramp between 1.0x and 1.5x zoom (clamped to the format's maximum).
-(void)doubleTap:(UITapGestureRecognizer *)tapGesture{
    [self changeDevicePropertySafety:^(AVCaptureDevice *captureDevice) {
        if (captureDevice.videoZoomFactor == 1.0) {
            CGFloat target = 1.5;
            if (target < captureDevice.activeFormat.videoMaxZoomFactor) {
                [captureDevice rampToVideoZoomFactor:target withRate:10];
            }
        } else {
            [captureDevice rampToVideoZoomFactor:1.0 withRate:10];
        }
    }];
}
// Focus-ring animation: jump to the tap point, then shrink and fade out.
-(void)setFocusCursorAnimationWithPoint:(CGPoint)point{
    self.focusCircle.center = point;
    self.focusCircle.transform = CGAffineTransformIdentity;
    self.focusCircle.alpha = 1.0;
    [UIView animateWithDuration:0.5 animations:^{
        self.focusCircle.transform = CGAffineTransformMakeScale(0.5, 0.5);
        self.focusCircle.alpha = 0.0;
    }];
}
// Lazily-created focus ring, added to the preview view once.
- (UIView *)focusCircle{
    if (!_focusCircle) {
        UIView *focusCircle = [[UIView alloc] init];
        focusCircle.frame = CGRectMake(0, 0, 100, 100);
        focusCircle.layer.borderColor = BLUECOLOR.CGColor;
        focusCircle.layer.borderWidth = 2;
        focusCircle.layer.cornerRadius = 50;
        focusCircle.layer.masksToBounds = YES;
        _focusCircle = focusCircle;
        [self.videoView addSubview:focusCircle];
    }
    return _focusCircle;
}
// Lazily-created flash/torch toggle (normal = off image, selected = on image).
-(UIButton *)flashModelBtn{
    if (!_flashModelBtn) {
        UIButton *flashModelBtn = [UIButton buttonWithType:UIButtonTypeCustom];
        flashModelBtn.frame = CGRectMake(SCREEN_WIDTH/2 - 15/2, 30, 15 + 2, 20 + 3);
        [flashModelBtn setImage:[UIImage imageNamed:@"sg1"] forState:UIControlStateNormal];
        [flashModelBtn setImage:[UIImage imageNamed:@"sg2"] forState:UIControlStateSelected];
        _flashModelBtn = flashModelBtn;
        _flashModelBtn.imageView.contentMode = UIViewContentModeScaleAspectFill;
        [_flashModelBtn addTarget:self action:@selector(changeFlashlight:) forControlEvents:UIControlEventTouchUpInside];
    }
    return _flashModelBtn;
}
// Lazily-created front/back camera switch button.
-(UIButton *)changeBtn{
    if (!_changeBtn) {
        UIButton *changeBtn = [UIButton buttonWithType:UIButtonTypeCustom];
        changeBtn.frame = CGRectMake(SCREEN_WIDTH - 35, 30, 20 + 5, 16 + 4);
        [changeBtn setImage:[UIImage imageNamed:@"zhxj"] forState:UIControlStateNormal];
        _changeBtn = changeBtn;
        _changeBtn.imageView.contentMode = UIViewContentModeScaleAspectFill;
        [_changeBtn addTarget:self action:@selector(changeCamera) forControlEvents:UIControlEventTouchUpInside];
    }
    return _changeBtn;
}
// Runs a device-configuration block with the required configuration lock held.
// lockForConfiguration:/unlockForConfiguration must bracket every property
// change to prevent concurrent modification of the device.
-(void)changeDevicePropertySafety:(void (^)(AVCaptureDevice *captureDevice))propertyChange{
    // Prefer the session input's device over the _videoDevice ivar.
    AVCaptureDevice *captureDevice = [_videoInput device];
    NSError *error;
    BOOL lockAcquired = [captureDevice lockForConfiguration:&error];
    if (!lockAcquired) {
        NSLog(@"锁定设备过程error,错误信息:%@",error.localizedDescription);
    } else {
        [_captureSession beginConfiguration];
        // FIX: guard against a nil block to avoid a crash on invocation.
        if (propertyChange) {
            propertyChange(captureDevice);
        }
        [captureDevice unlockForConfiguration];
        [_captureSession commitConfiguration];
    }
}
#pragma mark - Lazy loading
// Lazily-created "slide up to cancel" hint label.
-(UILabel *)cancelTip{
    if (_cancelTip == nil) {
        // NOTE(review): IPHONE_WIDTH is defined elsewhere in the project,
        // presumably equal to SCREEN_WIDTH — confirm.
        _cancelTip = [[UILabel alloc] initWithFrame:CGRectMake(IPHONE_WIDTH/2 - 40, SCREEN_HEIGHT - 120, 80, 16)];
        _cancelTip.text = @"↑上滑取消";
        _cancelTip.font = [UIFont systemFontOfSize:16];
        _cancelTip.textAlignment = NSTextAlignmentCenter;
        _cancelTip.backgroundColor = [UIColor clearColor];
        _cancelTip.textColor = BLUECOLOR;
        _cancelTip.alpha = 0;
        [self.view addSubview:_cancelTip];
    }
    return _cancelTip;
}
// Dismiss the capture screen.
-(void)backBtnClick{
    [self dismissViewControllerAnimated:YES completion:nil];
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
@end
//
// UIView+RMAdditions.h
// RMCategories
//
// Created by Richard McClellan on 5/27/13.
// Copyright (c) 2013 Richard McClellan. All rights reserved.
//
#define SCREEN_WIDTH ([UIScreen mainScreen].bounds.size.width)
#define SCREEN_HEIGHT ([UIScreen mainScreen].bounds.size.height)
#import <UIKit/UIKit.h>
/// Geometry shortcuts onto a view's frame/center, plus a simple loading overlay.
@interface UIView (RMAdditions)
/// Shortcut for frame.origin.x (settable).
@property (nonatomic,assign) CGFloat x;
/// Alias of `x`: frame.origin.x (settable).
@property (nonatomic,assign) CGFloat left;
/// Shortcut for frame.origin.y (settable).
@property (nonatomic,assign) CGFloat top;
/// Alias of `top`: frame.origin.y (settable).
@property (nonatomic,assign) CGFloat y;
/// frame.origin.x + frame.size.width; setting moves the origin, keeping the width.
@property (nonatomic,assign) CGFloat right;
/// frame.origin.y + frame.size.height; setting moves the origin, keeping the height.
@property (nonatomic,assign) CGFloat bottom;
/// Shortcut for frame.size.width.
@property (nonatomic,assign) CGFloat width;
/// Shortcut for frame.size.height.
@property (nonatomic,assign) CGFloat height;
/// Shortcut for center.x.
@property (nonatomic,assign) CGFloat centerX;
/// Shortcut for center.y.
@property (nonatomic,assign) CGFloat centerY;
/// Shortcut for frame.origin.
@property (nonatomic,assign) CGPoint origin;
/// Shortcut for frame.size.
@property (nonatomic,assign) CGSize size;
/// Maps a UIViewAnimationCurve to the matching UIViewAnimationOptions value.
+ (UIViewAnimationOptions)animationOptionsWithCurve:(UIViewAnimationCurve)curve;
/// Shows a full-bounds "Loading..." overlay (spinner + label).
- (void) addLoadingView;
/// Shows a full-bounds loading overlay with the given text.
- (void) addLoadingViewWithText:(NSString *)text;
/// Removes the overlay added by the methods above, if present.
- (void) removeLoadingView;
@end
//
// UIView+RMAdditions.m
// RMCategories
//
// Created by Richard McClellan on 5/27/13.
// Copyright (c) 2013 Richard McClellan. All rights reserved.
//
#import "UIView+RMAdditions.h"
#import <objc/runtime.h>
@implementation UIView (RMAdditions)
// Frame/center shortcut accessors. Each setter copies the frame struct,
// mutates one field, and writes it back (frame fields cannot be set in place).
- (CGFloat)left {
    return self.frame.origin.x;
}
- (void)setLeft:(CGFloat)x {
    CGRect frame = self.frame;
    frame.origin.x = x;
    self.frame = frame;
}
- (CGFloat)x {
    return self.frame.origin.x;
}
- (void)setX:(CGFloat)x {
    CGRect frame = self.frame;
    frame.origin.x = x;
    self.frame = frame;
}
- (CGFloat)top {
    return self.frame.origin.y;
}
- (void)setTop:(CGFloat)y {
    CGRect frame = self.frame;
    frame.origin.y = y;
    self.frame = frame;
}
- (CGFloat)y {
    return self.frame.origin.y;
}
- (void)setY:(CGFloat)y {
    CGRect frame = self.frame;
    frame.origin.y = y;
    self.frame = frame;
}
- (CGFloat)right {
    return self.frame.origin.x + self.frame.size.width;
}
// Moves the origin so the right edge lands at `right`; width is preserved.
- (void)setRight:(CGFloat)right {
    CGRect frame = self.frame;
    frame.origin.x = right - frame.size.width;
    self.frame = frame;
}
- (CGFloat)bottom {
    return self.frame.origin.y + self.frame.size.height;
}
// Moves the origin so the bottom edge lands at `bottom`; height is preserved.
- (void)setBottom:(CGFloat)bottom {
    CGRect frame = self.frame;
    frame.origin.y = bottom - frame.size.height;
    self.frame = frame;
}
- (CGFloat)centerX {
    return self.center.x;
}
- (void)setCenterX:(CGFloat)centerX {
    self.center = CGPointMake(centerX, self.center.y);
}
- (CGFloat)centerY {
    return self.center.y;
}
- (void)setCenterY:(CGFloat)centerY {
    self.center = CGPointMake(self.center.x, centerY);
}
- (CGFloat)width {
    return self.frame.size.width;
}
- (void)setWidth:(CGFloat)width {
    CGRect frame = self.frame;
    frame.size.width = width;
    self.frame = frame;
}
- (CGFloat)height {
    return self.frame.size.height;
}
- (void)setHeight:(CGFloat)height {
    CGRect frame = self.frame;
    frame.size.height = height;
    self.frame = frame;
}
- (void)setOrigin:(CGPoint)origin {
    CGRect frame = self.frame;
    frame.origin = origin;
    self.frame = frame;
}
- (CGPoint)origin {
    return self.frame.origin;
}
- (void)setSize:(CGSize)size {
    CGRect frame = self.frame;
    frame.size = size;
    self.frame = frame;
}
- (CGSize)size {
    return self.frame.size;
}
// Maps a UIViewAnimationCurve to the matching UIViewAnimationOptions value.
+ (UIViewAnimationOptions)animationOptionsWithCurve:(UIViewAnimationCurve)curve {
    switch (curve) {
        case UIViewAnimationCurveEaseIn:
            return UIViewAnimationOptionCurveEaseIn;
        case UIViewAnimationCurveEaseInOut:
            return UIViewAnimationOptionCurveEaseInOut;
        case UIViewAnimationCurveEaseOut:
            return UIViewAnimationOptionCurveEaseOut;
        case UIViewAnimationCurveLinear:
            return UIViewAnimationOptionCurveLinear;
    }
    // FIX: control previously fell off the end of a value-returning function
    // for out-of-range curve values (e.g. the undocumented keyboard curve).
    return UIViewAnimationOptionCurveLinear;
}
// Associated-object key; the NSString constant's *address* (not its value) is the key.
static const void *kLoadingViewKey = @"LoadingViewKey";
// Shows a default "Loading..." overlay.
- (void) addLoadingView {
    [self addLoadingViewWithText:@"Loading..."];
}
// Covers the view with a light-gray overlay containing a spinner and `text`.
// The overlay is retained via an associated object so removeLoadingView can find it.
- (void) addLoadingViewWithText:(NSString *)text {
    [self removeLoadingView]; // never stack two overlays
    UIView *loadingView = [[UIView alloc] initWithFrame:self.bounds];
    [loadingView setAutoresizingMask:UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight];
    [loadingView setBackgroundColor:[UIColor colorWithWhite:0.9 alpha:1.0]];
    objc_setAssociatedObject(self, kLoadingViewKey, loadingView, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
    UILabel *loadingLabel = [[UILabel alloc] initWithFrame:CGRectZero];
    loadingLabel.backgroundColor = [UIColor clearColor];
    loadingLabel.font = [UIFont systemFontOfSize:15.0];
    loadingLabel.textColor = [UIColor blackColor];
    [loadingLabel setText:text];
    [loadingLabel sizeToFit];
    UIActivityIndicatorView *activityIndicator = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleGray];
    activityIndicator.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleBottomMargin;
    [activityIndicator startAnimating];
    // Center the spinner + label pair horizontally as one unit.
    activityIndicator.left = (self.width - activityIndicator.width - loadingLabel.width - 5.0) / 2;
    // FIX: vertical centering used self.centerY, which is in the *superview's*
    // coordinate space; subviews of the overlay need the local mid-height.
    activityIndicator.centerY = self.height / 2;
    [loadingView addSubview:activityIndicator];
    loadingLabel.left = activityIndicator.left + activityIndicator.width + 5.0;
    loadingLabel.centerY = self.height / 2;
    loadingLabel.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleBottomMargin;
    [loadingView addSubview:loadingLabel];
    [self addSubview:loadingView];
}
// Removes the overlay installed by -addLoadingViewWithText:, if any.
- (void) removeLoadingView {
    UIView *loadingView = objc_getAssociatedObject(self, kLoadingViewKey);
    [loadingView removeFromSuperview];
    // FIX: clear the association so the overlay is not retained after removal.
    objc_setAssociatedObject(self, kLoadingViewKey, nil, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
@end
[self create];
[[NSNotificationCenterdefaultCenter]addObserver:selfselector:@selector(playbackFinished:)name:AVPlayerItemDidPlayToEndTimeNotificationobject:nil];
#pragma mark - UI construction
// Builds a small preview player (95×95, top-right corner) for the captured
// clip and starts playback immediately.
- (void)create
{
    _playItem = [AVPlayerItem playerItemWithURL:self.videoUrl];
    _player = [AVPlayer playerWithPlayerItem:_playItem];
    _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
    _playerLayer.frame = CGRectMake(IPHONE_WIDTH - 102, 8, 95, 95);
    // Aspect-fill: crop rather than letterbox inside the square preview.
    _playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:_playerLayer];
    [_player play];
}
#pragma mark - Compressed-output URL

/// Returns a unique file URL inside Documents for the compressed movie.
/// A timestamp is baked into the name so successive exports never collide
/// (an alternative would be to test-and-delete a fixed file name).
- (NSURL *)compressedURL
{
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    [formatter setDateFormat:@"yyyy-MM-dd-HH:mm:ss"];
    NSString *documents = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
    NSString *fileName = [NSString stringWithFormat:@"%@output.mp4", [formatter stringFromDate:[NSDate date]]];
    return [NSURL fileURLWithPath:[documents stringByAppendingPathComponent:fileName]];
}
/// Returns the size in megabytes of the file at `path` (0 if it does not
/// exist). Bug fix: the original loaded the entire file into an NSData just
/// to read its length; file-system attributes give the size without I/O
/// proportional to the file size.
- (CGFloat)fileSize:(NSURL *)path
{
    NSDictionary *attrs = [[NSFileManager defaultManager] attributesOfItemAtPath:[path path] error:nil];
    return [attrs fileSize] / 1024.0 / 1024.0;
}
/// Re-encodes the movie at `inputURL` into an MP4 at `outputURL` using the
/// medium-quality preset, applying a video composition that fixes the
/// recording orientation. On success the input file is deleted (to reclaim
/// disk space) and the result is uploaded.
/// NOTE: the selector keeps its historical spelling ("Quailty") because
/// callers elsewhere depend on it.
///
/// @param inputURL  URL of the freshly recorded movie file.
/// @param outputURL Destination URL for the compressed MP4.
/// @param handler   Optional; invoked on the main queue with the finished
///                  export session regardless of its status.
- (void)convertVideoQuailtyWithInputURL:(NSURL *)inputURL
                              outputURL:(NSURL *)outputURL
                        completeHandler:(void (^)(AVAssetExportSession *))handler
{
    [HTXSVProgressHUDTool showWithStatus:@"视频压缩..."];
    AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
    // Alternatives: AVAssetExportPresetLowQuality / AVAssetExportPresetHighestQuality.
    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:avAsset
                                         presetName:AVAssetExportPresetMediumQuality];
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;
    exportSession.videoComposition = [self getVideoComposition:avAsset];
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // Bug fix: the completion handler fires on a private AVFoundation
        // queue; HUD updates and the upload kick-off belong on main.
        dispatch_async(dispatch_get_main_queue(), ^{
            [HTXSVProgressHUDTool dismiss];
            switch (exportSession.status) {
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"AVAssetExportSessionStatusCancelled");
                    break;
                case AVAssetExportSessionStatusUnknown:
                    NSLog(@"AVAssetExportSessionStatusUnknown");
                    break;
                case AVAssetExportSessionStatusWaiting:
                    NSLog(@"AVAssetExportSessionStatusWaiting");
                    break;
                case AVAssetExportSessionStatusExporting:
                    NSLog(@"AVAssetExportSessionStatusExporting");
                    break;
                case AVAssetExportSessionStatusCompleted: {
                    NSLog(@"AVAssetExportSessionStatusCompleted");
                    NSLog(@"压缩完毕,压缩后大小 %f MB", [self fileSize:outputURL]);
                    // Delete the raw recording so it stops occupying disk space.
                    [[NSFileManager defaultManager] removeItemAtPath:[inputURL path] error:nil];
                    [self uploadVideo:outputURL];
                    break;
                }
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"AVAssetExportSessionStatusFailed");
                    break;
            }
            // Bug fix: the original accepted `handler` but never called it.
            if (handler) {
                handler(exportSession);
            }
        });
    }];
}
#pragma mark - Fix saved-recording orientation

/// Builds an AVMutableVideoComposition whose render size and layer transform
/// honor the capture orientation, so portrait recordings export upright.
/// Returns nil when `asset` has no video track (a nil videoComposition is
/// valid on AVAssetExportSession and simply skips the transform).
- (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset
{
    // Bug fix: objectAtIndex:0 crashed on assets with no video track.
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!videoTrack) {
        return nil;
    }
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    CGSize videoSize = videoTrack.naturalSize;
    BOOL isPortrait_ = [self isVideoPortrait:asset];
    if (isPortrait_) {
        NSLog(@"video is portrait ");
        // naturalSize is reported in landscape terms; swap for portrait.
        videoSize = CGSizeMake(videoSize.height, videoSize.width);
    }
    composition.naturalSize = videoSize;
    videoComposition.renderSize = videoSize;
    // Bug fix: guard against a zero nominalFrameRate (division by zero);
    // fall back to a conventional 30 fps.
    float frameRate = videoTrack.nominalFrameRate > 0 ? videoTrack.nominalFrameRate : 30.0f;
    videoComposition.frameDuration = CMTimeMakeWithSeconds(1 / frameRate, 600);
    AVMutableCompositionTrack *compositionVideoTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                   ofTrack:videoTrack
                                    atTime:kCMTimeZero
                                     error:nil];
    // Apply the capture transform so the exported frames are upright.
    AVMutableVideoCompositionLayerInstruction *layerInst =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
    AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    inst.layerInstructions = @[layerInst];
    videoComposition.instructions = @[inst];
    return videoComposition;
}
/// Inspects the first video track's preferredTransform to decide whether the
/// asset was recorded in a portrait orientation.
/// @return YES for Portrait / PortraitUpsideDown; NO for landscape or when
///         the asset has no video track.
- (BOOL)isVideoPortrait:(AVAsset *)asset
{
    BOOL isPortrait = NO;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks firstObject];
        CGAffineTransform t = videoTrack.preferredTransform;
        // The four canonical capture transforms, matched component-wise.
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
            isPortrait = YES;       // Portrait
        } else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
            isPortrait = YES;       // PortraitUpsideDown
        } else if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
            isPortrait = NO;        // LandscapeRight
        } else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
            isPortrait = NO;        // LandscapeLeft
        }
    }
    return isPortrait;
}
/// Uploads the compressed movie at `URL` to KCreateDynamic as a
/// multipart/form-data POST alongside the signed text parameters.
/// On a "000000" return code the local file is deleted, refresh /
/// show-tab-bar notifications are posted, and the controller pops;
/// other outcomes surface an alert.
/// @param URL File URL of the compressed MP4 to send.
- (void)uploadVideo:(NSURL *)URL
{
    NSLog(@"上传视频");
    // Multipart boundary token.
    NSString *TWITTERFON_FORM_BOUNDARY = @"AaB03x";
    NSURL *url = [NSURL URLWithString:KCreateDynamic];
    NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
    // "--AaB03x" separates parts; "--AaB03x--" terminates the body.
    NSString *MPboundary = [[NSString alloc] initWithFormat:@"--%@", TWITTERFON_FORM_BOUNDARY];
    NSString *endMPboundary = [[NSString alloc] initWithFormat:@"%@--", MPboundary];

    // ---- Text parameters plus the MD5 request digest ----
    NSMutableString *body = [[NSMutableString alloc] init];
    NSString *memStr = [UserModel sharedInstance].memberId;
    NSMutableDictionary *params = [NSMutableDictionary dictionary];
    NSString *paraString = [MD5 encodeString:self.messageText1.text];
    [params setValue:memStr forKey:@"memberId"];
    [params setValue:self.messageText1.text forKey:@"message"];
    [params setValue:KAPPID forKey:@"appId"];
    [params setValue:KappSecret forKey:@"appSecret"];
    [params setValue:[CommonTools getTimeScamp] forKey:@"timestamp"];
    // Digest input: "key" + "value" pairs in the server-mandated order
    // (message uses its MD5-encoded form).
    NSArray *paramArr = @[[NSString stringWithFormat:@"memberId%@", params[@"memberId"]],
                          [NSString stringWithFormat:@"message%@", paraString],
                          [NSString stringWithFormat:@"appId%@", params[@"appId"]],
                          [NSString stringWithFormat:@"appSecret%@", params[@"appSecret"]],
                          [NSString stringWithFormat:@"timestamp%@", params[@"timestamp"]]];
    [params setValue:[CommonTools tokenmd5StringFromArray:paramArr] forKey:@"digest"];

    // One form-data part per key/value pair.
    for (NSString *key in params) {
        [body appendFormat:@"%@\r\n", MPboundary];
        [body appendFormat:@"Content-Disposition: form-data; name=\"%@\"\r\n\r\n", key];
        [body appendFormat:@"%@\r\n", params[key]];
    }
    NSLog(@"------------body---------%@", body);

    NSMutableData *myRequestData = [NSMutableData data];
    [myRequestData appendData:[body dataUsingEncoding:NSUTF8StringEncoding]];

    // ---- Movie part ----
    NSData *movieData = [NSData dataWithContentsOfURL:URL];
    NSMutableString *imgBody = [[NSMutableString alloc] init];
    [imgBody appendFormat:@"%@\r\n", MPboundary];
    [imgBody appendFormat:@"Content-Disposition: form-data; name=\"movie\"; filename=\"output.mp4\"\r\n"];
    [imgBody appendFormat:@"Content-Type: video/mp4\r\n\r\n"];
    [myRequestData appendData:[imgBody dataUsingEncoding:NSUTF8StringEncoding]];
    NSLog(@"----imgBody------- %@", imgBody);
    [myRequestData appendData:movieData];
    [myRequestData appendData:[@"\r\n" dataUsingEncoding:NSUTF8StringEncoding]];

    // Terminator: \r\n--AaB03x--
    NSString *end = [[NSString alloc] initWithFormat:@"\r\n%@", endMPboundary];
    [myRequestData appendData:[end dataUsingEncoding:NSUTF8StringEncoding]];

    NSString *content = [[NSString alloc] initWithFormat:@"multipart/form-data; boundary=%@", TWITTERFON_FORM_BOUNDARY];
    [request setValue:content forHTTPHeaderField:@"Content-Type"];
    [request setValue:[NSString stringWithFormat:@"%lu", (unsigned long)[myRequestData length]]
        forHTTPHeaderField:@"Content-Length"];
    [request setHTTPBody:myRequestData];
    [request setHTTPMethod:@"POST"];

    [HTXSVProgressHUDTool showWithStatus:@"正在发送..."];
    // NSURLConnection's sendAsynchronousRequest: is deprecated since iOS 9;
    // NSURLSession with an explicit hop to main preserves the original
    // main-queue completion behavior.
    NSURLSessionDataTask *task = [[NSURLSession sharedSession]
        dataTaskWithRequest:request
          completionHandler:^(NSData *data, NSURLResponse *response, NSError *connectionError) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (data) {
                NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:data
                                                                     options:NSJSONReadingMutableLeaves
                                                                       error:nil];
                NSLog(@"发表小视频状态请求 == %@", dict);
                if ([dict[@"returnCode"] isEqualToString:@"000000"]) {
                    // Server has the file; delete the local copy to free disk.
                    [[NSFileManager defaultManager] removeItemAtPath:[URL path] error:nil];
                    NSLog(@"上传视频成功");
                    PostStatusViewController *cvc = [[PostStatusViewController alloc] init];
                    cvc.view.tag = 300;
                    // Renamed from `dict` (original shadowed the response dict).
                    NSDictionary *freshInfo = [[NSDictionary alloc] initWithObjectsAndKeys:cvc, @"FreshView", nil];
                    NSNotification *notification = [NSNotification notificationWithName:@"FreshView"
                                                                                 object:nil
                                                                               userInfo:freshInfo];
                    [[NSNotificationCenter defaultCenter] postNotification:notification];
                    // Ask the tab bar to reappear.
                    UIViewController *cvc1 = [[UIViewController alloc] init];
                    cvc1.view.tag = 1;
                    NSDictionary *dict1 = [[NSDictionary alloc] initWithObjectsAndKeys:cvc1, @"Hidden", nil];
                    NSNotification *notification1 = [NSNotification notificationWithName:@"tongzhiHidden"
                                                                                  object:nil
                                                                                userInfo:dict1];
                    [[NSNotificationCenter defaultCenter] postNotification:notification1];
                    [self.navigationController popViewControllerAnimated:YES];
                } else if ([dict[@"returnCode"] integerValue] == 3) {
                    UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"提示"
                                                                        message:@"对不起,请求数据失败!"
                                                                       delegate:self
                                                              cancelButtonTitle:@"确定"
                                                              otherButtonTitles:nil, nil];
                    [alertView show];
                } else if ([dict[@"returnCode"] integerValue] == 43) {
                    UIAlertView *aler = [[UIAlertView alloc] initWithTitle:[NSString stringWithFormat:@"%@", dict[@"description"]]
                                                                   message:[NSString stringWithFormat:@"%@", dict[@"reason"]]
                                                                  delegate:self
                                                         cancelButtonTitle:@"确定"
                                                         otherButtonTitles:nil, nil];
                    [aler show];
                } else {
                    UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"提示"
                                                                        message:@"对不起,发布失败!"
                                                                       delegate:self
                                                              cancelButtonTitle:@"确定"
                                                              otherButtonTitles:nil, nil];
                    [alertView show];
                }
            } else {
                NSLog(@"上传失败");
                UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"提示"
                                                                    message:@"上传失败!"
                                                                   delegate:self
                                                          cancelButtonTitle:@"确定"
                                                          otherButtonTitles:nil, nil];
                [alertView show];
            }
            [HTXSVProgressHUDTool dismiss];
        });
    }];
    [task resume];
}
- iOS自定义拍摄小视频压缩上传
- iOS视频拍摄压缩上传和播放
- IOS视频压缩
- IOS视频压缩
- ios视频压缩
- IOS视频压缩
- iOS 视频压缩
- IOS视频压缩
- IOS视频压缩
- [iOS] 视频压缩
- ios-视频压缩
- iOS 视频压缩
- 视频压缩iOS
- iOS视频压缩存储至本地并上传至服务器
- [绍棠] iOS视频压缩存储至本地并上传至服务器
- iOS视频压缩存储至本地并上传至服务器
- iOS拍摄视频,自定义拍摄界面,高清压缩,添加水印
- iOS 之视频压缩
- Oracle 游标使用全解
- 遥感影像去除黑边,提取有效区域矢量文件
- (随笔)JSP 表单处理
- OpenCV Mat的常见属性
- oracle 日期操作 汇总
- iOS自定义拍摄小视频压缩上传
- evm6678l 上跑fatfs的实现过程
- redis.conf
- android 隐藏上下导航栏
- 文件上传并展示上传文件
- AM335x uboot spl分析
- Spring 注解开发
- Jquery对table数据按照某某列排序
- 记录笔记-关于CollectionView的表头表尾的配置