話不多說,上 DEMO 這裡用的是 SVProgressHUD, 由於 iOS10 的權限緣故,需要在 plist 裡添加字段,否則會崩潰,具體請看上一篇
//
// ViewController.m
// 錄制視頻
//
// Created by Amydom on 16/8/25.
// Copyright © 2016年 Amydom. All rights reserved.
//
#import "ViewController.h"
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <AVKit/AVKit.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <Photos/Photos.h>
#import <MobileCoreServices/MobileCoreServices.h>
#import <CoreMedia/CoreMedia.h>
#import "SVProgressHUD.h"
static NSString *const AssetCollectionName = @"錄制視頻";
// Class extension. The picker's delegate is set to self (see
// getVideoWithsourceType:shouldAsync:), so the controller must declare
// UIImagePickerControllerDelegate and UINavigationControllerDelegate conformance
// (the protocol list appears to have been stripped from the original listing).
@interface ViewController () <UIImagePickerControllerDelegate, UINavigationControllerDelegate>
// Displays the video's middle-frame poster image.
// NOTE(review): never added to the view hierarchy anywhere in this file, so it
// stays nil and assignments to it are no-ops — confirm it is wired up elsewhere.
@property (weak, nonatomic) UIImageView *centerFrameImageView;
// Displays the selected video's duration.
// NOTE(review): same concern as centerFrameImageView — never installed here.
@property (weak, nonatomic) UILabel *videoDurationLabel;
// YES = extract the poster frame asynchronously; NO = synchronously.
@property (nonatomic, assign) BOOL shouldAsync;
@end
@implementation ViewController
- (void)viewDidLoad {
// Build the two buttons (record video / choose video) as soon as the view loads.
[super viewDidLoad];
[self createBtn];
#pragma mark - 視頻相關
/*
 Notes on saving videos to the photo library:
 1. Saving media to the Camera Roll:
    - the UIImageWriteToSavedPhotosAlbum function
    - AssetsLibrary.framework (deprecated since iOS 9)
    - Photos.framework (available since iOS 8; fully replaces AssetsLibrary from iOS 9)
 2. Creating a new custom album:
    - AssetsLibrary.framework (deprecated since iOS 9)
    - Photos.framework (available since iOS 8; fully replaces AssetsLibrary from iOS 9)
 3. Adding Camera Roll media to a custom album:
    - AssetsLibrary.framework (deprecated since iOS 9)
    - Photos.framework (available since iOS 8; fully replaces AssetsLibrary from iOS 9)
 4. Photos.framework essentials:
    - a PHAsset object represents one photo or one video
    - a PHAssetCollection object represents one album
 5. PHAssetChangeRequest performs create / delete / modify operations on library assets.
 6. Any modification to the photo library must run inside one of:
    [[PHPhotoLibrary sharedPhotoLibrary] performChangesAndWait:error:];
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:completionHandler:];
 */
}
// Creates the two buttons: one starts a camera recording, one picks an
// existing video from the photo album.
// Fix: local variables renamed to lowerCamelCase per Cocoa convention
// (were UpperCamelCase `RecordVideo` / `SelectLocalVideo`).
- (void)createBtn{
    // "Start recording" button.
    UIButton *recordButton = [[UIButton alloc] initWithFrame:CGRectMake(100, 100, 100, 100)];
    [recordButton setTitle:@"開始錄制" forState:UIControlStateNormal];
    recordButton.backgroundColor = [UIColor lightGrayColor];
    [recordButton addTarget:self action:@selector(videoFromcamera) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:recordButton];

    // "Choose video" button.
    UIButton *selectVideoButton = [[UIButton alloc] initWithFrame:CGRectMake(100, 250, 100, 100)];
    [selectVideoButton setTitle:@"選擇視頻" forState:UIControlStateNormal];
    selectVideoButton.backgroundColor = [UIColor lightGrayColor];
    [selectVideoButton addTarget:self action:@selector(videoFromPhotos) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:selectVideoButton];
}
// Record a new video with the camera.
- (void)videoFromcamera{
// Camera recordings extract the poster frame asynchronously (shouldAsync:YES).
[self getVideoWithsourceType:UIImagePickerControllerSourceTypeCamera shouldAsync:YES];
}
// Pick an existing video from the photo album.
- (void)videoFromPhotos{
// UIImagePickerControllerSourceTypeSavedPhotosAlbum is the saved-photos album
// (screenshots and media the user saved). Poster frame is extracted synchronously.
[self getVideoWithsourceType:UIImagePickerControllerSourceTypeSavedPhotosAlbum shouldAsync:NO];
}
// Presents a UIImagePickerController restricted to movies for the given source
// type (camera or saved-photos album).
// shouldAsync: whether the poster frame will later be extracted asynchronously.
//
// BUGFIX: the camera-authorization check used to run for EVERY source type, so a
// user who had denied camera access could not pick a video from the album either.
// AVCaptureDevice authorization only governs the camera, so gate on the type.
- (void)getVideoWithsourceType:(UIImagePickerControllerSourceType)type shouldAsync:(BOOL)shouldAsync{
    if (type == UIImagePickerControllerSourceTypeCamera) {
        // Current camera authorization status.
        AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        if (authStatus == AVAuthorizationStatusRestricted
            || authStatus == AVAuthorizationStatusDenied) {
            // Access denied/restricted: tell the user how to re-enable it.
            [SVProgressHUD showInfoWithStatus:@"提醒用戶打開訪問開關 [設置] - [隱私] - [視頻] - [App]"];
            return;
        }
    }
    if ([UIImagePickerController isSourceTypeAvailable:type]) {
        UIImagePickerController *picker = [[UIImagePickerController alloc] init];
        picker.delegate = self;
        // Allow trimming/editing of the recorded clip.
        picker.allowsEditing = YES;
        picker.sourceType = type;
        // Restrict the picker to movies only.
        picker.mediaTypes = @[(NSString *)kUTTypeMovie];
        // Record the flag BEFORE presenting so the delegate callback sees the
        // correct value even if it fires immediately.
        self.shouldAsync = shouldAsync;
        [self presentViewController:picker animated:YES completion:NULL];
    } else {
        [SVProgressHUD showInfoWithStatus:@"手機不支持攝像"];
    }
}
#pragma mark - UIImagePickerControllerDelegate
// Called when the user finishes recording/picking a video. Saves the video to
// the photo library (ALAssetsLibrary before iOS 9, Photos framework from iOS 9),
// updates the duration label, extracts the middle-frame poster image, and kicks
// off compression/export to the Documents directory.
//
// BUGFIX: in the pre-iOS-9 branch the success/failure HUD messages were swapped
// ("保存視頻失敗" was shown when error == nil). Also guards against inserting a
// nil asset identifier into an array literal (would throw) when the Photos
// change request fails.
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info{
    // File URL of the recorded/picked movie.
    NSURL *videoUrl = [info objectForKey:UIImagePickerControllerMediaURL];
    if ([UIDevice currentDevice].systemVersion.doubleValue < 9.0) {
        // Pre-iOS 9: deprecated AssetsLibrary path.
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        dispatch_async(dispatch_get_global_queue(0, 0), ^{
            // Only album-compatible videos can be written to the Saved Photos album.
            if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:videoUrl]) {
                [library writeVideoAtPathToSavedPhotosAlbum:videoUrl completionBlock:^(NSURL *assetURL, NSError *error) {
                    // Hop back to the main queue for all UI updates.
                    dispatch_async(dispatch_get_main_queue(), ^{
                        if (error == nil) {
                            AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:assetURL options:nil];
                            Float64 duration = CMTimeGetSeconds(videoAsset.duration);
                            self.videoDurationLabel.text = [NSString stringWithFormat:@"視頻時長: %.0f秒",duration];
                            if (self.shouldAsync) {
                                // Get the center frame image asynchronously.
                                __weak __typeof(self)weakSelf = self;
                                [weakSelf centerFrameImageWithVideoURL:videoUrl completion:^(UIImage *image) {
                                    weakSelf.centerFrameImageView.image = image;
                                }];
                            } else {
                                // Get the center frame image synchronously.
                                UIImage *image = [self frameImageFromVideoURL:videoUrl];
                                self.centerFrameImageView.image = image;
                            }
                            // Compress and export the video to the output path.
                            NSString *name = [[NSDate date] description];
                            name = [NSString stringWithFormat:@"%@.mp4", name];
                            [self compressVideoWithVideoURL:videoUrl savedName:name completion:^(NSString *savedPath) {
                                if (savedPath) {
                                    NSLog(@"Compressed successfully. path: %@", savedPath);
                                } else {
                                    NSLog(@"Compressed failed");
                                }
                            }];
                            // BUGFIX: messages were swapped — success belongs here.
                            [SVProgressHUD showInfoWithStatus:@"保存視頻成功"];
                        } else {
                            [SVProgressHUD showInfoWithStatus:@"保存視頻失敗"];
                        }
                    });
                }];
            }
        });
    } else {
        // iOS 9+: Photos framework.
        PHPhotoLibrary *library = [PHPhotoLibrary sharedPhotoLibrary];
        dispatch_async(dispatch_get_main_queue(), ^{
            NSError *error = nil;
            // Local identifier of the PHAsset created in the Camera Roll.
            __block NSString *assetId = nil;
            // Local identifier of the PHAssetCollection created for our album.
            __block NSString *assetCollectionId = nil;
            // 1. Save the video to the Camera Roll.
            [library performChangesAndWait:^{
                assetId = [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:videoUrl].placeholderForCreatedAsset.localIdentifier;
            } error:&error];
            if (assetId == nil) {
                // Asset creation failed; nothing more we can do.
                [SVProgressHUD showErrorWithStatus:@"保存視頻失敗!"];
                return;
            }
            // 2. Look for a previously created custom album with our name.
            PHAssetCollection *createdAssetCollection = nil;
            PHFetchResult<PHAssetCollection *> *assetCollections = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeAlbum subtype:PHAssetCollectionSubtypeAlbumRegular options:nil];
            for (PHAssetCollection *assetCollection in assetCollections) {
                if ([assetCollection.localizedTitle isEqualToString:AssetCollectionName]) {
                    createdAssetCollection = assetCollection;
                    break;
                }
            }
            // 3. Create the custom album if it does not exist yet.
            if (createdAssetCollection == nil) {
                [library performChangesAndWait:^{
                    assetCollectionId = [PHAssetCollectionChangeRequest creationRequestForAssetCollectionWithTitle:AssetCollectionName].placeholderForCreatedAssetCollection.localIdentifier;
                } error:&error];
                if (assetCollectionId != nil) {
                    // Fetch the album object we just created.
                    createdAssetCollection = [PHAssetCollection fetchAssetCollectionsWithLocalIdentifiers:@[assetCollectionId] options:nil].firstObject;
                }
            }
            // 4. Add the Camera Roll asset to the custom album.
            if (createdAssetCollection != nil) {
                [library performChangesAndWait:^{
                    PHAssetCollectionChangeRequest *request = [PHAssetCollectionChangeRequest changeRequestForAssetCollection:createdAssetCollection];
                    [request addAssets:[PHAsset fetchAssetsWithLocalIdentifiers:@[assetId] options:nil]];
                } error:&error];
            }
            // Report the overall result.
            if (error) {
                [SVProgressHUD showErrorWithStatus:@"保存視頻失敗!"];
            } else {
                [SVProgressHUD showSuccessWithStatus:@"保存視頻成功!"];
            }
        });
    }
    [picker dismissViewControllerAnimated:YES completion:^{
        // Work around an iOS 8.0 issue where the frame changed after opening the
        // camera to record video.
        self.tabBarController.view.frame = [[UIScreen mainScreen] bounds];
        [self.tabBarController.view layoutIfNeeded];
    }];
}
#pragma mark - Synchronous center-frame extraction
// Returns the frame at the midpoint of the video as a poster image, or nil if
// the frame could not be generated.
- (UIImage *)frameImageFromVideoURL:(NSURL *)videoURL {
    AVAsset *videoAsset = [AVAsset assetWithURL:videoURL];
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:videoAsset];
    // Apply the track's preferred transform so the frame is correctly oriented.
    generator.appliesPreferredTrackTransform = YES;

    // Target the midpoint of the clip. A timescale of 600 is Apple's recommended
    // common denominator: it exactly represents 24 fps (film), 30 fps (NTSC) and
    // 25 fps (PAL) frame boundaries.
    Float64 totalSeconds = CMTimeGetSeconds([videoAsset duration]);
    CMTime midpointTime = CMTimeMakeWithSeconds(totalSeconds / 2.0, 600);

    NSError *generationError = nil;
    CMTime actualFrameTime;
    // copyCGImageAtTime: returns a CF-retained CGImageRef for the frame at (or
    // near) the requested time, so it must be released manually below.
    CGImageRef frameRef = [generator copyCGImageAtTime:midpointTime
                                            actualTime:&actualFrameTime
                                                 error:&generationError];
    UIImage *posterImage = nil;
    if (frameRef != NULL) {
        posterImage = [[UIImage alloc] initWithCGImage:frameRef];
        // Balance the CF retain from copyCGImageAtTime:.
        CGImageRelease(frameRef);
    }
    return posterImage;
}
#pragma mark - Asynchronous center-frame extraction
// Asynchronously extracts the frame at the midpoint of the video and delivers
// it (or nil on failure) to the completion block on the main queue. The
// generator API accepts multiple times, so this could fetch several frames at
// once; here only the midpoint is requested.
- (void)centerFrameImageWithVideoURL:(NSURL *)videoURL completion:(void (^)(UIImage *image))completion{
    AVAsset *videoAsset = [AVAsset assetWithURL:videoURL];
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:videoAsset];
    // Apply the track's preferred transform so the frame is correctly oriented.
    generator.appliesPreferredTrackTransform = YES;
    // Target the midpoint of the clip; a timescale of 600 exactly represents
    // 24 fps (film), 30 fps (NTSC) and 25 fps (PAL) frame boundaries.
    Float64 totalSeconds = CMTimeGetSeconds([videoAsset duration]);
    CMTime midpointTime = CMTimeMakeWithSeconds(totalSeconds / 2.0, 600);
    NSValue *requestedTime = [NSValue valueWithCMTime:midpointTime];
    [generator generateCGImagesAsynchronouslyForTimes:@[requestedTime]
                                    completionHandler:^(CMTime requested, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
        // Wrap the CGImage while the handler still owns it; nil on any failure.
        BOOL succeeded = (result == AVAssetImageGeneratorSucceeded && image != NULL);
        UIImage *frameImage = succeeded ? [[UIImage alloc] initWithCGImage:image] : nil;
        dispatch_async(dispatch_get_main_queue(), ^{
            if (completion) {
                completion(frameImage);
            }
        });
    }];
}
#pragma mark - Compress & export video
// Compresses the video at videoURL to 640x480 (when the asset supports that
// preset) and exports it to Documents/錄制視頻/<savedName>. A lower resolution is
// friendlier for uploading when the server cannot accept streamed uploads.
// The completion block is invoked on the main queue with the output path on
// success, or nil on failure.
//
// BUGFIXES vs. original:
//  - completion was never called on early-exit paths (unsupported preset fell
//    through silently; "no supported file types" returned without a callback);
//  - directory-creation success was judged by `error == nil` instead of the
//    method's BOOL return value, per Cocoa convention.
- (void)compressVideoWithVideoURL:(NSURL *)videoURL
                        savedName:(NSString *)savedName
                       completion:(void (^)(NSString *savedPath))completion{
    // Report failure on the main queue (shared by all failure paths).
    void (^reportFailure)(void) = ^{
        dispatch_async(dispatch_get_main_queue(), ^{
            if (completion) {
                completion(nil);
            }
        });
    };
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    // Find export presets compatible with this asset.
    NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:videoAsset];
    if (![presets containsObject:AVAssetExportPreset640x480]) {
        reportFailure();
        return;
    }
    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:videoAsset presetName:AVAssetExportPreset640x480];
    // NSHomeDirectory() is the app's sandbox root (Documents/Library/tmp + .app);
    // the app may only write inside it.
    NSString *documentsPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSString *folder = [documentsPath stringByAppendingPathComponent:@"錄制視頻"];
    BOOL isDirectory = NO;
    BOOL exists = [[NSFileManager defaultManager] fileExistsAtPath:folder isDirectory:&isDirectory];
    if (!exists || !isDirectory) {
        NSError *directoryError = nil;
        // Check the BOOL return value, not the error pointer.
        BOOL created = [[NSFileManager defaultManager] createDirectoryAtPath:folder withIntermediateDirectories:YES attributes:nil error:&directoryError];
        if (created) {
            [SVProgressHUD showInfoWithStatus:@"目錄創建成功"];
        } else {
            [SVProgressHUD showInfoWithStatus:@"目錄創建失敗"];
            reportFailure();
            return;
        }
    }
    NSString *outputPath = [folder stringByAppendingPathComponent:savedName];
    session.outputURL = [NSURL fileURLWithPath:outputPath];
    // Optimize for network use (streaming-friendly layout).
    session.shouldOptimizeForNetworkUse = YES;
    NSArray *supportedTypes = session.supportedFileTypes;
    if ([supportedTypes containsObject:AVFileTypeMPEG4]) {
        session.outputFileType = AVFileTypeMPEG4;
    } else if (supportedTypes.count == 0) {
        [SVProgressHUD showInfoWithStatus:@"No supported file types"];
        reportFailure();
        return;
    } else {
        session.outputFileType = [supportedTypes objectAtIndex:0];
    }
    // Export asynchronously and report the result on the main queue.
    [session exportAsynchronouslyWithCompletionHandler:^{
        NSString *savedPath = ([session status] == AVAssetExportSessionStatusCompleted) ? [session.outputURL path] : nil;
        dispatch_async(dispatch_get_main_queue(), ^{
            if (completion) {
                completion(savedPath);
            }
        });
    }];
}
// Standard UIViewController memory-warning hook; nothing cached here to release.
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
#pragma mark - 相關屬性參數
/**
* (1)代理
delegate
(2)幾個基本屬性設置
sourceType //設置資源獲取類型
allowsEditing //是否允許圖片編輯
(3)幾個判斷類方法
*是否可以獲取該類型資源
+ (BOOL)isSourceTypeAvailable:(UIImagePickerControllerSourceType)sourceType;
*是否可以獲取該類型相機(前置和後置 )
+ (BOOL)isCameraDeviceAvailable:(UIImagePickerControllerCameraDevice)cameraDevice;
*是否可以獲取閃光燈
+ (BOOL)isFlashAvailableForCameraDevice:(UIImagePickerControllerCameraDevice)cameraDevice;
(4)代理方法(iOS4後僅存2個可用)
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info;
- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker;
* 參數info中的鍵
NSString *const UIImagePickerControllerMediaType ;指定用戶選擇的媒體類型(文章最後進行擴展)
NSString *const UIImagePickerControllerOriginalImage ;原始圖片
NSString *const UIImagePickerControllerEditedImage ;修改後的圖片
NSString *const UIImagePickerControllerCropRect ;裁剪尺寸
NSString *const UIImagePickerControllerMediaURL ;媒體的URL
NSString *const UIImagePickerControllerReferenceURL ;原件的URL
NSString *const UIImagePickerControllerMediaMetadata;當數據來源是照相機的時候這個值才有效
* UIImagePickerControllerMediaType
UIImagePickerControllerMediaType 包含著KUTTypeImage 和KUTTypeMovie
KUTTypeImage 包含:
const CFStringRef kUTTypeImage ;抽象的圖片類型
const CFStringRef kUTTypeJPEG ;
const CFStringRef kUTTypeJPEG2000 ;
const CFStringRef kUTTypeTIFF ;
const CFStringRef kUTTypePICT ;
const CFStringRef kUTTypeGIF ;
const CFStringRef kUTTypePNG ;
const CFStringRef kUTTypeQuickTimeImage ;
const CFStringRef kUTTypeAppleICNS
const CFStringRef kUTTypeBMP;
const CFStringRef kUTTypeICO;
KUTTypeMovie 包含:
const CFStringRef kUTTypeAudiovisualContent ;抽象的聲音視頻
const CFStringRef kUTTypeMovie ;抽象的媒體格式(聲音和視頻)
const CFStringRef kUTTypeVideo ;只有視頻沒有聲音
const CFStringRef kUTTypeAudio ;只有聲音沒有視頻
const CFStringRef kUTTypeQuickTimeMovie ;
const CFStringRef kUTTypeMPEG ;
const CFStringRef kUTTypeMPEG4 ;
const CFStringRef kUTTypeMP3 ;
const CFStringRef kUTTypeMPEG4Audio ;
const CFStringRef kUTTypeAppleProtectedMPEG4Audio;
*/
@end