最近在做視頻壓縮剪切的模塊。開始時完全沒有思路,太費勁了,一度沒辦法靜下心來繼續研究;如今終於有點小成果,在此做個記錄,既方便自己下次使用,也希望能幫助到別人。
說一下需求: 我的需求是將一段視頻壓縮成指定格式(比如320 X 480),基於AVFoundation框架
下面上代碼:
storyBoard上直接拉拽UIButton,並關聯事件
#import <AVFoundation/AVFoundation.h>
#import "ParseViewController.h"
@interface ParseViewController ()
@end

@implementation ParseViewController

- (void)viewDidLoad {
    [super viewDidLoad];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}

#pragma mark - Actions

/// Wired to a UIButton in the storyboard; kicks off the test export.
- (IBAction)parse:(UIButton *)sender {
    [self exportVideo];
}

#pragma mark - Methods

/// Loads a test clip from a hard-coded desktop path and compresses it to
/// 320x480 MP4 written next to it.
/// NOTE(review): absolute desktop paths only work in the simulator — replace
/// with NSFileManager / bundle-derived paths before running on a device.
- (void)exportVideo {
    NSString *path = @"/Users/vs/Desktop/BeforParse.m4v";      // source clip
    NSString *outputFilePath = @"/Users/vs/Desktop/hehe.mp4";  // export destination

    // AVAssetExportSession fails (AVErrorFileAlreadyExists) when the output
    // file is already present, so clear any stale result before exporting.
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:NULL];

    [self parseVideoWithInputUrl:[NSURL fileURLWithPath:path]
                       outputUrl:[NSURL fileURLWithPath:outputFilePath]
                     blockHandle:^(AVAssetExportSession *avAssetExportSession) {
        switch (avAssetExportSession.status) {
            case AVAssetExportSessionStatusFailed:
                NSLog(@"exportSessionError: %@", avAssetExportSession.error);
                break;
            case AVAssetExportSessionStatusExporting:
                NSLog(@"AVAssetExportSessionExporting");
                break;
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"exportSessionCompleted");
                // The completion handler is not guaranteed to run on the main
                // queue; hop back before doing anything UI-related.
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self doSomeThings];
                });
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"exportSessionCancelled");
                break;
            default:
                // Unknown / Waiting — nothing to do for a one-shot export.
                break;
        }
    }];
}

/// Re-encodes the video track of `inputUrl` into a 320x480 MP4 at `outputUrl`.
/// @param inputUrl  File URL of the source asset.
/// @param outputUrl File URL to write the compressed result to.
/// @param handle    Invoked once, from the export session's completion
///                  handler, with the finished (or failed) session.
- (void)parseVideoWithInputUrl:(NSURL *)inputUrl
                     outputUrl:(NSURL *)outputUrl
                   blockHandle:(void (^)(AVAssetExportSession *avAssetExportSession))handle {
    AVAsset *avAsset = [AVAsset assetWithURL:inputUrl];
    CMTime assetTime = [avAsset duration];
    NSLog(@"video duration: %f", CMTimeGetSeconds(assetTime));

    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    // firstObject is nil-safe; indexing [0] would crash on audio-only assets.
    AVAssetTrack *videoTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!videoTrack) {
        NSLog(@"no video track in %@", inputUrl);
        return;
    }

    NSError *error = nil;
    // Check the BOOL return value, not the error pointer (Cocoa convention:
    // *error may be untouched on success).
    if (![compTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration)
                            ofTrack:videoTrack
                             atTime:kCMTimeZero
                              error:&error]) {
        NSLog(@"insertTimeRange error: %@", error);
        return;
    }

    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compTrack];

    CGSize naturalSize = videoTrack.naturalSize;
    CGFloat rateW = 320.0 / naturalSize.width;   // horizontal scale factor
    CGFloat rateH = 480.0 / naturalSize.height;  // vertical scale factor

    // Apply the track's preferredTransform (orientation) first, then scale
    // down to the 320x480 render size. The previous code built a transform
    // from preferredTransform and then overwrote it with a bare scale,
    // which dropped the orientation fix for rotated (portrait) footage.
    CGAffineTransform layerTransform =
        CGAffineTransformConcat(videoTrack.preferredTransform,
                                CGAffineTransformMakeScale(rateW, rateH));
    [layerInstruction setTransform:layerTransform atTime:kCMTimeZero];
    [layerInstruction setOpacity:0.0 atTime:assetTime];

    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, assetTime);
    instruction.layerInstructions = @[layerInstruction];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = @[instruction];
    videoComposition.renderSize = CGSizeMake(320.f, 480.f);  // output dimensions
    videoComposition.frameDuration = CMTimeMake(1, 10);      // 10 fps

    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:composition
                                         presetName:AVAssetExportPreset640x480];
    exportSession.videoComposition = videoComposition;
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.outputURL = outputUrl;
    exportSession.shouldOptimizeForNetworkUse = YES;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (handle) {
            handle(exportSession);
        }
    }];
}

/// Hook for work to run on the main queue after a successful export.
- (void)doSomeThings {
}

@end
這樣對比一下壓縮處理過後的視頻,尺寸、大小都變了。這是我暫時研究出的,如有大家有好的demo,可以向我提出,因爲視頻這塊比較陌生,大家一起學習!
可以參考一下這個博客:blog.csdn.net/lookyou111/article/details/25625609