First, a quick look at AVFoundation.
AVFoundation is an important framework on iOS. It is Apple's high-level framework for working with time-based media on both OS X and iOS, and its design relies heavily on multithreading. Essentially all of the software and hardware control related to audio and video goes through this framework.
AVFoundation is one of several frameworks that can be used to play and create time-based audiovisual media. It provides a detailed Objective-C interface to time-based audiovisual data: you can use it to inspect, create, edit, and re-encode media files, obtain input streams from devices, and manipulate video during real-time capture and playback. It takes full advantage of multi-core hardware, making heavy use of blocks and Grand Central Dispatch (GCD) to move expensive processing onto background threads, and it automatically provides hardware-accelerated operations so that apps run with good performance on most devices.
AVAssetWriter and AVPlayer, both used below, are also part of AVFoundation.
GitHub code repository
1. Capturing audio and video
/** Coordinates the flow of data between the input and output devices */
@property (strong, nonatomic) AVCaptureSession *captureSession;
/** Video input */
@property (nonatomic, strong) AVCaptureDeviceInput *videoInput;
/** Video output */
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;
/** Audio output */
@property (nonatomic, strong) AVCaptureAudioDataOutput *audioOutput;
/** Preview layer */
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
These are the classes used to capture the audio and video.
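Capturing also requires camera and microphone permission (NSCameraUsageDescription and NSMicrophoneUsageDescription in Info.plist), which is not shown in the original code. A minimal sketch of a permission check, assuming a hypothetical helper method checkCaptureAuthorizationWithCompletion: that would be called before configuring the session:

- (void)checkCaptureAuthorizationWithCompletion:(void (^)(BOOL granted))completion
{
    //Camera permission; microphone permission works the same way with AVMediaTypeAudio
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized)
    {
        completion(YES);
    }
    else if (status == AVAuthorizationStatusNotDetermined)
    {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                completion(granted);
            });
        }];
    }
    else
    {
        //Denied or restricted
        completion(NO);
    }
}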
First, initialize the AVCaptureSession:
- (AVCaptureSession *)captureSession
{
if (_captureSession == nil)
{
_captureSession = [[AVCaptureSession alloc] init];
if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetHigh])
{
_captureSession.sessionPreset = AVCaptureSessionPresetHigh;
}
}
return _captureSession;
}
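The delegate callbacks below are dispatched on self.videoQueue, which is not listed in this section. It is assumed to be a serial GCD queue, for example a lazily created one like this sketch (the queue label is arbitrary):

/** Serial queue for sample-buffer callbacks (assumed; referenced later as self.videoQueue) */
@property (nonatomic, strong) dispatch_queue_t videoQueue;

- (dispatch_queue_t)videoQueue
{
    if (_videoQueue == nil)
    {
        //A serial queue keeps the video and audio callbacks in order
        _videoQueue = dispatch_queue_create("com.capture.videoQueue", DISPATCH_QUEUE_SERIAL);
    }
    return _videoQueue;
}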
Next, initialize the video and audio inputs and outputs:
/**
* Set up the video capture input and output
*/
- (void)setupVideo
{
AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
if (!captureDevice)
{
NSLog(@"取得后置摄像头时出现问题.");
return;
}
NSError *error = nil;
AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
if (error)
{
NSLog(@"取得设备输入videoInput对象时出错,错误原因:%@", error);
return;
}
//Add the device input to the session
if ([self.captureSession canAddInput:videoInput])
{
[self.captureSession addInput:videoInput];
}
self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
self.videoOutput.alwaysDiscardsLateVideoFrames = NO; //YES (the default) drops late frames to save memory; NO here so no frames are dropped while recording
[self.videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
[self.videoOutput setSampleBufferDelegate:self queue:self.videoQueue];
if ([self.captureSession canAddOutput:self.videoOutput])
{
[self.captureSession addOutput:self.videoOutput];
}
AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
[connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
self.videoInput = videoInput;
}
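setupVideo calls getCameraDeviceWithPosition:, which is not listed in this section. A minimal sketch of what it might look like, using the pre-iOS 10 devicesWithMediaType: API (on iOS 10+ an AVCaptureDeviceDiscoverySession could be used instead):

/**
 * Return the camera at the given position (a sketch; the original implementation is not shown)
 */
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position
{
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras)
    {
        if (camera.position == position)
        {
            return camera;
        }
    }
    return nil;
}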
/**
* Set up the audio capture input and output
*/
- (void)setupAudio
{
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio] error:&error];
if (error)
{
NSLog(@"取得设备输入audioInput对象时出错,错误原因:%@", error);
return;
}
if ([self.captureSession canAddInput:audioInput])
{
[self.captureSession addInput:audioInput];
}
self.audioOutput = [[AVCaptureAudioDataOutput alloc] init];
[self.audioOutput setSampleBufferDelegate:self queue:self.videoQueue];
if([self.captureSession canAddOutput:self.audioOutput])
{
[self.captureSession addOutput:self.audioOutput];
}
}
Set up the preview layer:
/**
* Set up the preview layer
*/
- (void)setupCaptureVideoPreviewLayer
{
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspect; //preserve the aspect ratio within the layer bounds
[_captureVideoPreviewLayer setFrame:self.superView.bounds];
[self.superView.layer addSublayer:_captureVideoPreviewLayer];
}
Then start the session and begin capturing.
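Starting and stopping the session is not shown in the original listing. A minimal sketch, assuming hypothetical startCapture/stopCapture methods; startRunning blocks until the session starts, so it is called off the main thread here:

- (void)startCapture
{
    dispatch_async(self.videoQueue, ^{
        if (!self.captureSession.isRunning)
        {
            [self.captureSession startRunning];
        }
    });
}

- (void)stopCapture
{
    dispatch_async(self.videoQueue, ^{
        if (self.captureSession.isRunning)
        {
            [self.captureSession stopRunning];
        }
    });
}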
The captured audio and video data is delivered through this delegate callback:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
@autoreleasepool
{
//Video
if (connection == [self.videoOutput connectionWithMediaType:AVMediaTypeVideo])
{
@synchronized(self)
{
if (self.captureBlock) {
self.captureBlock(sampleBuffer, AVMediaTypeVideo);
}
}
}
//Audio
if (connection == [self.audioOutput connectionWithMediaType:AVMediaTypeAudio])
{
@synchronized(self)
{
if (self.captureBlock) {
self.captureBlock(sampleBuffer, AVMediaTypeAudio);
}
}
}
}
}
Here the captured sample buffers are handed out through the block so that the recording code can process them.
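How the block is hooked up to the recording code is not shown. A minimal sketch, assuming a capture wrapper exposed as self.capture (with the captureBlock property above), a recorder exposed as self.recorder implementing appendSampleBuffer:ofMediaType: from the next section, and an isRecording flag; all three names are hypothetical:

__weak __typeof(self)weakSelf = self;
self.capture.captureBlock = ^(CMSampleBufferRef sampleBuffer, NSString *mediaType) {
    //Forward every captured buffer to the writer while recording
    if (weakSelf.isRecording)
    {
        [weakSelf.recorder appendSampleBuffer:sampleBuffer ofMediaType:mediaType];
    }
};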
2. Recording audio and video
/** Writes the audio and video to a file */
@property (nonatomic, strong) AVAssetWriter *assetWriter;
/** Video writer input */
@property (nonatomic, strong) AVAssetWriterInput *assetWriterVideoInput;
/** Audio writer input */
@property (nonatomic, strong) AVAssetWriterInput *assetWriterAudioInput;
AVAssetWriter is used to record the audio and video.
First, initialize the AVAssetWriter:
/**
* Configure the asset writer
*/
- (void)setUpWriter
{
if (self.videoURL == nil)
{
return;
}
self.assetWriter = [AVAssetWriter assetWriterWithURL:self.videoURL fileType:AVFileTypeMPEG4 error:nil];
//Output video size
NSInteger numPixels = kScreenWidth * kScreenHeight;
//Bits per pixel
CGFloat bitsPerPixel = 12.0;
NSInteger bitsPerSecond = numPixels * bitsPerPixel;
// Bit-rate and frame-rate settings
NSDictionary *compressionProperties = @{ AVVideoAverageBitRateKey : @(bitsPerSecond),
AVVideoExpectedSourceFrameRateKey : @(15),
AVVideoMaxKeyFrameIntervalKey : @(15),
AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel };
CGFloat width = kScreenWidth;
CGFloat height = kScreenHeight;
//Video settings
NSDictionary *videoCompressionSettings = @{ AVVideoCodecKey : AVVideoCodecTypeH264,
AVVideoWidthKey : @(width * 2),
AVVideoHeightKey : @(height * 2),
AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
AVVideoCompressionPropertiesKey : compressionProperties };
_assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoCompressionSettings];
//expectsMediaDataInRealTime must be YES because the data comes from the capture session in real time
_assetWriterVideoInput.expectsMediaDataInRealTime = YES;
// Audio settings
NSDictionary *audioCompressionSettings = @{ AVEncoderBitRatePerChannelKey : @(28000),
AVFormatIDKey : @(kAudioFormatMPEG4AAC),
AVNumberOfChannelsKey : @(1),
AVSampleRateKey : @(22050) };
_assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioCompressionSettings];
_assetWriterAudioInput.expectsMediaDataInRealTime = YES;
if ([_assetWriter canAddInput:_assetWriterVideoInput])
{
[_assetWriter addInput:_assetWriterVideoInput];
}
else
{
NSLog(@"AssetWriter videoInput append Failed");
}
if ([_assetWriter canAddInput:_assetWriterAudioInput])
{
[_assetWriter addInput:_assetWriterAudioInput];
}
else
{
NSLog(@"AssetWriter audioInput Append Failed");
}
_canWrite = NO;
}
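setUpWriter assumes self.videoURL already points at a writable file, and the export code later calls createVideoFilePath, which is not listed here. A minimal sketch of both, writing into the temporary directory (the file-naming scheme is an assumption):

/**
 * Build a unique .mp4 path in the temporary directory (a sketch; the original helper is not shown)
 */
- (NSString *)createVideoFilePath
{
    NSString *fileName = [NSString stringWithFormat:@"%@.mp4", [[NSUUID UUID] UUIDString]];
    return [NSTemporaryDirectory() stringByAppendingPathComponent:fileName];
}

//Before calling setUpWriter, point videoURL at a fresh file:
self.videoURL = [NSURL fileURLWithPath:[self createVideoFilePath]];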
Then append the captured sample buffers:
/**
* Append sample buffers to the writer
*/
- (void)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer ofMediaType:(NSString *)mediaType
{
if (sampleBuffer == NULL)
{
NSLog(@"empty sampleBuffer");
return;
}
@autoreleasepool
{
if (!self.canWrite && mediaType == AVMediaTypeVideo && self.assetWriter && self.assetWriter.status != AVAssetWriterStatusWriting)
{
[self.assetWriter startWriting];
[self.assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
self.canWrite = YES;
}
//Append video data
if (mediaType == AVMediaTypeVideo && self.assetWriterVideoInput.readyForMoreMediaData)
{
if (![self.assetWriterVideoInput appendSampleBuffer:sampleBuffer])
{
@synchronized (self)
{
[self stopVideoRecorder];
}
}
}
//Append audio data
if (mediaType == AVMediaTypeAudio && self.assetWriterAudioInput.readyForMoreMediaData)
{
if (![self.assetWriterAudioInput appendSampleBuffer:sampleBuffer])
{
@synchronized (self)
{
[self stopVideoRecorder];
}
}
}
}
}
When recording finishes, save the file and preview it.
/**
* Stop recording
*/
- (void)stopVideoRecorder
{
__weak __typeof(self)weakSelf = self;
if(_assetWriter && _assetWriter.status == AVAssetWriterStatusWriting)
{
[_assetWriter finishWritingWithCompletionHandler:^{
weakSelf.canWrite = NO;
weakSelf.assetWriter = nil;
weakSelf.assetWriterAudioInput = nil;
weakSelf.assetWriterVideoInput = nil;
}];
}
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.3f * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[weakSelf saveVideo];
[weakSelf previewVideoAfterShoot];
});
}
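The 0.3-second delay is a timing heuristic that waits for finishWritingWithCompletionHandler to finish. An alternative sketch that drives the follow-up work from the completion handler itself (the method name stopVideoRecorderAlternative is hypothetical):

- (void)stopVideoRecorderAlternative
{
    __weak __typeof(self)weakSelf = self;
    if (_assetWriter && _assetWriter.status == AVAssetWriterStatusWriting)
    {
        [_assetWriter finishWritingWithCompletionHandler:^{
            weakSelf.canWrite = NO;
            weakSelf.assetWriter = nil;
            weakSelf.assetWriterAudioInput = nil;
            weakSelf.assetWriterVideoInput = nil;
            //Save and preview only after the file has actually been finalized
            dispatch_async(dispatch_get_main_queue(), ^{
                [weakSelf saveVideo];
                [weakSelf previewVideoAfterShoot];
            });
        }];
    }
}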
3. Saving and playing back the video
Before saving, the recording is exported (and trimmed to at most 10 seconds here) with AVAssetExportSession:
- (void)cropWithVideoUrlStr:(NSURL *)videoUrl completion:(void (^)(NSURL *outputURL, Float64 videoDuration, BOOL isSuccess))completionHandle
{
AVURLAsset *asset =[[AVURLAsset alloc] initWithURL:videoUrl options:nil];
//Get the total duration of the video
Float64 endTime = CMTimeGetSeconds(asset.duration);
if (endTime > 10)
{
endTime = 10.0f;
}
Float64 startTime = 0;
NSString *outputFilePath = [self createVideoFilePath];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:asset];
if ([compatiblePresets containsObject:AVAssetExportPresetMediumQuality])
{
//Use the same preset here that was checked for compatibility above
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
NSURL *outputURL = outputFileUrl;
exportSession.outputURL = outputURL;
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.shouldOptimizeForNetworkUse = YES;
CMTime start = CMTimeMakeWithSeconds(startTime, asset.duration.timescale);
CMTime duration = CMTimeMakeWithSeconds(endTime - startTime,asset.duration.timescale);
CMTimeRange range = CMTimeRangeMake(start, duration);
exportSession.timeRange = range;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status]) {
case AVAssetExportSessionStatusFailed:
{
NSLog(@"合成失败:%@", [[exportSession error] description]);
completionHandle(outputURL, endTime, NO);
}
break;
case AVAssetExportSessionStatusCancelled:
{
completionHandle(outputURL, endTime, NO);
}
break;
case AVAssetExportSessionStatusCompleted:
{
completionHandle(outputURL, endTime, YES);
}
break;
default:
{
completionHandle(outputURL, endTime, NO);
} break;
}
}];
}
}
Then save the video to the system photo library.
The Photos framework is used for saving:
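Writing to the photo library also requires permission (NSPhotoLibraryUsageDescription, or NSPhotoLibraryAddUsageDescription on iOS 11+, in Info.plist). A minimal sketch of an authorization check before calling saveVideo; the wrapper method name is hypothetical:

- (void)saveVideoIfAuthorized
{
    [PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus status) {
        if (status == PHAuthorizationStatusAuthorized)
        {
            dispatch_async(dispatch_get_main_queue(), ^{
                [self saveVideo];
            });
        }
        else
        {
            NSLog(@"Photo library access was not granted.");
        }
    }];
}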
/**
Save the video
*/
- (void)saveVideo
{
[self cropWithVideoUrlStr:self.videoURL completion:^(NSURL *videoUrl, Float64 videoDuration, BOOL isSuccess) {
if (isSuccess)
{
NSDictionary *infoDictionary = [[NSBundle mainBundle] infoDictionary];
NSString * assetCollectionName = [infoDictionary objectForKey:@"CFBundleDisplayName"];
if (assetCollectionName == nil)
{
assetCollectionName = @"视频相册";
}
__block NSString *blockAssetCollectionName = assetCollectionName;
__block NSURL *blockVideoUrl = videoUrl;
PHPhotoLibrary *library = [PHPhotoLibrary sharedPhotoLibrary];
dispatch_async(dispatch_get_main_queue(), ^{
NSError *error = nil;
__block NSString *assetId = nil;
__block NSString *assetCollectionId = nil;
// Save the video to the Camera Roll
[library performChangesAndWait:^{
assetId = [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:blockVideoUrl].placeholderForCreatedAsset.localIdentifier;
} error:&error];
NSLog(@"error1: %@", error);
// Look for a previously created custom album with this name
PHAssetCollection *createdAssetCollection = nil;
PHFetchResult <PHAssetCollection*> *assetCollections = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeAlbum subtype:PHAssetCollectionSubtypeAlbumRegular options:nil];
for (PHAssetCollection *assetCollection in assetCollections)
{
if ([assetCollection.localizedTitle isEqualToString:blockAssetCollectionName])
{
createdAssetCollection = assetCollection;
break;
}
}
//If the custom album has not been created yet
if (createdAssetCollection == nil)
{
//Create a new custom album
[library performChangesAndWait:^{
assetCollectionId = [PHAssetCollectionChangeRequest creationRequestForAssetCollectionWithTitle:blockAssetCollectionName].placeholderForCreatedAssetCollection.localIdentifier;
} error:&error];
NSLog(@"error2: %@", error);
//Fetch the album that was just created
createdAssetCollection = [PHAssetCollection fetchAssetCollectionsWithLocalIdentifiers:@[assetCollectionId] options:nil].firstObject;
}
// Add the video in the Camera Roll to the custom album
[library performChangesAndWait:^{
PHAssetCollectionChangeRequest *request = [PHAssetCollectionChangeRequest changeRequestForAssetCollection:createdAssetCollection];
[request addAssets:[PHAsset fetchAssetsWithLocalIdentifiers:@[assetId] options:nil]];
} error:&error];
NSLog(@"error3: %@", error);
});
}
else
{
NSLog(@"保存视频失败!");
[[NSFileManager defaultManager] removeItemAtURL:self.videoURL error:nil];
self.videoURL = nil;
[[NSFileManager defaultManager] removeItemAtURL:videoUrl error:nil];
}
}];
}
Now the video can be played back.
AVPlayer is used for playback:
/** Video preview view */
@property (strong, nonatomic) UIView *videoPreviewContainerView;
/** Player */
@property (strong, nonatomic) AVPlayer *player;
- (void)previewVideoAfterShoot
{
if (self.videoURL == nil || self.videoPreviewContainerView != nil)
{
return;
}
AVURLAsset *asset = [AVURLAsset assetWithURL:self.videoURL];
// Initialize the AVPlayer and its container view
self.videoPreviewContainerView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, kScreenWidth, kScreenHeight)];
self.videoPreviewContainerView.backgroundColor = [UIColor blackColor];
AVPlayerItem * playerItem = [AVPlayerItem playerItemWithAsset:asset];
self.player = [[AVPlayer alloc] initWithPlayerItem:playerItem];
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
playerLayer.frame = CGRectMake(0, 0, kScreenWidth, kScreenHeight);
playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
[self.videoPreviewContainerView.layer addSublayer:playerLayer];
// Remaining UI layout
[self.view addSubview:self.videoPreviewContainerView];
[self.view bringSubviewToFront:self.videoPreviewContainerView];
// Loop the preview video when it reaches the end
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playVideoFinished:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem];
// Start playback
[self.player play];
}
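The playVideoFinished: handler registered above is not listed. A minimal sketch that loops the preview by seeking back to the start when playback ends:

/**
 * Restart playback when the preview reaches the end (a sketch; the original handler is not shown)
 */
- (void)playVideoFinished:(NSNotification *)notification
{
    AVPlayerItem *playerItem = notification.object;
    [playerItem seekToTime:kCMTimeZero completionHandler:nil];
    [self.player play];
}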