I recently had a requirement to take photos without any sound. Shooting with AVCaptureStillImageOutput produces a shutter sound. I found the trick online of playing an inverted copy of the shutter audio to cancel it, but because I needed burst shooting, the two sounds would occasionally drift out of sync and some shutter clicks still slipped through. After some more digging I switched to AVCaptureVideoDataOutput and grabbed the images from the video stream instead, which produces no shutter sound at all. The code is as follows:
// Initialization
- (void)initAVCaptureSession {
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    NSError *error;
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // The device must be locked before changing this setting and unlocked
    // afterwards, otherwise the app crashes.
    if ([device lockForConfiguration:&error]) {
        // Turn the flash off.
        if ([device isFlashModeSupported:AVCaptureFlashModeOff]) {
            [device setFlashMode:AVCaptureFlashModeOff];
        }
        [device unlockForConfiguration];
    }
    self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
    if (error) {
        NSLog(@"%@", error);
    }
    // Create a VideoDataOutput; it is added to the session below.
    self.imageOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Configure the output: deliver sample buffers on a serial queue.
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [self.imageOutput setSampleBufferDelegate:self queue:queue];
    // Specify the pixel format (must match what -imageFromSampleBuffer: expects).
    self.imageOutput.videoSettings = [NSDictionary dictionaryWithObject:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                      forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    // Assign the session to an ivar before wiring up inputs and outputs,
    // so the canAddInput:/canAddOutput: checks below run against a live session.
    [self setSession:session];
    if ([self.session canAddInput:self.videoInput]) {
        [self.session addInput:self.videoInput];
    }
    if ([self.session canAddOutput:self.imageOutput]) {
        [self.session addOutput:self.imageOutput];
    }
    // Start the session running to start the flow of data.
    [self.session startRunning];
    // Switch to the front camera.
    AVCaptureDevicePosition desiredPosition = AVCaptureDevicePositionFront;
    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([d position] == desiredPosition) {
            [self.previewLayer.session beginConfiguration];
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
            // Remove the old (back-camera) input before adding the new one.
            for (AVCaptureInput *oldInput in self.previewLayer.session.inputs) {
                [[self.previewLayer session] removeInput:oldInput];
            }
            [self.previewLayer.session addInput:input];
            [self.previewLayer.session commitConfiguration];
            break;
        }
    }
}
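One thing the method above relies on but never shows is self.previewLayer: the front-camera loop swaps inputs on previewLayer.session, so the layer must already exist and carry the session before that loop runs. A minimal sketch of that setup, where the previewLayer property name and the hosting view are my assumptions rather than something from the original code:

- (void)setupPreviewLayer {
    // Attach a preview layer to the capture session so the camera feed is visible.
    AVCaptureVideoPreviewLayer *previewLayer =
        [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    previewLayer.frame = self.view.bounds; // assumed hosting view
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:previewLayer];
    self.previewLayer = previewLayer; // assumed AVCaptureVideoPreviewLayer property
}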
// Prevent the front camera image from coming out inverted:
// map the device orientation to a capture orientation, swapping left/right landscape.
- (AVCaptureVideoOrientation)avOrientationForDeviceOrientation:(UIDeviceOrientation)deviceOrientation
{
    AVCaptureVideoOrientation result = (AVCaptureVideoOrientation)deviceOrientation;
    if (deviceOrientation == UIDeviceOrientationLandscapeLeft)
        result = AVCaptureVideoOrientationLandscapeRight;
    else if (deviceOrientation == UIDeviceOrientationLandscapeRight)
        result = AVCaptureVideoOrientationLandscapeLeft;
    return result;
}
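This helper only takes effect once it is applied to the connection that delivers the frames, which the code in this post never does explicitly. One way it could be wired in, sketched under the assumption that self.imageOutput is the AVCaptureVideoDataOutput created earlier (call this whenever the device rotates):

// Sketch: push the mapped orientation onto the video connection so the
// delivered frames are upright.
AVCaptureConnection *connection = [self.imageOutput connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoOrientationSupported]) {
    UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation];
    connection.videoOrientation = [self avOrientationForDeviceOrientation:deviceOrientation];
}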
// Lock the interface to portrait orientation
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
// Grab the photo and do any further processing.
// Called for every frame the camera delivers, on the queue set above.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if (sampleBuffer == NULL) {
        return;
    }
    // Create a UIImage from the sample buffer data.
    UIImage *oldImage = [self imageFromSampleBuffer:sampleBuffer];
    NSData *newData = UIImageJPEGRepresentation(oldImage, 0.3);
    // dataArray is an ivar collecting the burst shots.
    [dataArray addObject:newData];
    // Save the image locally; the timestamp helper and LOCATION_IMAGES_PATH
    // macro are defined elsewhere in the project.
    NSString *imageName = [NSString stringWithFormat:@"%@.jpg", [self getCurrentTimeInterval]];
    NSString *savedImagePath = [LOCATION_IMAGES_PATH stringByAppendingPathComponent:imageName];
    // NSLog(@"%@", savedImagePath);
    [newData writeToFile:savedImagePath atomically:NO];
}
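Keep in mind that this delegate fires for every frame the camera produces, on the order of 30 times per second, so the version above keeps writing JPEGs to disk continuously. For "take a photo on demand without the shutter sound", a common variation is to guard the body with a flag. A sketch, where shouldCapturePhoto is a hypothetical BOOL property of my own, not part of the original code:

// At the top of -captureOutput:didOutputSampleBuffer:fromConnection:,
// skip frames until the user asks for a photo.
// `shouldCapturePhoto` is a hypothetical atomic BOOL property (an assumption).
if (!self.shouldCapturePhoto) {
    return;
}
self.shouldCapturePhoto = NO; // persist exactly one frame per request

A capture button then only needs to set self.shouldCapturePhoto = YES, and the next delivered frame gets saved silently.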
// Create a UIImage from sample buffer data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Create a bitmap graphics context with the sample buffer data
    // (BGRA layout, matching the kCVPixelFormatType_32BGRA set on the output)
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
        bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // Create an image object from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    // Release the Quartz image
    CGImageRelease(quartzImage);
    return image;
}
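Note that +imageWithCGImage: attaches no orientation metadata, so frames can come out rotated (and, from the front camera, mirrored) relative to what the user sees. If that matters, the last lines of the method could wrap the CGImage with an explicit orientation instead. A sketch; UIImageOrientationRight assumes a portrait device, and in general the value should be derived from the orientation-mapping helper earlier in the post:

// Sketch (assumption): wrap the CGImage with an explicit orientation so the
// resulting UIImage displays upright on a portrait device.
UIImage *image = [UIImage imageWithCGImage:quartzImage
                                     scale:1.0
                               orientation:UIImageOrientationRight];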