The best way is to use an AVCaptureSession object. I am doing exactly what you describe in my free app "Live Effects Cam".
There are several code examples online that will also help you implement this. Here is a sample block of code that may help:
- (void) activateCameraFeed
{
    videoSettings = nil;

#if USE_32BGRA
    pixelFormatCode = [[NSNumber alloc] initWithUnsignedInt:(unsigned int)kCVPixelFormatType_32BGRA];
    pixelFormatKey = [[NSString alloc] initWithString:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    videoSettings = [[NSDictionary alloc] initWithObjectsAndKeys:pixelFormatCode, pixelFormatKey, nil];
#endif

    videoDataOutputQueue = dispatch_queue_create("com.jellyfilledstudios.ImageCaptureQueue", NULL);

    captureVideoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [captureVideoOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
    [captureVideoOutput setVideoSettings:videoSettings];
    [captureVideoOutput setMinFrameDuration:kCMTimeZero];

    dispatch_release(videoDataOutputQueue); // AVCaptureVideoDataOutput uses dispatch_retain() & dispatch_release() so we can dispatch_release() our reference now

    if ( useFrontCamera )
    {
        currentCameraDeviceIndex = frontCameraDeviceIndex;
        cameraImageOrientation = UIImageOrientationLeftMirrored;
    }
    else
    {
        currentCameraDeviceIndex = backCameraDeviceIndex;
        cameraImageOrientation = UIImageOrientationRight;
    }

    selectedCamera = [[AVCaptureDevice devices] objectAtIndex:(NSUInteger)currentCameraDeviceIndex];

    captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:selectedCamera error:nil];

    captureSession = [[AVCaptureSession alloc] init];

    [captureSession beginConfiguration];

    [self setCaptureConfiguration];

    [captureSession addInput:captureVideoInput];
    [captureSession addOutput:captureVideoOutput];

    [captureSession commitConfiguration];

    [captureSession startRunning];
}
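The selectedCamera lookup above indexes [AVCaptureDevice devices] directly, which assumes you have already worked out frontCameraDeviceIndex and backCameraDeviceIndex elsewhere. If you would rather look the camera up by position, a minimal sketch could look like this (cameraWithPosition: is a hypothetical helper, not part of the snippet above):

- (AVCaptureDevice *) cameraWithPosition:(AVCaptureDevicePosition)position
{
    // Walk the video-capable devices and return the first one at the requested position
    for ( AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] )
    {
        if ( [device position] == position )
            return device;
    }
    return nil;
}

You would then call it with AVCaptureDevicePositionFront or AVCaptureDevicePositionBack instead of indexing into [AVCaptureDevice devices].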
// AVCaptureVideoDataOutputSampleBufferDelegate
// AVCaptureAudioDataOutputSampleBufferDelegate
//
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    if ( captureOutput==captureVideoOutput )
    {
        [self performImageCaptureFrom:sampleBuffer];
    }

    [pool drain];
}
- (void) performImageCaptureFrom:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer;

    if ( CMSampleBufferGetNumSamples(sampleBuffer) != 1 )
        return;
    if ( !CMSampleBufferIsValid(sampleBuffer) )
        return;
    if ( !CMSampleBufferDataIsReady(sampleBuffer) )
        return;

    imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if ( CVPixelBufferGetPixelFormatType(imageBuffer) != kCVPixelFormatType_32BGRA )
        return;

    CVPixelBufferLockBaseAddress(imageBuffer,0);

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t bufferSize = bytesPerRow * height;

    uint8_t *tempAddress = malloc( bufferSize );
    memcpy( tempAddress, baseAddress, bufferSize );

    baseAddress = tempAddress;

    //
    // Apply effects to the pixels stored in (uint32_t *)baseAddress
    //
    //
    // example: grayScale( (uint32_t *)baseAddress, width, height );
    // example: sepia( (uint32_t *)baseAddress, width, height );
    //

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = nil;

    if ( cameraDeviceSetting != CameraDeviceSetting640x480 )   // not an iPhone4 or iTouch 5th gen
        newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst);
    else
        newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    CGImageRef newImage = CGBitmapContextCreateImage( newContext );
    CGColorSpaceRelease( colorSpace );
    CGContextRelease( newContext );

    free( tempAddress );
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    if ( newImage == nil )
    {
        return;
    }

    // To be able to display the CGImageRef newImage in your UI you will need to do it like this
    // because you are running on a different thread here…
    //
    [self performSelectorOnMainThread:@selector(newCameraImageNotification:) withObject:(id)newImage waitUntilDone:YES];
}
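For completeness, here is a minimal sketch of what newCameraImageNotification: might look like; cameraView is an assumed UIImageView outlet, and the CGImageRelease() is there because performImageCaptureFrom: never releases the image it created with CGBitmapContextCreateImage():

- (void) newCameraImageNotification:(id)sender
{
    CGImageRef newImage = (CGImageRef)sender;

    // Wrap the raw CGImage in a UIImage, reusing the orientation chosen in activateCameraFeed
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0f orientation:cameraImageOrientation];
    cameraView.image = image;      // cameraView is an assumed UIImageView in your view hierarchy

    CGImageRelease( newImage );    // balance the CGBitmapContextCreateImage() above
}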
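The grayScale()/sepia() calls mentioned in the comments are placeholders for your own pixel loops. A minimal, unoptimized sketch of a grayScale() over 32BGRA data might look like this (it assumes no row padding, i.e. bytesPerRow == width * 4; otherwise walk the buffer row by row using bytesPerRow):

static void grayScale( uint32_t *pixels, size_t width, size_t height )
{
    // Naive in-place grayscale; each 32BGRA pixel is 4 bytes: B, G, R, A
    for ( size_t i = 0; i < width * height; i++ )
    {
        uint8_t *p = (uint8_t *)&pixels[i];
        uint8_t gray = (uint8_t)(( 11 * p[0] + 59 * p[1] + 30 * p[2] ) / 100);   // weighted luma from B, G, R
        p[0] = p[1] = p[2] = gray;   // leave alpha (p[3]) untouched
    }
}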