工具類提供預覽圖像畫面,自動處理旋轉,並且以主動方式抓取圖像(這樣幀率可以無限大)
系統的接口多是異步接收圖像,像我這種強迫症怎麼受得了,必須把被動接收圖像的方式改成主動抓取。
頭文件
// TKVideoCapture.h
// Provides a live camera preview attached to a host view, handles rotation,
// and exposes the latest frame through a pull (synchronous) getter so the
// caller can sample at any rate it likes instead of waiting for callbacks.
//
// NOTE(review): the two bare "#import#import" tokens in the extracted source
// lost their targets; UIKit + Foundation reconstructed here — confirm against
// the original project.

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

// Supported output sizes; both are 4:3 aspect ratio.
// The width is packed into the high 16 bits and the height into the low
// 16 bits of the enum value (see get_image_rgb32: / the implementation).
typedef enum TKVideoFrameSize
{
    tkVideoFrame480x360 = 480 << 16 | 360,
    tkVideoFrame720x540 = 720 << 16 | 540, // this resolution is noticeably faster
} TKVideoFrameSize;

@interface TKVideoCapture : NSObject

// Builds the capture pipeline and adds the preview layer to `preview`.
// Returns true on completion (always, in the current implementation).
- (bool) create:(UIView*)preview frame:(TKVideoFrameSize)type ;

// Tears the pipeline down and frees the frame buffers.
// (Spelling "destory" kept as-is: it is the published API name.)
- (bool) destory;

// Starts / stops the capture session.
- (bool) start ;
- (bool) stop ;

// Returns the most recent frame as 32-bit pixels in BGRA byte order.
// *length (if non-NULL) receives the byte count (width * height * 4).
// The buffer is owned by this object and valid until -destory.
- (uint8_t*) get_image_rgb32:(uint32_t*)length ;

@end
// TKVideoCapture.m
// Pre-ARC code (manual -release / dispatch_release): compile with -fno-objc-arc.
//
// NOTE(review): the bare "#import#import" tokens in the extracted source lost
// their targets; AVFoundation/CoreMedia/CoreVideo reconstructed here — confirm
// against the original project.

#import "TKVideoCapture.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import "TKLock.h"

// FIX: declare the delegate protocol — the original extension did not, so
// [setSampleBufferDelegate:self ...] produced an incompatible-type warning.
@interface TKVideoCapture () <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    TKVideoFrameSize            _frametype ;
    UIView*                     _preview ;
    AVCaptureSession*           _captureSession ;
    AVCaptureVideoPreviewLayer* _capturePreview ;
    AVCaptureVideoDataOutput*   _captureOutput ;
    AVCaptureDevice*            _captureDevice ;
    AVCaptureDeviceInput*       _captureInput ;
    uint8_t*                    _buffer_temp ;   // latest frame, written on the camera queue
    uint8_t*                    _buffer_obox ;   // caller-facing copy of _buffer_temp
    CGRect                      _subImageRect ;  // crop rect in raw (landscape) sensor coordinates
    TKLock*                     _buffer_lock ;   // guards _buffer_temp across threads
}
@end

@implementation TKVideoCapture

// Builds the whole capture pipeline. Must run on the main thread: it touches
// UIKit (_preview) and CALayer state. Invoked via performSelectorOnMainThread.
- (void) do_create
{
    self->_captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] ;
    self->_captureInput  = [AVCaptureDeviceInput deviceInputWithDevice:self->_captureDevice error:nil];

    self->_captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    if (self->_captureOutput)
        [self->_captureOutput setAlwaysDiscardsLateVideoFrames:true];

    // Frames are delivered on a private serial queue (not the main thread).
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [self->_captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Ask the camera for 32-bit BGRA so the bytes can be consumed directly.
    NSString*     key           = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber*     value         = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [self->_captureOutput setVideoSettings:videoSettings];

    self->_captureSession = [[AVCaptureSession alloc] init];

    // Width is packed in the high 16 bits of the enum value, height in the low 16.
    uint16_t width  = (uint16_t)(((uint32_t)_frametype) >> 16) ;
    uint16_t height = (uint16_t)(((uint32_t)_frametype) & 0xFFFF) ;
    _buffer_temp = (uint8_t*)malloc(width * height * 4);
    _buffer_obox = (uint8_t*)malloc(width * height * 4);

    // Pick the session preset and the centered crop rectangle (0.75 = 4:3
    // target ratio). The sensor delivers landscape frames, so the crop is in
    // sensor coordinates; the 90° rotation happens later in the callback.
    switch (_frametype)
    {
        case tkVideoFrame480x360:
        {
            _captureSession.sessionPreset = AVCaptureSessionPreset640x480 ;
            _subImageRect = CGRectMake((640-360)/2, 0, 360, 480);
            break;
        }
        case tkVideoFrame720x540:
        {
            _captureSession.sessionPreset = AVCaptureSessionPresetiFrame1280x720 ;
            _subImageRect = CGRectMake((1280-540)/2, 0, 540, 720);
            break;
        }
        default:
            break;
    }

    if (self->_captureInput != nil)
        [self->_captureSession addInput:self->_captureInput];
    [self->_captureSession addOutput:self->_captureOutput];

    self->_capturePreview = [AVCaptureVideoPreviewLayer layerWithSession:self->_captureSession];
    self->_capturePreview.frame        = self->_preview.bounds;
    self->_capturePreview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self->_capturePreview.connection.videoOrientation = [self getOrientation] ;
    [self->_preview.layer addSublayer:self->_capturePreview];

    _buffer_lock = [[TKLock alloc] init];
    [_buffer_lock open];
}

- (bool) create:(UIView*)preview frame:(TKVideoFrameSize)type
{
    self->_frametype = type ;
    self->_preview   = preview ;
    [self performSelectorOnMainThread:@selector(do_create) withObject:self waitUntilDone:true];
    return true ;
}

// Maps the current UI orientation to the matching preview-layer orientation.
- (AVCaptureVideoOrientation) getOrientation
{
    UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation ;
    switch (orientation)
    {
        case UIInterfaceOrientationPortrait:
            return AVCaptureVideoOrientationPortrait;
        case UIInterfaceOrientationPortraitUpsideDown:
            return AVCaptureVideoOrientationPortraitUpsideDown;
        case UIInterfaceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeLeft;
        case UIInterfaceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeRight;
        // FIX: handle UIInterfaceOrientationUnknown (and any future value)
        // explicitly instead of relying on code after the switch.
        default:
            return AVCaptureVideoOrientationLandscapeLeft ;
    }
}

// Tear-down, main thread. FIX: the original freed _buffer_temp/_buffer_obox
// and released the lock BEFORE stopping the session, so an in-flight camera
// callback could memcpy into freed memory. Stop the session and detach the
// delegate first, then release the buffers.
- (void) do_destory
{
    [self->_captureSession stopRunning];
    [self->_captureOutput setSampleBufferDelegate:nil queue:NULL];
    [self->_capturePreview removeFromSuperlayer];

    [_buffer_lock close];
    [_buffer_lock release];
    _buffer_lock = nil ;

    free(_buffer_temp);
    free(_buffer_obox);
    _buffer_temp = NULL ;
    _buffer_obox = NULL ;

    [self->_captureOutput release];
    [self->_captureSession release];
    self->_captureSession = nil ;
    self->_capturePreview = nil ;
    self->_captureOutput  = nil ;
    self->_captureDevice  = nil ;
    self->_captureInput   = nil ;
    self->_preview        = nil ;
}

- (bool) destory
{
    [self performSelectorOnMainThread:@selector(do_destory) withObject:self waitUntilDone:true];
    return true ;
}

// Camera-queue delegate callback: crops the centered 4:3 region out of the
// landscape sensor frame, rotates it 90° to portrait, and publishes the BGRA
// bytes into _buffer_temp for get_image_rgb32: to snapshot.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t* baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t   width       = CVPixelBufferGetWidth(imageBuffer);
    size_t   height      = CVPixelBufferGetHeight(imageBuffer);
    size_t   bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    // Wrap the BGRA pixel buffer in a CGImage without copying it.
    CGColorSpaceRef   colorSpace = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider   = CGDataProviderCreateWithData(NULL, baseAddress,
                                                                bytesPerRow * height, NULL);
    CGImageRef imageRef = CGImageCreate(width, height, 8, 32, bytesPerRow, colorSpace,
                                        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst,
                                        provider, NULL, false, kCGRenderingIntentDefault);

    CGImageRef subImageRef = CGImageCreateWithImageInRect(imageRef, _subImageRect);

    // After the 90° rotation the output width is the crop's height and vice versa.
    size_t subWidth  = _subImageRect.size.height ;
    size_t subHeight = _subImageRect.size.width ;

    CGContextRef context = CGBitmapContextCreate(NULL, subWidth, subHeight,
                                                 CGImageGetBitsPerComponent(subImageRef), 0,
                                                 CGImageGetColorSpace(subImageRef),
                                                 CGImageGetBitmapInfo(subImageRef));
    CGContextTranslateCTM(context, 0, subHeight);
    CGContextRotateCTM(context, -M_PI/2);
    CGContextDrawImage(context, CGRectMake(0, 0, subHeight, subWidth), subImageRef);

    // FIX: bytesPerRow was passed as 0 above, so CG chooses its own (possibly
    // padded) row stride; the original memcpy assumed a tight subWidth*4
    // stride. Honor CGBitmapContextGetBytesPerRow, copying row by row when
    // the stride is padded. Also guard against teardown having freed the
    // buffer between callbacks.
    uint8_t* data           = (uint8_t*)CGBitmapContextGetData(context);
    size_t   ctxBytesPerRow = CGBitmapContextGetBytesPerRow(context);

    [_buffer_lock lock];
    if (_buffer_temp != NULL && data != NULL)
    {
        if (ctxBytesPerRow == subWidth * 4)
        {
            memcpy(_buffer_temp, data, subWidth * subHeight * 4);
        }
        else
        {
            for (size_t row = 0; row < subHeight; ++row)
                memcpy(_buffer_temp + row * subWidth * 4,
                       data + row * ctxBytesPerRow,
                       subWidth * 4);
        }
    }
    [_buffer_lock unlock];

    CGContextRelease(context);
    CGImageRelease(imageRef);
    CGImageRelease(subImageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}

- (void) do_start { [self->_captureSession startRunning]; }

- (void) do_stop  { [self->_captureSession stopRunning];  }

- (bool) start
{
    [self performSelectorOnMainThread:@selector(do_start) withObject:self waitUntilDone:true];
    return true ;
}

- (bool) stop
{
    [self performSelectorOnMainThread:@selector(do_stop) withObject:self waitUntilDone:true];
    return true ;
}

// Pull interface: snapshots the latest frame into _buffer_obox and returns
// it. Bytes are BGRA. The buffer stays owned by this object; it is valid
// until -destory. Returns NULL after -destory (FIX: the original crashed).
- (uint8_t*) get_image_rgb32:(uint32_t*)length
{
    uint16_t width  = (uint16_t)(((uint32_t)_frametype) >> 16) ;
    uint16_t height = (uint16_t)(((uint32_t)_frametype) & 0xFFFF) ;

    if (_buffer_temp == NULL || _buffer_obox == NULL)
    {
        if (length)
            *length = 0 ;
        return NULL ;
    }

    [_buffer_lock lock];
    memcpy(_buffer_obox, _buffer_temp, width * height * 4);
    [_buffer_lock unlock];

    if (length)
        *length = width * height * 4 ;
    return _buffer_obox ;
}

@end