//
//  BDFaceVideoCaptureDevice.m
//  FaceSDKSample_IOS
//
//  Created by 阿凡树 on 2017/5/23.
//  Copyright © 2017年 Baidu. All rights reserved.
//
  8. #import "BDFaceVideoCaptureDevice.h"
  9. @interface BDFaceVideoCaptureDevice () <AVCaptureVideoDataOutputSampleBufferDelegate> {
  10. dispatch_queue_t _videoBufferQueue;
  11. }
  12. @property (nonatomic, readwrite, retain) AVCaptureSession *captureSession;
  13. @property (nonatomic, readwrite, retain) AVCaptureDevice *captureDevice;
  14. @property (nonatomic, readwrite, retain) AVCaptureDeviceInput *captureInput;
  15. @property (nonatomic, readwrite, retain) AVCaptureVideoDataOutput *videoDataOutput;
  16. @property (nonatomic, readwrite, assign) BOOL isSessionBegin;
  17. @end
  18. @implementation BDFaceVideoCaptureDevice
  19. - (void)setPosition:(AVCaptureDevicePosition)position {
  20. if (_position ^ position) {
  21. _position = position;
  22. if (self.isSessionBegin) {
  23. [self resetSession];
  24. }
  25. }
  26. }
  27. - (instancetype)init {
  28. if (self = [super init]) {
  29. _captureSession = [[AVCaptureSession alloc] init];
  30. _videoBufferQueue = dispatch_queue_create("video_buffer_handle_queue", NULL);
  31. _isSessionBegin = NO;
  32. _position = AVCaptureDevicePositionFront;
  33. }
  34. return self;
  35. }
  36. - (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition) position {
  37. NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  38. for (AVCaptureDevice *device in devices) {
  39. if ([device position] == position) {
  40. return device;
  41. }
  42. }
  43. return nil;
  44. }
  45. - (void)startSession {
  46. #if TARGET_OS_SIMULATOR
  47. NSLog(@"模拟器没有摄像头,此功能只有真机可用");
  48. #else
  49. if (self.captureSession.running) {
  50. return;
  51. }
  52. if (!self.isSessionBegin) {
  53. self.isSessionBegin = YES;
  54. // 配置相机设备
  55. _captureDevice = [self cameraWithPosition:_position];
  56. // 初始化输入
  57. NSError *error = nil;
  58. _captureInput = [[AVCaptureDeviceInput alloc] initWithDevice:_captureDevice error:&error];
  59. if (error == nil) {
  60. [_captureSession addInput:_captureInput];
  61. } else {
  62. if ([self.delegate respondsToSelector:@selector(captureError)]) {
  63. [self.delegate captureError];
  64. }
  65. }
  66. // 输出设置
  67. _videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
  68. _videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)};
  69. [_videoDataOutput setSampleBufferDelegate:self queue:_videoBufferQueue];
  70. [_captureSession addOutput:_videoDataOutput];
  71. AVCaptureConnection* connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  72. connection.videoOrientation = AVCaptureVideoOrientationPortrait;
  73. // 调节摄像头翻转
  74. [connection setVideoMirrored:YES];
  75. // connection.videoMirrored = (_position == AVCaptureDevicePositionFront);
  76. [self.captureSession startRunning];
  77. }
  78. #endif
  79. }
  80. - (void)stopSession {
  81. #if TARGET_OS_SIMULATOR
  82. NSLog(@"模拟器没有摄像头,此功能只有真机可用");
  83. #else
  84. if (!self.captureSession.running) {
  85. return;
  86. }
  87. if(self.isSessionBegin){
  88. self.isSessionBegin = NO;
  89. [self.captureSession stopRunning];
  90. if(nil != self.captureInput){
  91. [self.captureSession removeInput:self.captureInput];
  92. }
  93. if(nil != self.videoDataOutput){
  94. [self.captureSession removeOutput:self.videoDataOutput];
  95. }
  96. }
  97. #endif
  98. }
  99. - (void)resetSession {
  100. [self stopSession];
  101. [self startSession];
  102. }
  103. #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
  104. - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
  105. if (!_runningStatus) {
  106. return;
  107. }
  108. UIImage* sampleImage = [self imageFromSamplePlanerPixelBuffer:sampleBuffer];
  109. if ([self.delegate respondsToSelector:@selector(captureOutputSampleBuffer:)] && sampleImage != nil) {
  110. [self.delegate captureOutputSampleBuffer:sampleImage];
  111. }
  112. }
  113. /**
  114. * 把 CMSampleBufferRef 转化成 UIImage 的方法,参考自:
  115. * https://stackoverflow.com/questions/19310437/convert-cmsamplebufferref-to-uiimage-with-yuv-color-space
  116. * note1 : SDK要求 colorSpace 为 CGColorSpaceCreateDeviceRGB
  117. * note2 : SDK需要 ARGB 格式的图片
  118. */
  119. - (UIImage *) imageFromSamplePlanerPixelBuffer:(CMSampleBufferRef)sampleBuffer{
  120. @autoreleasepool {
  121. // Get a CMSampleBuffer's Core Video image buffer for the media data
  122. CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  123. // Lock the base address of the pixel buffer
  124. CVPixelBufferLockBaseAddress(imageBuffer, 0);
  125. // Get the number of bytes per row for the plane pixel buffer
  126. void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
  127. // Get the number of bytes per row for the plane pixel buffer
  128. size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer,0);
  129. // Get the pixel buffer width and height
  130. size_t width = CVPixelBufferGetWidth(imageBuffer);
  131. size_t height = CVPixelBufferGetHeight(imageBuffer);
  132. // Create a device-dependent RGB color space
  133. CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  134. // Create a bitmap graphics context with the sample buffer data
  135. CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
  136. bytesPerRow, colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
  137. // Create a Quartz image from the pixel data in the bitmap graphics context
  138. CGImageRef quartzImage = CGBitmapContextCreateImage(context);
  139. // Unlock the pixel buffer
  140. CVPixelBufferUnlockBaseAddress(imageBuffer,0);
  141. // Free up the context and color space
  142. CGContextRelease(context);
  143. CGColorSpaceRelease(colorSpace);
  144. // Create an image object from the Quartz image
  145. UIImage *image = [UIImage imageWithCGImage:quartzImage];
  146. // Release the Quartz image
  147. CGImageRelease(quartzImage);
  148. return (image);
  149. }
  150. }
  151. @end