faceView.m
//
//  faceView.m
//  FaceIOS
//
//  Created by archie on 17/4/13.
//  Copyright © 2017 Facebook. All rights reserved.
//

#ifndef kScreenWidth
#define kScreenWidth  [UIScreen mainScreen].bounds.size.width
#endif
#ifndef kScreenHeight
#define kScreenHeight [UIScreen mainScreen].bounds.size.height
#endif

#import "faceView.h"
#import <AVFoundation/AVFoundation.h>
#import "calculateObject.h"
@interface faceView () <AVCaptureVideoDataOutputSampleBufferDelegate> {
    UIView  *faceRect;          // optional debug rectangle drawn around the detected face (see commented-out code below)
    UILabel *Promptla;          // on-screen prompt telling the applicant how to adjust their position
    BOOL     pickImageSelected; // YES while we are still waiting to capture a usable frame
}
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureStillImageOutput *captrueOut;
@property (nonatomic, strong) CAShapeLayer *circleLayer;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@end
@implementation faceView

- (instancetype)initWithFrame:(CGRect)frame {
    if (self = [super initWithFrame:frame]) {
        [self capture];
    }
    return self;
}
// Sets up the capture session, preview layer and prompt label, then starts the camera.
- (void)capture {
    pickImageSelected = YES;

    // Timestamp used as a unique name for the sample-buffer delegate queue.
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    [formatter setDateFormat:@"yyyyMMddHHmmss"];
    NSString *tmpName = [formatter stringFromDate:[NSDate date]];

    // Keep the screen awake while the camera is running.
    [UIApplication sharedApplication].idleTimerDisabled = YES;

    self.session = [[AVCaptureSession alloc] init];
    [self.session setSessionPreset:AVCaptureSessionPresetMedium];

    // Attach the back camera as the capture input.
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    [devices enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
        AVCaptureDevice *device = obj;
        if (device.position == AVCaptureDevicePositionBack) {
            NSError *error = nil;
            AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
            if (input && [self.session canAddInput:input]) {
                [self.session addInput:input];
            }
            *stop = YES;
        }
    }];

    // Deliver BGRA video frames to this object on a serial queue.
    AVCaptureVideoDataOutput *Output = [[AVCaptureVideoDataOutput alloc] init];
    Output.alwaysDiscardsLateVideoFrames = YES;
    [Output setSampleBufferDelegate:self queue:dispatch_queue_create(tmpName.UTF8String, DISPATCH_QUEUE_SERIAL)];
    NSDictionary *pixelDic = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
                                                         forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    [Output setVideoSettings:pixelDic];
    if ([self.session canAddOutput:Output]) {
        [self.session addOutput:Output];
    }

    // Full-width preview layer that fills this view.
    CGSize screenSize = [UIScreen mainScreen].bounds.size;
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.previewLayer.frame = CGRectMake(0, 0, screenSize.width, self.bounds.size.height);
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.layer addSublayer:self.previewLayer];

    // When the view is full screen, overlay the face-alignment guide image.
    if (self.frame.size.height == kScreenHeight) {
        UIImageView *imgView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, kScreenWidth, kScreenHeight)];
        imgView.image = [UIImage imageNamed:@"Recognition"];
        [self addSubview:imgView];
    }

    [self.session startRunning];

    // Centered prompt label used to guide the applicant.
    Promptla = [[UILabel alloc] initWithFrame:CGRectMake((kScreenWidth - 200) / 2, (kScreenHeight - 70) / 2, 200, 70)];
    Promptla.layer.cornerRadius = 7;
    Promptla.layer.masksToBounds = YES;
    Promptla.numberOfLines = 0;
    Promptla.font = [UIFont fontWithName:@"Arial" size:15];
    Promptla.textAlignment = NSTextAlignmentCenter;
    Promptla.backgroundColor = [UIColor whiteColor];
    Promptla.textColor = [UIColor blackColor];
    Promptla.text = @"";
    Promptla.hidden = YES;
    [self addSubview:Promptla];

    // faceRect = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 100, 100)];
    // faceRect.layer.borderWidth = 2;
    // faceRect.layer.borderColor = [UIColor redColor].CGColor;
    // [self addSubview:faceRect];
}
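
// Note: the session setup above assumes camera access has already been granted. A host
// app would typically request it before creating this view; a sketch (not part of the
// original implementation) could be:
//
//   [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
//                            completionHandler:^(BOOL granted) {
//       // Only create and add faceView once granted is YES.
//   }];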
// AVCaptureVideoDataOutputSampleBufferDelegate: converts each frame to a UIImage,
// runs face detection on it, and either updates the prompt label or saves the frame.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];

    // Convert the BGRA pixel buffer into a UIImage.
    CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    CGColorSpaceRef space = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, space,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    UIImage *img = [UIImage imageWithCGImage:imageRef scale:1 orientation:UIImageOrientationLeft];
    CGImageRelease(imageRef);
    CGColorSpaceRelease(space);
    CGContextRelease(context);

    [calculateObject recognitionFace:img completion:^(int faces, CGRect rect) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (pickImageSelected) {
                if (faces > 1) {
                    Promptla.hidden = NO;
                    Promptla.text = @"Too many people. Please frame the applicant only.";
                } else if (faces < 1) {
                    Promptla.hidden = NO;
                    Promptla.text = @"Please frame the applicant.";
                } else {
                    NSLog(@"Face rect: %@", NSStringFromCGRect(rect));
                    if (rect.size.width > 135 * (kScreenWidth / 320) && fabs(rect.origin.x) < 185) {
                        // Success: the face is large enough and roughly centered.
                        [self.session stopRunning];
                        Promptla.hidden = YES;
                        pickImageSelected = NO;
                        Promptla.text = @"";

                        // Save the frame to Documents under a timestamped file name.
                        NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
                        [formatter setDateFormat:@"yyyyMMddHHmmss"];
                        NSString *tmpName = [formatter stringFromDate:[NSDate date]];
                        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
                        NSString *documentsDirectory = [paths objectAtIndex:0];
                        NSString *fullPathToFile = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.jpg", tmpName]];
                        if ([UIImageJPEGRepresentation(img, 1.0) writeToFile:fullPathToFile atomically:NO]) {
                            if (self.callBackImagePath) {
                                self.callBackImagePath(fullPathToFile);
                            }
                        }
                        // if (self.callBackImageData) {
                        //     self.callBackImageData(UIImageJPEGRepresentation(img, 1.0));
                        // }
                    } else if (fabs(rect.origin.x) > 150) {
                        Promptla.hidden = NO;
                        Promptla.text = @"Please ask the applicant to face the camera straight on.";
                    } else {
                        Promptla.hidden = NO;
                        Promptla.text = @"The applicant should not be too far from the device.";
                    }
                }
            }
        });
    }];
}
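
// For reference, the +recognitionFace:completion: call above lives in calculateObject,
// which is not shown in this file. A minimal sketch of such a detector, assuming it is
// built on Core Image's CIDetector, might look like:
//
//   CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
//                                             context:nil
//                                             options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh}];
//   NSArray *features = [detector featuresInImage:[CIImage imageWithCGImage:image.CGImage] options:nil];
//   CGRect firstFaceRect = features.count == 1 ? [features.firstObject bounds] : CGRectZero;
//   completion((int)features.count, firstFaceRect);
//
// The actual calculateObject implementation may differ; this is only an illustration.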
@end
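
// Usage sketch (assumes faceView.h declares the callback roughly as
// @property (nonatomic, copy) void (^callBackImagePath)(NSString *path);
// the exact declaration is not shown in this file):
//
//   faceView *view = [[faceView alloc] initWithFrame:[UIScreen mainScreen].bounds];
//   view.callBackImagePath = ^(NSString *path) {
//       NSLog(@"Captured face image saved to: %@", path);
//   };
//   [self.view addSubview:view];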