//
//  CameraViewController.m
//  LLSimpleCamera
//
//  Created by Ömer Faruk Gül on 24/10/14.
//  Copyright (c) 2014 Ömer Faruk Gül. All rights reserved.
//

#import "LLSimpleCamera.h"
#import <ImageIO/ImageIO.h>
#import "UIImage+FixOrientation.h"
#import "LLSimpleCamera+Helper.h"

@interface LLSimpleCamera () <AVCaptureFileOutputRecordingDelegate, UIGestureRecognizerDelegate>

@property (strong, nonatomic) UIView *preview;
@property (strong, nonatomic) AVCaptureStillImageOutput *stillImageOutput;
@property (strong, nonatomic) AVCaptureSession *session;
@property (strong, nonatomic) AVCaptureDevice *videoCaptureDevice;
@property (strong, nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
@property (strong, nonatomic) UITapGestureRecognizer *tapGesture;
@property (strong, nonatomic) CALayer *focusBoxLayer;
@property (strong, nonatomic) CAAnimation *focusBoxAnimation;
@property (strong, nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
@property (strong, nonatomic) UIPinchGestureRecognizer *pinchGesture;
@property (nonatomic, assign) CGFloat beginGestureScale;
@property (nonatomic, assign) CGFloat effectiveScale;
@property (nonatomic, copy) void (^didRecordCompletionBlock)(LLSimpleCamera *camera, NSURL *outputFileUrl, NSError *error);

@end

NSString *const LLSimpleCameraErrorDomain = @"LLSimpleCameraErrorDomain";

@implementation LLSimpleCamera

#pragma mark - Initialize

- (instancetype)initWithQuality:(NSString *)quality position:(LLCameraPosition)position
{
    self = [super initWithNibName:nil bundle:nil];
    if(self) {
        [self setupWithQuality:quality position:position];
    }
    return self;
}

- (instancetype)initWithCoder:(NSCoder *)aDecoder
{
    if (self = [super initWithCoder:aDecoder]) {
        [self setupWithQuality:AVCaptureSessionPresetHigh position:LLCameraPositionRear];
    }
    return self;
}

- (void)setupWithQuality:(NSString *)quality position:(LLCameraPosition)position
{
    _cameraQuality = quality;
    _position = position;
    _fixOrientationAfterCapture = NO;
    _tapToFocus = YES;
    _useDeviceOrientation = NO;
    _flash = LLCameraFlashOff;
    _mirror = LLCameraMirrorAuto;
    _recording = NO;
    _zoomingEnabled = YES;
    _effectiveScale = 1.0f;
}

- (void)viewDidLoad
{
    [super viewDidLoad];

    self.view.backgroundColor = [UIColor clearColor];
    self.view.autoresizingMask = UIViewAutoresizingNone;

    self.preview = [[UIView alloc] initWithFrame:CGRectZero];
    self.preview.backgroundColor = [UIColor clearColor];
    [self.view addSubview:self.preview];

    // tap to focus
    self.tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(previewTapped:)];
    self.tapGesture.numberOfTapsRequired = 1;
    [self.tapGesture setDelaysTouchesEnded:NO];
    [self.preview addGestureRecognizer:self.tapGesture];

    // pinch to zoom
    if (_zoomingEnabled) {
        self.pinchGesture = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(handlePinchGesture:)];
        self.pinchGesture.delegate = self;
        [self.preview addGestureRecognizer:self.pinchGesture];
    }

    // add focus box to view
    [self addDefaultFocusBox];
}

#pragma mark Pinch Delegate

- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer
{
    if ([gestureRecognizer isKindOfClass:[UIPinchGestureRecognizer class]]) {
        _beginGestureScale = _effectiveScale;
    }
    return YES;
}

- (void)handlePinchGesture:(UIPinchGestureRecognizer *)recognizer
{
    BOOL allTouchesAreOnThePreviewLayer = YES;
    NSUInteger numTouches = [recognizer numberOfTouches], i;
    for (i = 0; i < numTouches; ++i) {
        CGPoint location = [recognizer locationOfTouch:i inView:self.preview];
        CGPoint convertedLocation = [self.preview.layer convertPoint:location fromLayer:self.view.layer];
        if (![self.preview.layer containsPoint:convertedLocation]) {
            allTouchesAreOnThePreviewLayer = NO;
            break;
        }
    }

    if (allTouchesAreOnThePreviewLayer) {
        _effectiveScale = _beginGestureScale * recognizer.scale;
        if (_effectiveScale < 1.0f)
            _effectiveScale = 1.0f;
        if (_effectiveScale > self.videoCaptureDevice.activeFormat.videoMaxZoomFactor)
            _effectiveScale = self.videoCaptureDevice.activeFormat.videoMaxZoomFactor;

        NSError *error = nil;
        if ([self.videoCaptureDevice lockForConfiguration:&error]) {
            [self.videoCaptureDevice rampToVideoZoomFactor:_effectiveScale withRate:100];
            [self.videoCaptureDevice unlockForConfiguration];
        } else {
            [self passError:error];
        }
    }
}

#pragma mark - Camera

- (void)attachToViewController:(UIViewController *)vc withFrame:(CGRect)frame
{
    [vc addChildViewController:self];
    self.view.frame = frame;
    [vc.view addSubview:self.view];
    [self didMoveToParentViewController:vc];
}

- (void)start
{
    [LLSimpleCamera requestCameraPermission:^(BOOL granted) {
        if(granted) {
            [self initialize];
        } else {
            NSError *error = [NSError errorWithDomain:LLSimpleCameraErrorDomain
                                                 code:LLSimpleCameraErrorCodeCameraPermission
                                             userInfo:nil];
            [self passError:error];
        }
    }];
}

- (void)initialize
{
    if(!_session) {
        _session = [[AVCaptureSession alloc] init];
        _session.sessionPreset = self.cameraQuality;

        // preview layer
        CGRect bounds = self.preview.layer.bounds;
        _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
        _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _captureVideoPreviewLayer.bounds = bounds;
        _captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
        [self.preview.layer addSublayer:_captureVideoPreviewLayer];

        AVCaptureDevicePosition devicePosition;
        switch (self.position) {
            case LLCameraPositionRear:
                if([self.class isRearCameraAvailable]) {
                    devicePosition = AVCaptureDevicePositionBack;
                } else {
                    devicePosition = AVCaptureDevicePositionFront;
                    _position = LLCameraPositionFront;
                }
                break;
            case LLCameraPositionFront:
                if([self.class isFrontCameraAvailable]) {
                    devicePosition = AVCaptureDevicePositionFront;
                } else {
                    devicePosition = AVCaptureDevicePositionBack;
                    _position = LLCameraPositionRear;
                }
                break;
            default:
                devicePosition = AVCaptureDevicePositionUnspecified;
                break;
        }

        if(devicePosition == AVCaptureDevicePositionUnspecified) {
            self.videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        } else {
            self.videoCaptureDevice = [self cameraWithPosition:devicePosition];
        }

        NSError *error = nil;
        _videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_videoCaptureDevice error:&error];
        if (!_videoDeviceInput) {
            [self passError:error];
            return;
        }

        if([self.session canAddInput:_videoDeviceInput]) {
            [self.session addInput:_videoDeviceInput];
            self.captureVideoPreviewLayer.connection.videoOrientation = [self orientationForConnection];
        }

        // continuously adjust white balance
        self.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;

        // image output
        self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
        [self.stillImageOutput setOutputSettings:outputSettings];
        [self.session addOutput:self.stillImageOutput];
    }

    // if we had disabled the connection on capture, re-enable it
    if (![self.captureVideoPreviewLayer.connection isEnabled]) {
        [self.captureVideoPreviewLayer.connection setEnabled:YES];
    }
    [self.session startRunning];
}

- (void)stop
{
    [self.session stopRunning];
    self.session = nil;
}

#pragma mark - Image Capture

- (void)capture:(void (^)(LLSimpleCamera *camera, UIImage *image, NSDictionary *metadata, NSError *error))onCapture
 exactSeenImage:(BOOL)exactSeenImage
 animationBlock:(void (^)(AVCaptureVideoPreviewLayer *))animationBlock
{
    if(!self.session) {
        NSError *error = [NSError errorWithDomain:LLSimpleCameraErrorDomain
                                             code:LLSimpleCameraErrorCodeSession
                                         userInfo:nil];
        onCapture(self, nil, nil, error);
        return;
    }

    // get connection and set orientation
    AVCaptureConnection *videoConnection = [self captureConnection];
    videoConnection.videoOrientation = [self orientationForConnection];

    BOOL flashActive = self.videoCaptureDevice.flashActive;
    if (!flashActive && animationBlock) {
        animationBlock(self.captureVideoPreviewLayer);
    }

    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:
     ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {

         UIImage *image = nil;
         NSDictionary *metadata = nil;

         // check if we got the image buffer
         if (imageSampleBuffer != NULL) {
             CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
             if(exifAttachments) {
                 metadata = (__bridge NSDictionary*)exifAttachments;
             }

             NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
             image = [[UIImage alloc] initWithData:imageData];

             if(exactSeenImage) {
                 image = [self cropImage:image usingPreviewLayer:self.captureVideoPreviewLayer];
             }

             if(self.fixOrientationAfterCapture) {
                 image = [image fixOrientation];
             }
         }

         // trigger the block
         if(onCapture) {
             dispatch_async(dispatch_get_main_queue(), ^{
                 onCapture(self, image, metadata, error);
             });
         }
     }];
}

- (void)capture:(void (^)(LLSimpleCamera *camera, UIImage *image, NSDictionary *metadata, NSError *error))onCapture exactSeenImage:(BOOL)exactSeenImage
{
    [self capture:onCapture exactSeenImage:exactSeenImage animationBlock:^(AVCaptureVideoPreviewLayer *layer) {
        CABasicAnimation *animation = [CABasicAnimation animationWithKeyPath:@"opacity"];
        animation.duration = 0.1;
        animation.autoreverses = YES;
        animation.repeatCount = 0.0;
        animation.fromValue = [NSNumber numberWithFloat:1.0];
        animation.toValue = [NSNumber numberWithFloat:0.1];
        animation.fillMode = kCAFillModeForwards;
        animation.removedOnCompletion = NO;
        [layer addAnimation:animation forKey:@"animateOpacity"];
    }];
}

- (void)capture:(void (^)(LLSimpleCamera *camera, UIImage *image, NSDictionary *metadata, NSError *error))onCapture
{
    [self capture:onCapture exactSeenImage:NO];
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
    self.recording = YES;
    if(self.onStartRecording) self.onStartRecording(self);
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    self.recording = NO;
    [self enableTorch:NO];

    if(self.didRecordCompletionBlock) {
        self.didRecordCompletionBlock(self, outputFileURL, error);
    }
}

- (void)enableTorch:(BOOL)enabled
{
    // check if the device has a torch, otherwise don't do anything
    if([self isTorchAvailable]) {
        AVCaptureTorchMode torchMode = enabled ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
        NSError *error;
        if ([self.videoCaptureDevice lockForConfiguration:&error]) {
            [self.videoCaptureDevice setTorchMode:torchMode];
            [self.videoCaptureDevice unlockForConfiguration];
        } else {
            [self passError:error];
        }
    }
}

#pragma mark - Helpers

- (void)passError:(NSError *)error
{
    if(self.onError) {
        __weak typeof(self) weakSelf = self;
        self.onError(weakSelf, error);
    }
}

- (AVCaptureConnection *)captureConnection
{
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    return videoConnection;
}

- (void)setVideoCaptureDevice:(AVCaptureDevice *)videoCaptureDevice
{
    _videoCaptureDevice = videoCaptureDevice;

    if(videoCaptureDevice.flashMode == AVCaptureFlashModeAuto) {
        _flash = LLCameraFlashAuto;
    } else if(videoCaptureDevice.flashMode == AVCaptureFlashModeOn) {
        _flash = LLCameraFlashOn;
    } else if(videoCaptureDevice.flashMode == AVCaptureFlashModeOff) {
        _flash = LLCameraFlashOff;
    } else {
        _flash = LLCameraFlashOff;
    }

    _effectiveScale = 1.0f;

    // trigger block
    if(self.onDeviceChange) {
        __weak typeof(self) weakSelf = self;
        self.onDeviceChange(weakSelf, videoCaptureDevice);
    }
}

- (BOOL)isFlashAvailable
{
    return self.videoCaptureDevice.hasFlash && self.videoCaptureDevice.isFlashAvailable;
}

- (BOOL)isTorchAvailable
{
    return self.videoCaptureDevice.hasTorch && self.videoCaptureDevice.isTorchAvailable;
}

- (BOOL)updateFlashMode:(LLCameraFlash)cameraFlash
{
    if(!self.session)
        return NO;

    AVCaptureFlashMode flashMode;

    if(cameraFlash == LLCameraFlashOn) {
        flashMode = AVCaptureFlashModeOn;
    } else if(cameraFlash == LLCameraFlashAuto) {
        flashMode = AVCaptureFlashModeAuto;
    } else {
        flashMode = AVCaptureFlashModeOff;
    }

    if([self.videoCaptureDevice isFlashModeSupported:flashMode]) {
        NSError *error;
        if([self.videoCaptureDevice lockForConfiguration:&error]) {
            self.videoCaptureDevice.flashMode = flashMode;
            [self.videoCaptureDevice unlockForConfiguration];

            _flash = cameraFlash;
            return YES;
        } else {
            [self passError:error];
            return NO;
        }
    } else {
        return NO;
    }
}

- (void)setWhiteBalanceMode:(AVCaptureWhiteBalanceMode)whiteBalanceMode
{
    if ([self.videoCaptureDevice isWhiteBalanceModeSupported:whiteBalanceMode]) {
        NSError *error;
        if ([self.videoCaptureDevice lockForConfiguration:&error]) {
            [self.videoCaptureDevice setWhiteBalanceMode:whiteBalanceMode];
            [self.videoCaptureDevice unlockForConfiguration];
        } else {
            [self passError:error];
        }
    }
}

- (void)setMirror:(LLCameraMirror)mirror
{
    _mirror = mirror;

    if(!self.session) {
        return;
    }

    AVCaptureConnection *videoConnection = [_movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    AVCaptureConnection *pictureConnection = [_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];

    switch (mirror) {
        case LLCameraMirrorOff: {
            if ([videoConnection isVideoMirroringSupported]) {
                [videoConnection setVideoMirrored:NO];
            }
            if ([pictureConnection isVideoMirroringSupported]) {
                [pictureConnection setVideoMirrored:NO];
            }
            break;
        }
        case LLCameraMirrorOn: {
            if ([videoConnection isVideoMirroringSupported]) {
                [videoConnection setVideoMirrored:YES];
            }
            if ([pictureConnection isVideoMirroringSupported]) {
                [pictureConnection setVideoMirrored:YES];
            }
            break;
        }
        case LLCameraMirrorAuto: {
            BOOL shouldMirror = (_position == LLCameraPositionFront);
            if ([videoConnection isVideoMirroringSupported]) {
                [videoConnection setVideoMirrored:shouldMirror];
            }
            if ([pictureConnection isVideoMirroringSupported]) {
                [pictureConnection setVideoMirrored:shouldMirror];
            }
            break;
        }
    }

    return;
}

- (LLCameraPosition)togglePosition
{
    if(!self.session) {
        return self.position;
    }

    if(self.position == LLCameraPositionRear) {
        self.cameraPosition = LLCameraPositionFront;
    } else {
        self.cameraPosition = LLCameraPositionRear;
    }

    return self.position;
}

- (void)setCameraPosition:(LLCameraPosition)cameraPosition
{
    if(_position == cameraPosition || !self.session) {
        return;
    }

    if(cameraPosition == LLCameraPositionRear && ![self.class isRearCameraAvailable]) {
        return;
    }

    if(cameraPosition == LLCameraPositionFront && ![self.class isFrontCameraAvailable]) {
        return;
    }

    [self.session beginConfiguration];

    // remove existing input
    [self.session removeInput:self.videoDeviceInput];

    // get new input
    AVCaptureDevice *device = nil;
    if(self.videoDeviceInput.device.position == AVCaptureDevicePositionBack) {
        device = [self cameraWithPosition:AVCaptureDevicePositionFront];
    } else {
        device = [self cameraWithPosition:AVCaptureDevicePositionBack];
    }

    if(!device) {
        // no matching camera found; close the configuration block before bailing out
        [self.session commitConfiguration];
        return;
    }

    // add input to session
    NSError *error = nil;
    AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
    if(error) {
        [self passError:error];
        [self.session commitConfiguration];
        return;
    }

    _position = cameraPosition;

    [self.session addInput:videoInput];
    [self.session commitConfiguration];

    self.videoCaptureDevice = device;
    self.videoDeviceInput = videoInput;

    [self setMirror:_mirror];
}

// Find a camera with the specified AVCaptureDevicePosition, returning nil if one is not found
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) return device;
    }
    return nil;
}

#pragma mark - Focus

- (void)previewTapped:(UIGestureRecognizer *)gestureRecognizer
{
    if(!self.tapToFocus) {
        return;
    }

    CGPoint touchedPoint = [gestureRecognizer locationInView:self.preview];
    CGPoint pointOfInterest = [self convertToPointOfInterestFromViewCoordinates:touchedPoint
                                                                   previewLayer:self.captureVideoPreviewLayer
                                                                          ports:self.videoDeviceInput.ports];
    [self focusAtPoint:pointOfInterest];
    [self showFocusBox:touchedPoint];
}

- (void)addDefaultFocusBox
{
    CALayer *focusBox = [[CALayer alloc] init];
    focusBox.cornerRadius = 5.0f;
    focusBox.bounds = CGRectMake(0.0f, 0.0f, 70, 60);
    focusBox.borderWidth = 3.0f;
    focusBox.borderColor = [[UIColor yellowColor] CGColor];
    focusBox.opacity = 0.0f;
    [self.view.layer addSublayer:focusBox];

    CABasicAnimation *focusBoxAnimation = [CABasicAnimation animationWithKeyPath:@"opacity"];
    focusBoxAnimation.duration = 0.75;
    focusBoxAnimation.autoreverses = NO;
    focusBoxAnimation.repeatCount = 0.0;
    focusBoxAnimation.fromValue = [NSNumber numberWithFloat:1.0];
    focusBoxAnimation.toValue = [NSNumber numberWithFloat:0.0];

    [self alterFocusBox:focusBox animation:focusBoxAnimation];
}

- (void)alterFocusBox:(CALayer *)layer animation:(CAAnimation *)animation
{
    self.focusBoxLayer = layer;
    self.focusBoxAnimation = animation;
}

- (void)focusAtPoint:(CGPoint)point
{
    AVCaptureDevice *device = self.videoCaptureDevice;
    if (device.isFocusPointOfInterestSupported && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        NSError *error;
        if ([device lockForConfiguration:&error]) {
            device.focusPointOfInterest = point;
            device.focusMode = AVCaptureFocusModeAutoFocus;
            [device unlockForConfiguration];
        } else {
            [self passError:error];
        }
    }
}

- (void)showFocusBox:(CGPoint)point
{
    if(self.focusBoxLayer) {
        // clear animations
        [self.focusBoxLayer removeAllAnimations];

        // move layer to the touch point
        [CATransaction begin];
        [CATransaction setValue:(id)kCFBooleanTrue forKey:kCATransactionDisableActions];
        self.focusBoxLayer.position = point;
        [CATransaction commit];
    }

    if(self.focusBoxAnimation) {
        // run the animation
        [self.focusBoxLayer addAnimation:self.focusBoxAnimation forKey:@"animateOpacity"];
    }
}

#pragma mark - UIViewController

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
}

- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
}

- (void)viewWillLayoutSubviews
{
    [super viewWillLayoutSubviews];

    self.preview.frame = CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height);

    CGRect bounds = self.preview.bounds;
    self.captureVideoPreviewLayer.bounds = bounds;
    self.captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));

    self.captureVideoPreviewLayer.connection.videoOrientation = [self orientationForConnection];
}

- (AVCaptureVideoOrientation)orientationForConnection
{
    AVCaptureVideoOrientation videoOrientation = AVCaptureVideoOrientationPortrait;

    if(self.useDeviceOrientation) {
        switch ([UIDevice currentDevice].orientation) {
            case UIDeviceOrientationLandscapeLeft:
                // yes, to the right: this is intentional, not a bug!
                videoOrientation = AVCaptureVideoOrientationLandscapeRight;
                break;
            case UIDeviceOrientationLandscapeRight:
                videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
                break;
            case UIDeviceOrientationPortraitUpsideDown:
                videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
                break;
            default:
                videoOrientation = AVCaptureVideoOrientationPortrait;
                break;
        }
    } else {
        switch ([[UIApplication sharedApplication] statusBarOrientation]) {
            case UIInterfaceOrientationLandscapeLeft:
                videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
                break;
            case UIInterfaceOrientationLandscapeRight:
                videoOrientation = AVCaptureVideoOrientationLandscapeRight;
                break;
            case UIInterfaceOrientationPortraitUpsideDown:
                videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
                break;
            default:
                videoOrientation = AVCaptureVideoOrientationPortrait;
                break;
        }
    }

    return videoOrientation;
}

- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration
{
    [super willRotateToInterfaceOrientation:toInterfaceOrientation duration:duration];

    // layout subviews is not called when rotating from landscape right/left to left/right
    if (UIInterfaceOrientationIsLandscape(self.interfaceOrientation) && UIInterfaceOrientationIsLandscape(toInterfaceOrientation)) {
        [self.view setNeedsLayout];
    }
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
}

- (void)dealloc
{
    [self stop];
}

#pragma mark - Class Methods

+ (void)requestCameraPermission:(void (^)(BOOL granted))completionBlock
{
    if ([AVCaptureDevice respondsToSelector:@selector(requestAccessForMediaType:completionHandler:)]) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            // return to main thread
            dispatch_async(dispatch_get_main_queue(), ^{
                if(completionBlock) {
                    completionBlock(granted);
                }
            });
        }];
    } else {
        completionBlock(YES);
    }
}

+ (BOOL)isFrontCameraAvailable
{
    return [UIImagePickerController isCameraDeviceAvailable:UIImagePickerControllerCameraDeviceFront];
}

+ (BOOL)isRearCameraAvailable
{
    return [UIImagePickerController isCameraDeviceAvailable:UIImagePickerControllerCameraDeviceRear];
}

@end
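
/*
 Usage sketch (illustrative only, not part of the library): the snippet below assumes a host
 view controller and a shutter-button handler, neither of which exists in this file, and uses
 only the public API implemented above (initWithQuality:position:, attachToViewController:withFrame:,
 start, and capture:exactSeenImage:).

     // create the camera and embed its view in the host view controller
     LLSimpleCamera *camera = [[LLSimpleCamera alloc] initWithQuality:AVCaptureSessionPresetHigh
                                                             position:LLCameraPositionRear];
     [camera attachToViewController:self withFrame:self.view.bounds];

     // surface camera errors (permission denied, session failures, ...)
     camera.onError = ^(LLSimpleCamera *cam, NSError *error) {
         NSLog(@"Camera error: %@", error);
     };

     // requests permission, then builds and starts the capture session
     [camera start];

     // later, e.g. from a shutter button handler:
     [camera capture:^(LLSimpleCamera *cam, UIImage *image, NSDictionary *metadata, NSError *error) {
         if (error) {
             NSLog(@"Capture failed: %@", error);
             return;
         }
         // use the captured image and its EXIF metadata here
     } exactSeenImage:YES];
 */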