//
//  LLSimpleCamera+Helper.m
//  LLSimpleCameraExample
//
//  Created by Ömer Faruk Gül on 20/02/16.
//  Copyright © 2016 Ömer Faruk Gül. All rights reserved.
//

#import "LLSimpleCamera+Helper.h"

@implementation LLSimpleCamera (Helper)

- (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates
                                           previewLayer:(AVCaptureVideoPreviewLayer *)previewLayer
                                                  ports:(NSArray<AVCaptureInputPort *> *)ports
{
    // Translate a tap location in the preview layer into the capture device's
    // point-of-interest space, where (0,0) is the top-left and (1,1) the
    // bottom-right of the sensor in its native (landscape) orientation.
    CGPoint pointOfInterest = CGPointMake(.5f, .5f);
    CGSize frameSize = previewLayer.frame.size;

    if ([previewLayer.videoGravity isEqualToString:AVLayerVideoGravityResize]) {
        // The video is stretched to fill the layer, so a simple scale plus a
        // 90-degree coordinate rotation is enough.
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        CGRect cleanAperture;
        for (AVCaptureInputPort *port in ports) {
            if ([port.mediaType isEqualToString:AVMediaTypeVideo]) {
                cleanAperture = CMVideoFormatDescriptionGetCleanAperture(port.formatDescription, YES);
                CGSize apertureSize = cleanAperture.size;
                CGPoint point = viewCoordinates;

                CGFloat apertureRatio = apertureSize.height / apertureSize.width;
                CGFloat viewRatio = frameSize.width / frameSize.height;
                CGFloat xc = .5f;
                CGFloat yc = .5f;

                if ([previewLayer.videoGravity isEqualToString:AVLayerVideoGravityResizeAspect]) {
                    // Aspect-fit: the video is letterboxed/pillarboxed inside the
                    // layer; taps on the black bars keep the default (0.5, 0.5).
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = frameSize.height;
                        CGFloat x2 = frameSize.height * apertureRatio;
                        CGFloat x1 = frameSize.width;
                        CGFloat blackBar = (x1 - x2) / 2;
                        if (point.x >= blackBar && point.x <= blackBar + x2) {
                            xc = point.y / y2;
                            yc = 1.f - ((point.x - blackBar) / x2);
                        }
                    } else {
                        CGFloat y2 = frameSize.width / apertureRatio;
                        CGFloat y1 = frameSize.height;
                        CGFloat x2 = frameSize.width;
                        CGFloat blackBar = (y1 - y2) / 2;
                        if (point.y >= blackBar && point.y <= blackBar + y2) {
                            xc = ((point.y - blackBar) / y2);
                            yc = 1.f - (point.x / x2);
                        }
                    }
                } else if ([previewLayer.videoGravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
                    // Aspect-fill: part of the video extends beyond the layer, so
                    // the tap is offset by the hidden margin before scaling.
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                        xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                        yc = (frameSize.width - point.x) / frameSize.width;
                    } else {
                        CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                        yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                        xc = point.y / frameSize.height;
                    }
                }

                pointOfInterest = CGPointMake(xc, yc);
                break;
            }
        }
    }

    return pointOfInterest;
}

- (UIImage *)cropImage:(UIImage *)image usingPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer
{
    // Crop the captured image down to the region that was actually visible in
    // the preview layer, using the layer's normalized (0-1) metadata rect.
    CGRect previewBounds = previewLayer.bounds;
    CGRect outputRect = [previewLayer metadataOutputRectOfInterestForRect:previewBounds];

    CGImageRef takenCGImage = image.CGImage;
    size_t width = CGImageGetWidth(takenCGImage);
    size_t height = CGImageGetHeight(takenCGImage);

    // Scale the normalized rect up to pixel coordinates of the captured image.
    CGRect cropRect = CGRectMake(outputRect.origin.x * width, outputRect.origin.y * height,
                                 outputRect.size.width * width, outputRect.size.height * height);

    CGImageRef cropCGImage = CGImageCreateWithImageInRect(takenCGImage, cropRect);
    image = [UIImage imageWithCGImage:cropCGImage scale:1 orientation:image.imageOrientation];
    CGImageRelease(cropCGImage);

    return image;
}

@end
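
// Usage sketch (illustrative only, not part of the original file): how these
// helpers are typically called from a tap-to-focus gesture handler and after a
// still-image capture. The `camera`, `rawImage`, `gestureRecognizer` names and
// the `captureVideoPreviewLayer` / `videoDeviceInput` properties below are
// assumptions made for this example, not guaranteed LLSimpleCamera API.
//
//     // Map a tap in the preview into the device's point-of-interest space.
//     CGPoint tapPoint = [gestureRecognizer locationInView:camera.view];
//     CGPoint poi = [camera convertToPointOfInterestFromViewCoordinates:tapPoint
//                                                          previewLayer:camera.captureVideoPreviewLayer
//                                                                 ports:camera.videoDeviceInput.ports];
//
//     // Crop a captured still so it matches exactly what the preview showed.
//     UIImage *visible = [camera cropImage:rawImage usingPreviewLayer:camera.captureVideoPreviewLayer];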