//
//  SLAvCaptureSession.m
//  DarkMode
//
//  Created by wsl on 2019/11/7.
//  Copyright © 2019 https://github.com/wsl2ls ----- All rights reserved.
//
#import "SLAvCaptureSession.h"
#import <CoreMotion/CoreMotion.h>
  10. @interface SLAvCaptureSession () <AVCapturePhotoCaptureDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
  11. @property (nonatomic, strong) AVCaptureSession *session; //采集会话
  12. @property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;//摄像头采集内容展示区域
  13. @property (nonatomic, strong) AVCaptureDeviceInput *audioInput; //音频输入流
  14. @property (nonatomic, strong) AVCaptureDeviceInput *videoInput; //视频输入流
  15. @property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput; //视频数据帧输出流
  16. @property (nonatomic, strong) AVCaptureAudioDataOutput *audioDataOutput; //音频数据帧输出流
  17. @property (nonatomic, strong) CMMotionManager *motionManager; //运动传感器 监测设备方向
  18. @end
  19. @implementation SLAvCaptureSession
  20. + (instancetype)sharedAvCaptureSession {
  21. static SLAvCaptureSession *avCaptureSession = nil;
  22. static dispatch_once_t onceToken;
  23. dispatch_once(&onceToken, ^{
  24. avCaptureSession = [[SLAvCaptureSession alloc] init];
  25. });
  26. return avCaptureSession;
  27. }
  28. #pragma mark - Override
  29. - (instancetype)init {
  30. self = [super init];
  31. if (self) {
  32. }
  33. return self;
  34. }
  35. - (void)dealloc {
  36. [self stopRunning];
  37. }
  38. #pragma mark - HelpMethods
  39. //获取指定位置的摄像头
  40. - (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)positon{
  41. if (@available(iOS 10.2, *)) {
  42. AVCaptureDeviceDiscoverySession *dissession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInDualCamera,AVCaptureDeviceTypeBuiltInTelephotoCamera,AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:positon];
  43. for (AVCaptureDevice *device in dissession.devices) {
  44. if ([device position] == positon) {
  45. return device;
  46. }
  47. }
  48. } else {
  49. NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  50. for (AVCaptureDevice *device in devices) {
  51. if ([device position] == positon) {
  52. return device;
  53. }
  54. }
  55. }
  56. return nil;
  57. }
  58. //最小缩放值 焦距
  59. - (CGFloat)minZoomFactor {
  60. CGFloat minZoomFactor = 1.0;
  61. if (@available(iOS 11.0, *)) {
  62. minZoomFactor = [self.videoInput device].minAvailableVideoZoomFactor;
  63. }
  64. return minZoomFactor;
  65. }
  66. //最大缩放值 焦距
  67. - (CGFloat)maxZoomFactor {
  68. CGFloat maxZoomFactor = [self.videoInput device].activeFormat.videoMaxZoomFactor;
  69. if (@available(iOS 11.0, *)) {
  70. maxZoomFactor = [self.videoInput device].maxAvailableVideoZoomFactor;
  71. }
  72. if (maxZoomFactor > 6) {
  73. maxZoomFactor = 6.0;
  74. }
  75. return maxZoomFactor;
  76. }
  77. #pragma mark - Getter
  78. - (AVCaptureSession *)session{
  79. if (_session == nil){
  80. _session = [[AVCaptureSession alloc] init];
  81. //高质量采集率
  82. [_session setSessionPreset:AVCaptureSessionPreset1280x720];
  83. if([_session canAddInput:self.videoInput]) [_session addInput:self.videoInput]; //添加视频输入流
  84. if([_session canAddInput:self.audioInput]) [_session addInput:self.audioInput]; //添加音频输入流
  85. if([_session canAddOutput:self.videoDataOutput]) [_session addOutput:self.videoDataOutput]; //视频数据输出流 纯画面
  86. if([_session canAddOutput:self.audioDataOutput]) [_session addOutput:self.audioDataOutput]; //音频数据输出流
  87. AVCaptureConnection * captureVideoConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  88. // 设置是否为镜像,前置摄像头采集到的数据本来就是翻转的,这里设置为镜像把画面转回来
  89. if (self.devicePosition == AVCaptureDevicePositionFront && captureVideoConnection.supportsVideoMirroring) {
  90. captureVideoConnection.videoMirrored = YES;
  91. }
  92. captureVideoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
  93. }
  94. return _session;
  95. }
  96. - (AVCaptureDeviceInput *)videoInput {
  97. if (_videoInput == nil) {
  98. //添加一个视频输入设备 默认是后置摄像头
  99. AVCaptureDevice *videoCaptureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
  100. //创建视频输入流
  101. _videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:nil];
  102. if (!_videoInput){
  103. NSLog(@"获得摄像头失败");
  104. return nil;
  105. }
  106. }
  107. return _videoInput;
  108. }
  109. - (AVCaptureDeviceInput *)audioInput {
  110. if (_audioInput == nil) {
  111. NSError * error = nil;
  112. //添加一个音频输入/捕获设备
  113. AVCaptureDevice * audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
  114. _audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:&error];
  115. if (error) {
  116. NSLog(@"获得音频输入设备失败:%@",error.localizedDescription);
  117. }
  118. }
  119. return _audioInput;
  120. }
  121. - (AVCaptureVideoDataOutput *)videoDataOutput {
  122. if (_videoDataOutput == nil) {
  123. _videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
  124. [_videoDataOutput setSampleBufferDelegate:self queue:dispatch_get_global_queue(0, 0)];
  125. }
  126. return _videoDataOutput;
  127. }
  128. - (AVCaptureAudioDataOutput *)audioDataOutput {
  129. if (_audioDataOutput == nil) {
  130. _audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
  131. [_audioDataOutput setSampleBufferDelegate:self queue:dispatch_get_global_queue(0, 0)];
  132. }
  133. return _audioDataOutput;
  134. }
  135. - (AVCaptureVideoPreviewLayer *)previewLayer {
  136. if (_previewLayer == nil) {
  137. _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
  138. _previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
  139. }
  140. return _previewLayer;
  141. }
  142. - (CMMotionManager *)motionManager {
  143. if (!_motionManager) {
  144. _motionManager = [[CMMotionManager alloc] init];
  145. }
  146. return _motionManager;
  147. }
  148. - (BOOL)isRunning {
  149. return self.session.isRunning;
  150. }
  151. - (AVCaptureDevicePosition)devicePosition {
  152. if([[self.videoInput device] position] == AVCaptureDevicePositionUnspecified) {
  153. return AVCaptureDevicePositionBack;
  154. }
  155. AVCaptureDevicePosition devicePosition = [[self.videoInput device] position];
  156. return devicePosition;
  157. }
  158. - (CGFloat)videoZoomFactor {
  159. return [self.videoInput device].videoZoomFactor;
  160. }
  161. #pragma mark - Setter
  162. - (void)setPreview:(nullable UIView *)preview {
  163. if (preview == nil) {
  164. [self.previewLayer removeFromSuperlayer];
  165. }else {
  166. self.previewLayer.frame = preview.bounds;
  167. [preview.layer addSublayer:self.previewLayer];
  168. }
  169. _preview = preview;
  170. }
  171. - (void)setVideoZoomFactor:(CGFloat)videoZoomFactor {
  172. NSError *error = nil;
  173. if (videoZoomFactor <= self.maxZoomFactor &&
  174. videoZoomFactor >= self.minZoomFactor){
  175. if ([[self.videoInput device] lockForConfiguration:&error] ) {
  176. [self.videoInput device].videoZoomFactor = videoZoomFactor;
  177. [[self.videoInput device] unlockForConfiguration];
  178. } else {
  179. NSLog( @"调节焦距失败: %@", error );
  180. }
  181. }
  182. }
  183. - (void)setShootingOrientation:(UIDeviceOrientation)shootingOrientation {
  184. if (_shootingOrientation == shootingOrientation) {
  185. return;
  186. }
  187. _shootingOrientation = shootingOrientation;
  188. }
  189. #pragma mark - EventsHandle
  190. ///启动捕获
  191. - (void)startRunning {
  192. if(!self.session.isRunning) {
  193. [self.session startRunning];
  194. }
  195. [self startUpdateDeviceDirection];
  196. }
  197. ///结束捕获
  198. - (void)stopRunning {
  199. if (self.session.isRunning) {
  200. [self.session stopRunning];
  201. }
  202. [self stopUpdateDeviceDirection];
  203. }
  204. //设置聚焦点和模式 默认连续自动聚焦和自动曝光模式
  205. - (void)focusAtPoint:(CGPoint)focalPoint {
  206. //将UI坐标转化为摄像头坐标 (0,0) -> (1,1)
  207. CGPoint cameraPoint = [self.previewLayer captureDevicePointOfInterestForPoint:focalPoint];
  208. AVCaptureDevice *captureDevice = [self.videoInput device];
  209. NSError * error;
  210. //注意改变设备属性前一定要首先调用lockForConfiguration:调用完之后使用unlockForConfiguration方法解锁
  211. if ([captureDevice lockForConfiguration:&error]) {
  212. if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
  213. if ([captureDevice isFocusPointOfInterestSupported]) {
  214. [captureDevice setFocusPointOfInterest:cameraPoint];
  215. }
  216. [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
  217. }
  218. //曝光模式
  219. if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
  220. if ([captureDevice isExposurePointOfInterestSupported]) {
  221. [captureDevice setExposurePointOfInterest:cameraPoint];
  222. }
  223. [captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
  224. }
  225. [captureDevice unlockForConfiguration];
  226. } else {
  227. NSLog(@"设置聚焦点错误:%@", error.localizedDescription);
  228. }
  229. }
  230. //切换前/后置摄像头
  231. - (void)switchsCamera:(AVCaptureDevicePosition)devicePosition {
  232. //当前设备方向
  233. if (self.devicePosition == devicePosition) {
  234. return;
  235. }
  236. AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:[self getCameraDeviceWithPosition:devicePosition] error:nil];
  237. //先开启配置,配置完成后提交配置改变
  238. [self.session beginConfiguration];
  239. //移除原有输入对象
  240. [self.session removeInput:self.videoInput];
  241. //添加新的输入对象
  242. if ([self.session canAddInput:videoInput]) {
  243. [self.session addInput:videoInput];
  244. self.videoInput = videoInput;
  245. }
  246. //视频输入对象发生了改变 视频输出的链接也要重新初始化
  247. AVCaptureConnection * captureConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  248. if (self.devicePosition == AVCaptureDevicePositionFront && captureConnection.supportsVideoMirroring) {
  249. captureConnection.videoMirrored = YES;
  250. }
  251. captureConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
  252. //提交新的输入对象
  253. [self.session commitConfiguration];
  254. }
  255. #pragma mark - 重力感应监测设备方向
  256. ///开始监听设备方向
  257. - (void)startUpdateDeviceDirection {
  258. if ([self.motionManager isAccelerometerAvailable] == YES) {
  259. //回调会一直调用,建议获取到就调用下面的停止方法,需要再重新开始,当然如果需求是实时不间断的话可以等离开页面之后再stop
  260. [self.motionManager setAccelerometerUpdateInterval:1.0];
  261. __weak typeof(self) weakSelf = self;
  262. [self.motionManager startAccelerometerUpdatesToQueue:[NSOperationQueue currentQueue] withHandler:^(CMAccelerometerData *accelerometerData, NSError *error) {
  263. double x = accelerometerData.acceleration.x;
  264. double y = accelerometerData.acceleration.y;
  265. if ((fabs(y) + 0.1f) >= fabs(x)) {
  266. if (y >= 0.1f) {
  267. // NSLog(@"Down");
  268. if (weakSelf.shootingOrientation == UIDeviceOrientationPortraitUpsideDown) {
  269. return ;
  270. }
  271. weakSelf.shootingOrientation = UIDeviceOrientationPortraitUpsideDown;
  272. } else {
  273. // NSLog(@"Portrait");
  274. if (weakSelf.shootingOrientation == UIDeviceOrientationPortrait) {
  275. return ;
  276. }
  277. weakSelf.shootingOrientation = UIDeviceOrientationPortrait;
  278. }
  279. } else {
  280. if (x >= 0.1f) {
  281. // NSLog(@"Right");
  282. if (weakSelf.shootingOrientation == UIDeviceOrientationLandscapeRight) {
  283. return ;
  284. }
  285. weakSelf.shootingOrientation = UIDeviceOrientationLandscapeRight;
  286. } else if (x <= 0.1f) {
  287. // NSLog(@"Left");
  288. if (weakSelf.shootingOrientation == UIDeviceOrientationLandscapeLeft) {
  289. return ;
  290. }
  291. weakSelf.shootingOrientation = UIDeviceOrientationLandscapeLeft;
  292. } else {
  293. // NSLog(@"Portrait");
  294. if (weakSelf.shootingOrientation == UIDeviceOrientationPortrait) {
  295. return ;
  296. }
  297. weakSelf.shootingOrientation = UIDeviceOrientationPortrait;
  298. }
  299. }
  300. }];
  301. }
  302. }
  303. /// 停止监测方向
  304. - (void)stopUpdateDeviceDirection {
  305. if ([self.motionManager isAccelerometerActive] == YES) {
  306. [self.motionManager stopAccelerometerUpdates];
  307. _motionManager = nil;
  308. }
  309. }
  310. #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate AVCaptureAudioDataOutputSampleBufferDelegate 实时输出音视频
  311. /// 实时输出采集到的音视频帧内容
  312. - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
  313. if (!sampleBuffer) {
  314. return;
  315. }
  316. //提供对外接口,方便自定义处理
  317. if (output == self.videoDataOutput) {
  318. if([self.delegate respondsToSelector:@selector(captureSession:didOutputVideoSampleBuffer:fromConnection:)]) {
  319. [self.delegate captureSession:self didOutputVideoSampleBuffer:sampleBuffer fromConnection:connection];
  320. }
  321. }
  322. if (output == self.audioDataOutput) {
  323. if([self.delegate respondsToSelector:@selector(captureSession:didOutputAudioSampleBuffer:fromConnection:)]) {
  324. [self.delegate captureSession:self didOutputAudioSampleBuffer:sampleBuffer fromConnection:connection];
  325. }
  326. }
  327. }
  328. /// 实时输出丢弃的音视频帧内容
  329. - (void)captureOutput:(AVCaptureOutput *)output didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection API_AVAILABLE(ios(6.0)) {
  330. }
  331. @end