BEVideoCapture.m

// Copyright (C) 2018 Beijing Bytedance Network Technology Co., Ltd.
#import "BEVideoCapture.h"
#import <UIKit/UIKit.h>
#import <UIView+Toast.h>
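// BEVideoCapture wraps an AVCaptureSession: it emits camera frames through
// its delegate and exposes preview / scan-mask layers plus exposure and
// focus helpers.
//
// Minimal usage sketch (assumes the delegate protocol and VideoCaptureError
// enum declared in BEVideoCapture.h):
//
//   BEVideoCapture *capture = [[BEVideoCapture alloc] init]; // starts running once authorized
//   capture.delegate = self;                                 // receives sample buffers and errors
//   [self.view.layer addSublayer:capture.previewLayer];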
@interface BEVideoCapture () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>

@property (nonatomic, readwrite) AVCaptureDevicePosition devicePosition; // default AVCaptureDevicePositionFront
@property (nonatomic, strong) AVCaptureDeviceInput *deviceInput;
@property (nonatomic, strong) AVCaptureVideoDataOutput *dataOutput;
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureDevice *device;
@property (nonatomic, strong) dispatch_queue_t bufferQueue;
@property (nonatomic, assign) BOOL isPaused;
@property (nonatomic, assign) BOOL isFlipped;
@property (nonatomic, strong) NSMutableArray *observerArray;
@property (nonatomic, strong) AVCaptureMetadataOutput *metaDataOutput;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
//@property (nonatomic, strong) UIView *maskView; // scan frame
@property (nonatomic, strong) CAShapeLayer *maskLayer;
@property (nonatomic, assign) BOOL isFirstApply;
//@property (nonatomic, strong) AVCaptureSession *session;
//@property (nonatomic, strong) dispatch_queue_t videoQueue;
@property (nonatomic, strong) dispatch_queue_t audioQueue;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;
@property (nonatomic, strong) AVCaptureAudioDataOutput *audioOutput;
@property (nonatomic, strong) AVCaptureConnection *videoConnection;
@property (nonatomic, strong) AVCaptureConnection *audioConnection;

@end
#define mainScreenSize [UIScreen mainScreen].bounds.size
#define scanRectWidth 240
#define scanRectHeight 360
#define scanRect CGRectMake(mainScreenSize.width / 2 - scanRectWidth / 2, \
mainScreenSize.height / 2 - scanRectHeight / 2, scanRectWidth, scanRectHeight)
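// scanRect centers a 240x360 pt window on screen; e.g. on a 375x667 pt
// screen it evaluates to (67.5, 153.5, 240, 360).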
@implementation BEVideoCapture

#pragma mark - Lifetime

- (instancetype)init {
    self = [super init];
    if (self) {
        self.isPaused = YES;
        self.isFlipped = YES;
        self.videoOrientation = AVCaptureVideoOrientationPortrait;
        [self _setupCaptureSession];
        self.observerArray = [NSMutableArray array];
        _isFirstApply = YES;
    }
    return self;
}
- (void)dealloc {
    if (!_session) {
        return;
    }
    _isPaused = YES;
    [_session beginConfiguration];
    [_session removeOutput:_dataOutput];
    [_session removeInput:_deviceInput];
    [_session commitConfiguration];
    if ([_session isRunning]) {
        [_session stopRunning];
    }
    _session = nil;
    for (id observer in self.observerArray) {
        [[NSNotificationCenter defaultCenter] removeObserver:observer];
    }
}
#pragma mark - Public

- (void)startRunning {
    if (!(_dataOutput || _metaDataOutput)) {
        return;
    }
    if (_session && ![_session isRunning]) {
        [_session startRunning];
        _isPaused = NO;
    }
}

- (void)stopRunning {
    if (_session && [_session isRunning]) {
        [_session stopRunning];
        _isPaused = YES;
    }
}

- (void)pause {
    _isPaused = YES;
}

- (void)resume {
    _isPaused = NO;
}
- (void)switchCamera {
    if (_session == nil) {
        return;
    }
    AVCaptureDevicePosition targetPosition = _devicePosition == AVCaptureDevicePositionFront ? AVCaptureDevicePositionBack : AVCaptureDevicePositionFront;
    [self switchCamera:targetPosition];
}

- (void)switchCamera:(AVCaptureDevicePosition)targetPosition {
    if (_devicePosition == targetPosition) {
        return;
    }
    AVCaptureDevice *targetDevice = [self _cameraDeviceWithPosition:targetPosition];
    if (targetDevice == nil) {
        return;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:targetDevice error:&error];
    if (!deviceInput || error) {
        [self _throwError:VideoCaptureErrorFailedCreateInput];
        //NSLog(@"Error creating capture device input: %@", error.localizedDescription);
        return;
    }
    [self pause];
    [_session beginConfiguration];
    [_session removeInput:_deviceInput];
    if ([_session canAddInput:deviceInput]) {
        [_session addInput:deviceInput];
        _deviceInput = deviceInput;
        _device = targetDevice;
        _devicePosition = targetPosition;
        [self setOrientation:_videoOrientation];
        // mirror the front camera so the output matches the preview
        [self setFlip:targetPosition == AVCaptureDevicePositionFront];
    } else {
        // restore the previous input so the session is not left without one
        [_session addInput:_deviceInput];
    }
    [_session commitConfiguration];
    [self resume];
}
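// The flip / orientation setters below act on the data output's
// AVCaptureConnection only; AVCaptureVideoPreviewLayer manages its own
// connection and is not affected by them.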
- (void)setFlip:(BOOL)isFlip {
    if (_session == nil || _dataOutput == nil) {
        return;
    }
    AVCaptureConnection *videoConnection = [_dataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (videoConnection && [videoConnection isVideoMirroringSupported]) {
        [videoConnection setVideoMirrored:isFlip];
        _isFlipped = isFlip;
    }
}

- (void)setOrientation:(AVCaptureVideoOrientation)orientation {
    if (_session == nil || _dataOutput == nil) {
        return;
    }
    AVCaptureConnection *videoConnection = [_dataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (videoConnection && [videoConnection isVideoOrientationSupported]) {
        [videoConnection setVideoOrientation:orientation];
        _videoOrientation = orientation;
    }
}
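// Exposure mapping used below: the public `exposure` value in [0, 1] is
// linearly (and inversely) mapped onto the bias range [minBias, maxBias]:
//   bias = maxBias - exposure * (maxBias - minBias)
// e.g. exposure = 0.5 gives 1.58 - 0.5 * 2.96 = 0.10 EV. The 1.58 / -1.38
// limits look hand-tuned; the device's true range is available via
// AVCaptureDevice.minExposureTargetBias / maxExposureTargetBias.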
- (CGFloat)maxBias {
    return 1.58;
}

- (CGFloat)minBias {
    return -1.38;
}

- (CGFloat)ratio {
    return [self maxBias] - [self minBias];
}

- (void)setExposure:(float)exposure {
    if (_device == nil) return;
    NSError *error = nil;
    // sync exposureTargetBias logic
    CGFloat bias = [self maxBias] - exposure * [self ratio];
    bias = MIN(MAX(bias, [self minBias]), [self maxBias]);
    if (![_device lockForConfiguration:&error]) {
        return;
    }
    [_device setExposureTargetBias:bias completionHandler:nil];
    if ([_device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        [_device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
    }
    [_device unlockForConfiguration];
}
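// Points of interest below are in AVFoundation's normalized device space,
// (0,0) top-left to (1,1) bottom-right of the unrotated sensor output; a tap
// on the preview would typically be converted first with
// -[AVCaptureVideoPreviewLayer captureDevicePointOfInterestForPoint:].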
- (void)setExposurePointOfInterest:(CGPoint)point {
    if (_device == nil) return;
    if (![_device lockForConfiguration:nil]) return;
    if ([_device isExposurePointOfInterestSupported]) {
        [_device setExposurePointOfInterest:point];
    }
    if ([_device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        [_device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
    }
    [_device unlockForConfiguration];
}

- (void)setFocusPointOfInterest:(CGPoint)point {
    if (_device == nil) return;
    if (![_device lockForConfiguration:nil]) return;
    if ([_device isFocusPointOfInterestSupported]) {
        [_device setFocusPointOfInterest:point];
    }
    if ([_device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        [_device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
    }
    [_device unlockForConfiguration];
}
#pragma mark - Private

- (void)_requestCameraAuthorization:(void (^)(BOOL granted))handler {
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (authStatus == AVAuthorizationStatusNotDetermined) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            handler(granted);
        }];
    } else if (authStatus == AVAuthorizationStatusAuthorized) {
        handler(YES);
    } else {
        handler(NO);
    }
}

// request authorization first
- (void)_setupCaptureSession {
    [self _requestCameraAuthorization:^(BOOL granted) {
        if (granted) {
            [self __setupCaptureSession];
        } else {
            [self _throwError:VideoCaptureErrorAuthNotGranted];
        }
    }];
}
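// Note: when authorization is not yet determined, the completion handler of
// requestAccessForMediaType: fires on an arbitrary queue, so the session is
// built asynchronously and may not exist immediately after -init returns.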
- (void)__setupCaptureSession {
    _session = [[AVCaptureSession alloc] init];
    [_session beginConfiguration];
    if ([_session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        [_session setSessionPreset:AVCaptureSessionPreset1280x720];
        _sessionPreset = AVCaptureSessionPreset1280x720;
    } else {
        [_session setSessionPreset:AVCaptureSessionPresetHigh];
        _sessionPreset = AVCaptureSessionPresetHigh;
    }
    [_session commitConfiguration];
    _device = [self _cameraDeviceWithPosition:AVCaptureDevicePositionFront];
    [self _setCameraParameters];
    [self setExposure:0.5];
    _devicePosition = AVCaptureDevicePositionFront;
    _bufferQueue = dispatch_queue_create("HTSCameraBufferQueue", NULL);
    // Input
    NSError *error = nil;
    _deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
    if (!_deviceInput) {
        [self _throwError:VideoCaptureErrorFailedCreateInput];
        return;
    }
    // Output: YUV 420 full-range is the camera's native format; BGRA costs an extra conversion
    int iCVPixelFormatType = _isOutputWithYUV ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;
    _dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_dataOutput setAlwaysDiscardsLateVideoFrames:YES];
    [_dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : @(iCVPixelFormatType)}];
    [_dataOutput setSampleBufferDelegate:self queue:_bufferQueue];
    [_session beginConfiguration];
    if ([_session canAddOutput:_dataOutput]) {
        [_session addOutput:_dataOutput];
    } else {
        [self _throwError:VideoCaptureErrorFailedAddDataOutput];
        NSLog(@"Could not add video data output to the session");
    }
    if ([_session canAddInput:_deviceInput]) {
        [_session addInput:_deviceInput];
    } else {
        [self _throwError:VideoCaptureErrorFailedAddDeviceInput];
        NSLog(@"Could not add device input to the session");
    }
    [_session commitConfiguration];
    [self setFlip:_isFlipped];
    [self setOrientation:_videoOrientation];
//    _videoConnection = [_dataOutput connectionWithMediaType:AVMediaTypeVideo];
//    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
//    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
//    if (error) {
//        //NSLog(@"Error getting video input device: %@", error.description);
//    }
//    if ([_session canAddInput:audioInput]) {
//        [_session addInput:audioInput];
//    }
//    _audioQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
//    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
//    [_audioOutput setSampleBufferDelegate:self queue:_audioQueue];
//    if ([_session canAddOutput:_audioOutput]) {
//        [_session addOutput:_audioOutput];
//    }
//    // Keep the connections so the sample buffer delegate can tell video from audio
//    _videoConnection = [_dataOutput connectionWithMediaType:AVMediaTypeVideo];
//    _audioConnection = [_audioOutput connectionWithMediaType:AVMediaTypeAudio];
    [self registerNotification];
    [self startRunning];
}
- (void)_setCameraParameters {
    if (![_device lockForConfiguration:nil]) return;
    // continuous auto focus
    if ([_device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        [_device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
    }
    // continuous auto exposure
    if ([_device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        [_device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
    }
    // exposure compensation value
    // [_device setExposureTargetBias:0.98 completionHandler:nil];
    [_device unlockForConfiguration];
}
- (void)registerNotification {
    __weak typeof(self) weakSelf = self;
    [self.observerArray addObject:[[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationDidBecomeActiveNotification object:nil queue:[NSOperationQueue mainQueue] usingBlock:^(NSNotification * _Nonnull note) {
        __strong typeof(weakSelf) strongSelf = weakSelf;
        [strongSelf startRunning];
    }]];
    [self.observerArray addObject:[[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationWillResignActiveNotification object:nil queue:[NSOperationQueue mainQueue] usingBlock:^(NSNotification * _Nonnull note) {
        __strong typeof(weakSelf) strongSelf = weakSelf;
        [strongSelf stopRunning];
    }]];
}
- (void)_throwError:(VideoCaptureError)error {
    if (_delegate && [_delegate respondsToSelector:@selector(videoCapture:didFailedToStartWithError:)]) {
        [_delegate videoCapture:self didFailedToStartWithError:error];
    }
}

- (AVCaptureDevice *)_cameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    AVCaptureDevice *deviceRet = nil;
    if (position != AVCaptureDevicePositionUnspecified) {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        for (AVCaptureDevice *device in devices) {
            if ([device position] == position) {
                deviceRet = device;
            }
        }
    }
    return deviceRet;
}
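// Note: devicesWithMediaType: has been deprecated since iOS 10; a modern
// equivalent would enumerate via AVCaptureDeviceDiscoverySession, e.g.
//   [AVCaptureDeviceDiscoverySession
//       discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
//                             mediaType:AVMediaTypeVideo
//                              position:position].devices.firstObject;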
#pragma mark - Util

- (CGSize)videoSize {
    if (_dataOutput.videoSettings) {
        CGFloat width = [[_dataOutput.videoSettings objectForKey:(id)kCVPixelBufferWidthKey] floatValue];
        CGFloat height = [[_dataOutput.videoSettings objectForKey:(id)kCVPixelBufferHeightKey] floatValue];
        return CGSizeMake(width, height);
    }
    return CGSizeZero;
}

- (CGRect)getZoomedRectWithRect:(CGRect)rect scaleToFit:(BOOL)scaleToFit {
    CGRect rectRet = rect;
    if (_dataOutput.videoSettings) {
        CGFloat width = [[_dataOutput.videoSettings objectForKey:(id)kCVPixelBufferWidthKey] floatValue];
        CGFloat height = [[_dataOutput.videoSettings objectForKey:(id)kCVPixelBufferHeightKey] floatValue];
        CGFloat scaleX = width / CGRectGetWidth(rect);
        CGFloat scaleY = height / CGRectGetHeight(rect);
        CGFloat scale = scaleToFit ? fmaxf(scaleX, scaleY) : fminf(scaleX, scaleY);
        width = round(width / scale);
        height = round(height / scale);
        // CGFloat x = rect.origin.x - (width - rect.size.width) / 2.0f;
        // CGFloat y = rect.origin.y - (height - rect.size.height) / 2.0f;
        rectRet = CGRectMake(0, 0, width, height);
    }
    return rectRet;
}
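// Example: with a 720x1280 output and rect = (0, 0, 375, 667), scaleToFit:YES
// picks scale = max(720/375, 1280/667) = 1.92, so the result is
// (0, 0, 375, 667): the output size scaled down to the given rect's aspect.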
- (CGRect)getZoomedRectWithRect1:(CGRect)rect scaleToFit:(BOOL)bScaleToFit {
    CGRect rectRet = rect;
    CGFloat fWidth = 720;
    CGFloat fHeight = 1280;
    // if ([[YQUIToolsManager getDeciceInfoTool] get480Device]) {
    //     fWidth = 480;
    //     fHeight = 640;
    // }
    float fScaleX = fWidth / CGRectGetWidth(rect);
    float fScaleY = fHeight / CGRectGetHeight(rect);
    float fScale = bScaleToFit ? fmaxf(fScaleX, fScaleY) : fminf(fScaleX, fScaleY);
    fWidth /= fScale;
    fHeight /= fScale;
    CGFloat fX = rect.origin.x - (fWidth - rect.size.width) / 2.0f;
    CGFloat fY = rect.origin.y - (fHeight - rect.size.height) / 2.0f;
    rectRet = CGRectMake(fX, fY, fWidth, fHeight);
    return rectRet;
}
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (!_isPaused) {
        if (_delegate && [_delegate respondsToSelector:@selector(videoCapture:didOutputSampleBuffer:andType:)]) {
            // if (connection == _videoConnection) { // Video
            //     [_delegate videoCapture:self didOutputSampleBuffer:sampleBuffer andType:1];
            // } else if (connection == _audioConnection) { // Audio
            //     [_delegate videoCapture:self didOutputSampleBuffer:sampleBuffer andType:2];
            // }
            [_delegate videoCapture:self didOutputSampleBuffer:sampleBuffer andType:1];
        }
    }
}
#pragma mark - getter && setter

- (void)setSessionPreset:(NSString *)sessionPreset {
    if ([sessionPreset isEqualToString:_sessionPreset]) {
        return;
    }
    if (!_session) {
        return;
    }
    [self pause];
    [_session beginConfiguration];
    if ([_session canSetSessionPreset:sessionPreset]) {
        [_session setSessionPreset:sessionPreset];
        _sessionPreset = sessionPreset;
    }
    [_session commitConfiguration];
    [self resume];
}
- (void)setIsOutputWithYUV:(BOOL)isOutputWithYUV {
    if (_isOutputWithYUV == isOutputWithYUV) {
        return;
    }
    _isOutputWithYUV = isOutputWithYUV;
    int iCVPixelFormatType = _isOutputWithYUV ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;
    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
    [dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : @(iCVPixelFormatType)}];
    [dataOutput setSampleBufferDelegate:self queue:_bufferQueue];
    [self pause];
    [_session beginConfiguration];
    [_session removeOutput:_dataOutput];
    if ([_session canAddOutput:dataOutput]) {
        [_session addOutput:dataOutput];
        _dataOutput = dataOutput;
    } else {
        [self _throwError:VideoCaptureErrorFailedAddDataOutput];
        //NSLog(@"session add data output failed when change output buffer pixel format.");
    }
    [_session commitConfiguration];
    [self resume];
    /// make the buffer portrait
    [self setOrientation:_videoOrientation];
    [self setFlip:_isFlipped];
}
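// Replacing the data output also replaces its AVCaptureConnection, which is
// why orientation and mirroring are re-applied right after the swap.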
- (void)coverToMetadataOutputRectOfInterestForRect:(CGRect)cropRect {
    CGSize size = [UIScreen mainScreen].bounds.size;
    CGFloat p1 = size.height / size.width;
    CGFloat p2 = 0.0;
    if ([_session.sessionPreset isEqualToString:AVCaptureSessionPreset1280x720]) {
        p2 = 1280. / 720.;
    } else if ([_session.sessionPreset isEqualToString:AVCaptureSessionPreset640x480]) {
        p2 = 640. / 480.;
    }
    if (p2 == 0.0) {
        // unknown preset; avoid dividing by zero below
        return;
    }
    if (p1 < p2) {
        CGFloat fixHeight = size.width * p2;
        CGFloat fixPadding = (fixHeight - size.height) / 2;
        _metaDataOutput.rectOfInterest = CGRectMake((cropRect.origin.y + fixPadding) / fixHeight,
                                                    (size.width - (cropRect.size.width + cropRect.origin.x)) / size.width,
                                                    cropRect.size.height / fixHeight,
                                                    cropRect.size.width / size.width);
    } else {
        CGFloat fixWidth = size.height * (1 / p2);
        CGFloat fixPadding = (fixWidth - size.width) / 2;
        _metaDataOutput.rectOfInterest = CGRectMake(cropRect.origin.y / size.height,
                                                    (size.width - (cropRect.size.width + cropRect.origin.x) + fixPadding) / fixWidth,
                                                    cropRect.size.height / size.height,
                                                    cropRect.size.width / fixWidth);
    }
}
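// Note: rectOfInterest is expressed in the metadata output's normalized
// coordinate space, which appears rotated 90 degrees relative to a portrait
// screen -- hence x is derived from the crop rect's y and vice versa. The
// simpler, officially supported conversion is
// -[AVCaptureVideoPreviewLayer metadataOutputRectOfInterestForRect:].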
#pragma mark - getter

- (AVCaptureVideoPreviewLayer *)previewLayer {
    if (!_previewLayer) {
        _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
        _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _previewLayer.frame = [UIScreen mainScreen].bounds;
        _previewLayer.backgroundColor = [UIColor grayColor].CGColor;
    }
    return _previewLayer;
}
- (CAShapeLayer *)maskLayer {
    if (!_maskLayer) {
        CGSize screenSize = [UIScreen mainScreen].bounds.size;
        CGFloat screenWidth = screenSize.width;
        CGFloat screenHeight = screenSize.height;
        CGFloat left = screenWidth / 2 - scanRectWidth / 2;
        CGFloat right = screenWidth - left;
        CGFloat top = screenHeight / 2 - scanRectHeight / 2;
        CGFloat bottom = screenHeight - top;
        // four rects (left/right/top/bottom) dim everything around the scan window
        UIBezierPath *leftPath = [UIBezierPath bezierPathWithRect:CGRectMake(0., 0., left, screenHeight)];
        UIBezierPath *rightPath = [UIBezierPath bezierPathWithRect:CGRectMake(right, 0., left, screenHeight)];
        UIBezierPath *topPath = [UIBezierPath bezierPathWithRect:CGRectMake(left, 0., scanRectWidth, top)];
        UIBezierPath *bottomPath = [UIBezierPath bezierPathWithRect:CGRectMake(left, bottom, scanRectWidth, screenHeight - bottom)];
        [leftPath appendPath:rightPath];
        [leftPath appendPath:topPath];
        [leftPath appendPath:bottomPath];
        _maskLayer = [CAShapeLayer layer];
        _maskLayer.fillColor = [UIColor blackColor].CGColor;
        _maskLayer.opacity = 0.5;
        _maskLayer.path = leftPath.CGPath;
    }
    return _maskLayer;
}

@end