// QRScannerViewController.m
//  _____ _
// |_   _| |_  _ _ ___ ___ _ __  __ _
//   | | | ' \| '_/ -_) -_) '  \/ _` |_
//   |_| |_||_|_| \___\___|_|_|_\__,_(_)
//
// Threema iOS Client
// Copyright (c) 2014-2020 Threema GmbH
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License, version 3,
// as published by the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
  20. #import "QRScannerViewController.h"
  21. #import <AVFoundation/AVFoundation.h>
  22. #import "MBProgressHUD.h"
  23. #import "Threema-Swift.h"
  24. #ifdef DEBUG
  25. static const DDLogLevel ddLogLevel = DDLogLevelVerbose;
  26. #else
  27. static const DDLogLevel ddLogLevel = DDLogLevelWarning;
  28. #endif
  29. #define MEDIA_TYPE AVMediaTypeVideo
  30. @interface Barcode : NSObject
  31. @property (nonatomic, strong) AVMetadataMachineReadableCodeObject *metadataObject;
  32. @property (nonatomic, strong) UIBezierPath *cornersPath;
  33. @property (nonatomic, strong) UIBezierPath *boundingBoxPath;
  34. @end
  35. @implementation Barcode
  36. @end
  37. @implementation QRScannerViewController {
  38. AVCaptureSession *_captureSession;
  39. AVCaptureDevice *_videoDevice;
  40. AVCaptureDeviceInput *_videoInput;
  41. AVCaptureVideoPreviewLayer *_previewLayer;
  42. AVCaptureMetadataOutput *_metadataOutput;
  43. BOOL _running;
  44. NSMutableDictionary *_barcodes;
  45. CGFloat _initialPinchZoom;
  46. }
  47. #pragma mark -
  48. - (instancetype)init
  49. {
  50. self = [super init];
  51. if (self) {
  52. self.navigationItem.rightBarButtonItem = [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemCancel target:self action:@selector(cancelScan)];
  53. }
  54. return self;
  55. }
  56. - (void)loadView {
  57. [super loadView];
  58. self.view.backgroundColor = [UIColor blackColor];
  59. self.previewView = [[UIView alloc] initWithFrame:self.view.bounds];
  60. [MBProgressHUD showHUDAddedTo:self.view animated:YES];
  61. [self.view addSubview:self.previewView];
  62. }
  63. - (void)viewDidLoad {
  64. [super viewDidLoad];
  65. if ([self hasCameraAccess]) {
  66. [self setupCaptureSession];
  67. }
  68. _barcodes = [NSMutableDictionary new];
  69. }
  70. - (void)dealloc {
  71. [[NSNotificationCenter defaultCenter] removeObserver:self];
  72. }
  73. - (void)viewDidAppear:(BOOL)animated {
  74. [super viewDidAppear:animated];
  75. [self startRunning];
  76. [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationWillEnterForeground:) name:UIApplicationWillEnterForegroundNotification object:nil];
  77. [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil];
  78. }
  79. - (void)viewWillDisappear:(BOOL)animated {
  80. [super viewWillDisappear:animated];
  81. [self stopRunning];
  82. [[NSNotificationCenter defaultCenter] removeObserver:self];
  83. if (_barcodes.count == 0) {
  84. [self.delegate qrScannerViewControllerDidCancel:self];
  85. }
  86. }
  87. #pragma mark - Notifications
  88. - (void)applicationWillEnterForeground:(NSNotification*)note {
  89. [self startRunning];
  90. }
  91. - (void)applicationDidEnterBackground:(NSNotification*)note {
  92. [self stopRunning];
  93. }
  94. #pragma mark - Actions
  95. - (void)cancelScan {
  96. [self.delegate qrScannerViewControllerDidCancel:self];
  97. }
  98. - (void)pinchDetected:(UIPinchGestureRecognizer*)recogniser {
  99. if (!_videoDevice) return;
  100. if (recogniser.state == UIGestureRecognizerStateBegan) {
  101. _initialPinchZoom = _videoDevice.videoZoomFactor;
  102. }
  103. NSError *error = nil;
  104. [_videoDevice lockForConfiguration:&error];
  105. if (!error) {
  106. CGFloat zoomFactor;
  107. CGFloat scale = recogniser.scale;
  108. if (scale < 1.0f) {
  109. zoomFactor = _initialPinchZoom - pow(_videoDevice.activeFormat.videoMaxZoomFactor, 1.0f - recogniser.scale);
  110. } else {
  111. zoomFactor = _initialPinchZoom + pow(_videoDevice.activeFormat.videoMaxZoomFactor, (recogniser.scale - 1.0f) / 2.0f);
  112. }
  113. zoomFactor = MIN(10.0f, zoomFactor);
  114. zoomFactor = MAX(1.0f, zoomFactor);
  115. _videoDevice.videoZoomFactor = zoomFactor;
  116. [_videoDevice unlockForConfiguration];
  117. }
  118. }
  119. #pragma mark - Video stuff
  120. - (void)startRunning {
  121. if (_captureSession) {
  122. if (_running) return;
  123. [_captureSession startRunning];
  124. _metadataOutput.metadataObjectTypes = _metadataOutput.availableMetadataObjectTypes;
  125. if ([[VoIPCallStateManager shared] currentCallState] == CallStateIdle) {
  126. [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryAmbient withOptions:0 error:nil];
  127. [[AVAudioSession sharedInstance] setActive:YES error:nil];
  128. }
  129. _running = YES;
  130. }
  131. }
  132. - (void)stopRunning {
  133. if (_captureSession) {
  134. if (!_running) return;
  135. [_captureSession stopRunning];
  136. if ([[VoIPCallStateManager shared] currentCallState] == CallStateIdle) {
  137. [[AVAudioSession sharedInstance] setActive:NO withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:nil];
  138. }
  139. _running = NO;
  140. }
  141. }
  142. - (BOOL)hasCameraAccess {
  143. AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:MEDIA_TYPE];
  144. if (authStatus == AVAuthorizationStatusAuthorized) {
  145. return YES;
  146. } else if(authStatus == AVAuthorizationStatusDenied || authStatus == AVAuthorizationStatusRestricted){
  147. [self showCameraAccessAlert];
  148. } else if(authStatus == AVAuthorizationStatusNotDetermined){
  149. [AVCaptureDevice requestAccessForMediaType:MEDIA_TYPE completionHandler:^(BOOL granted) {
  150. if(granted){
  151. dispatch_async(dispatch_get_main_queue(), ^{
  152. [self setupCaptureSession];
  153. [self startRunning];
  154. });
  155. } else {
  156. DDLogError(@"Camera access not granted");
  157. }
  158. }];
  159. }
  160. return NO;
  161. }
  162. - (void)showCameraAccessAlert {
  163. [UIAlertTemplate showAlertWithOwner:self title:NSLocalizedString(@"camera_disabled_title", nil) message:NSLocalizedString(@"camera_disabled_message", nil) actionOk:nil];
  164. }
  165. - (void)setupCaptureSession {
  166. if (_captureSession) return;
  167. _videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:MEDIA_TYPE];
  168. if (!_videoDevice) {
  169. DDLogError(@"No video camera on this device!");
  170. return;
  171. }
  172. _captureSession = [[AVCaptureSession alloc] init];
  173. _videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_videoDevice error:nil];
  174. if ([_captureSession canAddInput:_videoInput]) {
  175. [_captureSession addInput:_videoInput];
  176. }
  177. _previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
  178. _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
  179. _previewLayer.frame = _previewView.bounds;
  180. [_previewView.layer addSublayer:_previewLayer];
  181. [_previewView addGestureRecognizer:[[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(pinchDetected:)]];
  182. _metadataOutput = [[AVCaptureMetadataOutput alloc] init];
  183. dispatch_queue_t metadataQueue = dispatch_queue_create("ch.threema.app.qrmetadata", 0);
  184. [_metadataOutput setMetadataObjectsDelegate:self queue:metadataQueue];
  185. if ([_captureSession canAddOutput:_metadataOutput]) {
  186. [_captureSession addOutput:_metadataOutput];
  187. }
  188. }
  189. #pragma mark -
  190. - (Barcode*)processMetadataObject:(AVMetadataMachineReadableCodeObject*)code {
  191. if (code.stringValue == nil)
  192. return nil; /* e.g. when scanning binary data */
  193. Barcode *barcode = _barcodes[code.stringValue];
  194. if (!barcode) {
  195. barcode = [Barcode new];
  196. _barcodes[code.stringValue] = barcode;
  197. }
  198. barcode.metadataObject = code;
  199. // Create the path joining code's corners
  200. CGMutablePathRef cornersPath = CGPathCreateMutable();
  201. CGPoint point;
  202. CGPointMakeWithDictionaryRepresentation((CFDictionaryRef)code.corners[0], &point);
  203. CGPathMoveToPoint(cornersPath, nil, point.x, point.y);
  204. for (int i = 1; i < code.corners.count; i++) {
  205. CGPointMakeWithDictionaryRepresentation((CFDictionaryRef)code.corners[i], &point);
  206. CGPathAddLineToPoint(cornersPath, nil, point.x, point.y);
  207. }
  208. CGPathCloseSubpath(cornersPath);
  209. barcode.cornersPath = [UIBezierPath bezierPathWithCGPath:cornersPath];
  210. CGPathRelease(cornersPath);
  211. barcode.boundingBoxPath = [UIBezierPath bezierPathWithRect:code.bounds];
  212. return barcode;
  213. }
  214. #pragma mark - AVCaptureMetadataOutputObjectsDelegate
  215. - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
  216. NSSet *originalBarcodes = [NSSet setWithArray:_barcodes.allValues];
  217. NSMutableSet *foundBarcodes = [NSMutableSet new];
  218. [metadataObjects enumerateObjectsUsingBlock:^(AVMetadataObject *obj, NSUInteger idx, BOOL *stop) {
  219. DDLogVerbose(@"Metadata: %@", obj);
  220. if ([obj isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
  221. AVMetadataMachineReadableCodeObject *code = (AVMetadataMachineReadableCodeObject*)[_previewLayer transformedMetadataObjectForMetadataObject:obj];
  222. Barcode *barcode = [self processMetadataObject:code];
  223. if (barcode != nil)
  224. [foundBarcodes addObject:barcode];
  225. }
  226. }];
  227. NSMutableSet *newBarcodes = [foundBarcodes mutableCopy];
  228. [newBarcodes minusSet:originalBarcodes];
  229. NSMutableSet *goneBarcodes = [originalBarcodes mutableCopy];
  230. [goneBarcodes minusSet:foundBarcodes];
  231. [goneBarcodes enumerateObjectsUsingBlock:^(Barcode *barcode, BOOL *stop) {
  232. [_barcodes removeObjectForKey:barcode.metadataObject.stringValue];
  233. }];
  234. dispatch_sync(dispatch_get_main_queue(), ^{
  235. // Remove all old layers
  236. NSArray *allSublayers = [_previewView.layer.sublayers copy];
  237. [allSublayers enumerateObjectsUsingBlock:^(CALayer *layer, NSUInteger idx, BOOL *stop) {
  238. if (layer != _previewLayer) {
  239. [layer removeFromSuperlayer];
  240. }
  241. }];
  242. // Add new layers
  243. [foundBarcodes enumerateObjectsUsingBlock:^(Barcode *barcode, BOOL *stop) {
  244. CAShapeLayer *cornersPathLayer = [CAShapeLayer new];
  245. cornersPathLayer.path = barcode.cornersPath.CGPath;
  246. cornersPathLayer.lineWidth = 2.0f;
  247. cornersPathLayer.strokeColor = [UIColor blueColor].CGColor;
  248. cornersPathLayer.fillColor = [UIColor colorWithRed:0.0f green:0.0f blue:1.0f alpha:0.5f].CGColor;
  249. [_previewView.layer addSublayer:cornersPathLayer];
  250. }];
  251. [newBarcodes enumerateObjectsUsingBlock:^(Barcode *barcode, BOOL *stop) {
  252. // call delegate with slight delay so that user can see our fancy box
  253. [self stopRunning];
  254. dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
  255. [self.delegate qrScannerViewController:self didScanResult:barcode.metadataObject.stringValue];
  256. });
  257. }];
  258. });
  259. }
  260. @end