From db9d4071ce7d96ca43996c05d06eab2b48b1bf0d Mon Sep 17 00:00:00 2001 From: Khaled Alshehri <39571180+i5xx@users.noreply.github.com> Date: Wed, 8 Aug 2018 02:27:52 +0300 Subject: [PATCH] Add files via upload --- ManagedCapturer/SCCaptureDeviceResolver.m | 147 ++ .../SCCaptureFaceDetectionParser.h | 43 + .../SCCaptureFaceDetectionParser.m | 94 + ManagedCapturer/SCCaptureFaceDetector.h | 31 + .../SCCaptureFaceDetectorTrigger.h | 22 + .../SCCaptureFaceDetectorTrigger.m | 97 + .../SCCaptureMetadataObjectParser.h | 23 + .../SCCaptureMetadataObjectParser.m | 38 + .../SCCaptureMetadataOutputDetector.h | 19 + .../SCCaptureMetadataOutputDetector.m | 175 ++ ManagedCapturer/SCCapturer.h | 225 ++ .../SCCapturerBufferedVideoWriter.h | 44 + .../SCCapturerBufferedVideoWriter.m | 430 ++++ ManagedCapturer/SCCapturerDefines.h | 20 + ManagedCapturer/SCCapturerToken.h | 18 + ManagedCapturer/SCCapturerToken.m | 30 + ManagedCapturer/SCCapturerTokenProvider.h | 20 + ManagedCapturer/SCCapturerTokenProvider.m | 42 + ManagedCapturer/SCExposureState.h | 18 + ManagedCapturer/SCExposureState.m | 47 + ManagedCapturer/SCFileAudioCaptureSession.h | 19 + ManagedCapturer/SCFileAudioCaptureSession.m | 243 ++ ManagedCapturer/SCManagedAudioStreamer.h | 20 + ManagedCapturer/SCManagedAudioStreamer.m | 115 + ...SCManagedCaptureDevice+SCManagedCapturer.h | 71 + ...reDevice+SCManagedDeviceCapacityAnalyzer.h | 17 + ManagedCapturer/SCManagedCaptureDevice.h | 60 + ManagedCapturer/SCManagedCaptureDevice.m | 821 +++++++ ...CManagedCaptureDeviceAutoExposureHandler.h | 17 + ...CManagedCaptureDeviceAutoExposureHandler.m | 63 + .../SCManagedCaptureDeviceAutoFocusHandler.h | 18 + .../SCManagedCaptureDeviceAutoFocusHandler.m | 131 + ...SCManagedCaptureDeviceDefaultZoomHandler.h | 25 + ...SCManagedCaptureDeviceDefaultZoomHandler.m | 93 + ...dCaptureDeviceDefaultZoomHandler_Private.h | 17 + .../SCManagedCaptureDeviceExposureHandler.h | 22 + ...reDeviceFaceDetectionAutoExposureHandler.h | 28 + ...reDeviceFaceDetectionAutoExposureHandler.m | 121 + ...ptureDeviceFaceDetectionAutoFocusHandler.h | 28 + ...ptureDeviceFaceDetectionAutoFocusHandler.m | 153 ++ .../SCManagedCaptureDeviceFocusHandler.h | 28 + .../SCManagedCaptureDeviceHandler.h | 23 + .../SCManagedCaptureDeviceHandler.m | 77 + ...tureDeviceLinearInterpolationZoomHandler.h | 12 + ...tureDeviceLinearInterpolationZoomHandler.m | 190 ++ ...CaptureDeviceLockOnRecordExposureHandler.h | 20 + ...CaptureDeviceLockOnRecordExposureHandler.m | 90 + ...gedCaptureDeviceSavitzkyGolayZoomHandler.h | 13 + ...gedCaptureDeviceSavitzkyGolayZoomHandler.m | 95 + ...SCManagedCaptureDeviceSubjectAreaHandler.h | 23 + ...SCManagedCaptureDeviceSubjectAreaHandler.m | 67 + ...gedCaptureDeviceThresholdExposureHandler.h | 19 + ...gedCaptureDeviceThresholdExposureHandler.m | 133 + ...CaptureFaceDetectionAdjustingPOIResource.h | 61 + ...CaptureFaceDetectionAdjustingPOIResource.m | 232 ++ .../SCManagedCapturePreviewLayerController.h | 80 + .../SCManagedCapturePreviewLayerController.m | 563 +++++ ManagedCapturer/SCManagedCapturePreviewView.h | 25 + ManagedCapturer/SCManagedCapturePreviewView.m | 173 ++ .../SCManagedCapturePreviewViewDebugView.h | 14 + .../SCManagedCapturePreviewViewDebugView.m | 204 ++ ManagedCapturer/SCManagedCaptureSession.h | 67 + ManagedCapturer/SCManagedCaptureSession.m | 74 + ManagedCapturer/SCManagedCapturer.h | 23 + ManagedCapturer/SCManagedCapturer.m | 26 + .../SCManagedCapturerARSessionHandler.h | 26 + .../SCManagedCapturerARSessionHandler.m | 76 + ManagedCapturer/SCManagedCapturerListener.h | 
135 + .../SCManagedCapturerListenerAnnouncer.h | 12 + .../SCManagedCapturerListenerAnnouncer.mm | 505 ++++ .../SCManagedCapturerSampleMetadata.h | 26 + .../SCManagedCapturerSampleMetadata.m | 24 + ManagedCapturer/SCManagedCapturerState.h | 93 + ManagedCapturer/SCManagedCapturerState.m | 359 +++ ManagedCapturer/SCManagedCapturerState.value | 20 + .../SCManagedCapturerStateBuilder.h | 46 + .../SCManagedCapturerStateBuilder.m | 158 ++ ManagedCapturer/SCManagedCapturerUtils.h | 36 + ManagedCapturer/SCManagedCapturerUtils.m | 153 ++ ManagedCapturer/SCManagedCapturerV1.h | 57 + ManagedCapturer/SCManagedCapturerV1.m | 2165 +++++++++++++++++ ManagedCapturer/SCManagedCapturerV1_Private.h | 20 + .../SCManagedDeviceCapacityAnalyzer.h | 32 + .../SCManagedDeviceCapacityAnalyzer.m | 294 +++ .../SCManagedDeviceCapacityAnalyzerHandler.h | 20 + .../SCManagedDeviceCapacityAnalyzerHandler.m | 72 + .../SCManagedDeviceCapacityAnalyzerListener.h | 35 + ...dDeviceCapacityAnalyzerListenerAnnouncer.h | 12 + ...DeviceCapacityAnalyzerListenerAnnouncer.mm | 146 ++ .../SCManagedDroppedFramesReporter.h | 25 + .../SCManagedDroppedFramesReporter.m | 86 + ManagedCapturer/SCManagedFrameHealthChecker.h | 57 + ManagedCapturer/SCManagedFrameHealthChecker.m | 709 ++++++ .../SCManagedFrontFlashController.h | 18 + .../SCManagedFrontFlashController.m | 105 + .../SCManagedLegacyStillImageCapturer.h | 13 + .../SCManagedLegacyStillImageCapturer.m | 460 ++++ ManagedCapturer/SCManagedPhotoCapturer.h | 13 + 98 files changed, 12005 insertions(+) create mode 100644 ManagedCapturer/SCCaptureDeviceResolver.m create mode 100644 ManagedCapturer/SCCaptureFaceDetectionParser.h create mode 100644 ManagedCapturer/SCCaptureFaceDetectionParser.m create mode 100644 ManagedCapturer/SCCaptureFaceDetector.h create mode 100644 ManagedCapturer/SCCaptureFaceDetectorTrigger.h create mode 100644 ManagedCapturer/SCCaptureFaceDetectorTrigger.m create mode 100644 ManagedCapturer/SCCaptureMetadataObjectParser.h create mode 100644 ManagedCapturer/SCCaptureMetadataObjectParser.m create mode 100644 ManagedCapturer/SCCaptureMetadataOutputDetector.h create mode 100644 ManagedCapturer/SCCaptureMetadataOutputDetector.m create mode 100644 ManagedCapturer/SCCapturer.h create mode 100644 ManagedCapturer/SCCapturerBufferedVideoWriter.h create mode 100644 ManagedCapturer/SCCapturerBufferedVideoWriter.m create mode 100644 ManagedCapturer/SCCapturerDefines.h create mode 100644 ManagedCapturer/SCCapturerToken.h create mode 100644 ManagedCapturer/SCCapturerToken.m create mode 100644 ManagedCapturer/SCCapturerTokenProvider.h create mode 100644 ManagedCapturer/SCCapturerTokenProvider.m create mode 100644 ManagedCapturer/SCExposureState.h create mode 100644 ManagedCapturer/SCExposureState.m create mode 100644 ManagedCapturer/SCFileAudioCaptureSession.h create mode 100644 ManagedCapturer/SCFileAudioCaptureSession.m create mode 100644 ManagedCapturer/SCManagedAudioStreamer.h create mode 100644 ManagedCapturer/SCManagedAudioStreamer.m create mode 100644 ManagedCapturer/SCManagedCaptureDevice+SCManagedCapturer.h create mode 100644 ManagedCapturer/SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h create mode 100644 ManagedCapturer/SCManagedCaptureDevice.h create mode 100644 ManagedCapturer/SCManagedCaptureDevice.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.h create mode 100644 
ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler_Private.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceExposureHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceFocusHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.h create mode 100644 ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.m create mode 100644 ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.h create mode 100644 ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.m create mode 100644 ManagedCapturer/SCManagedCapturePreviewLayerController.h create mode 100644 ManagedCapturer/SCManagedCapturePreviewLayerController.m create mode 100644 ManagedCapturer/SCManagedCapturePreviewView.h create mode 100644 ManagedCapturer/SCManagedCapturePreviewView.m create mode 100644 ManagedCapturer/SCManagedCapturePreviewViewDebugView.h create mode 100644 ManagedCapturer/SCManagedCapturePreviewViewDebugView.m create mode 100644 ManagedCapturer/SCManagedCaptureSession.h create mode 100644 ManagedCapturer/SCManagedCaptureSession.m create mode 100644 ManagedCapturer/SCManagedCapturer.h create mode 100644 ManagedCapturer/SCManagedCapturer.m create mode 100644 ManagedCapturer/SCManagedCapturerARSessionHandler.h create mode 100644 ManagedCapturer/SCManagedCapturerARSessionHandler.m create mode 100644 ManagedCapturer/SCManagedCapturerListener.h create mode 100644 ManagedCapturer/SCManagedCapturerListenerAnnouncer.h create mode 100644 ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm create mode 100644 ManagedCapturer/SCManagedCapturerSampleMetadata.h create mode 100644 ManagedCapturer/SCManagedCapturerSampleMetadata.m create mode 100644 ManagedCapturer/SCManagedCapturerState.h create mode 100644 ManagedCapturer/SCManagedCapturerState.m create mode 100644 ManagedCapturer/SCManagedCapturerState.value create mode 100644 ManagedCapturer/SCManagedCapturerStateBuilder.h create mode 100644 ManagedCapturer/SCManagedCapturerStateBuilder.m create mode 100644 ManagedCapturer/SCManagedCapturerUtils.h create mode 100644 ManagedCapturer/SCManagedCapturerUtils.m 
create mode 100644 ManagedCapturer/SCManagedCapturerV1.h create mode 100644 ManagedCapturer/SCManagedCapturerV1.m create mode 100644 ManagedCapturer/SCManagedCapturerV1_Private.h create mode 100644 ManagedCapturer/SCManagedDeviceCapacityAnalyzer.h create mode 100644 ManagedCapturer/SCManagedDeviceCapacityAnalyzer.m create mode 100644 ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.h create mode 100644 ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.m create mode 100644 ManagedCapturer/SCManagedDeviceCapacityAnalyzerListener.h create mode 100644 ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h create mode 100644 ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm create mode 100644 ManagedCapturer/SCManagedDroppedFramesReporter.h create mode 100644 ManagedCapturer/SCManagedDroppedFramesReporter.m create mode 100644 ManagedCapturer/SCManagedFrameHealthChecker.h create mode 100644 ManagedCapturer/SCManagedFrameHealthChecker.m create mode 100644 ManagedCapturer/SCManagedFrontFlashController.h create mode 100644 ManagedCapturer/SCManagedFrontFlashController.m create mode 100644 ManagedCapturer/SCManagedLegacyStillImageCapturer.h create mode 100644 ManagedCapturer/SCManagedLegacyStillImageCapturer.m create mode 100644 ManagedCapturer/SCManagedPhotoCapturer.h diff --git a/ManagedCapturer/SCCaptureDeviceResolver.m b/ManagedCapturer/SCCaptureDeviceResolver.m new file mode 100644 index 0000000..9bcdc67 --- /dev/null +++ b/ManagedCapturer/SCCaptureDeviceResolver.m @@ -0,0 +1,147 @@ +// +// SCCaptureDeviceResolver.m +// Snapchat +// +// Created by Lin Jia on 11/8/17. +// +// + +#import "SCCaptureDeviceResolver.h" + +#import "SCCameraTweaks.h" + +#import +#import + +@interface SCCaptureDeviceResolver () { + AVCaptureDeviceDiscoverySession *_discoverySession; +} + +@end + +@implementation SCCaptureDeviceResolver + ++ (instancetype)sharedInstance +{ + static SCCaptureDeviceResolver *resolver; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + resolver = [[SCCaptureDeviceResolver alloc] init]; + }); + return resolver; +} + +- (instancetype)init +{ + self = [super init]; + if (self) { + NSMutableArray *deviceTypes = [[NSMutableArray alloc] init]; + [deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera]; + if (SC_AT_LEAST_IOS_10_2) { + [deviceTypes addObject:AVCaptureDeviceTypeBuiltInDualCamera]; + } + // TODO: we should KVO _discoverySession.devices. + _discoverySession = + [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes + mediaType:AVMediaTypeVideo + position:AVCaptureDevicePositionUnspecified]; + } + return self; +} + +- (AVCaptureDevice *)findAVCaptureDevice:(AVCaptureDevicePosition)position +{ + SCAssert(position == AVCaptureDevicePositionFront || position == AVCaptureDevicePositionBack, @""); + AVCaptureDevice *captureDevice; + if (position == AVCaptureDevicePositionFront) { + captureDevice = [self _pickBestFrontCamera:[_discoverySession.devices copy]]; + } else if (position == AVCaptureDevicePositionBack) { + captureDevice = [self _pickBestBackCamera:[_discoverySession.devices copy]]; + } + if (captureDevice) { + return captureDevice; + } + + if (SC_AT_LEAST_IOS_10_2 && SCCameraTweaksEnableDualCamera()) { + captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera + mediaType:AVMediaTypeVideo + position:position]; + if (captureDevice) { + return captureDevice; + } + } + + // if code still execute, discoverSession failed, then we keep searching. 
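+    // Last resort: ask AVCaptureDevice directly for the default built-in wide-angle camera at the requested position.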
+ captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera + mediaType:AVMediaTypeVideo + position:position]; + if (captureDevice) { + return captureDevice; + } + +#if !TARGET_IPHONE_SIMULATOR + // We do not return nil at the beginning of the function for simulator, because simulators of different IOS + // versions can check whether or not our camera device API access is correct. + SCAssertFail(@"No camera is found."); +#endif + return nil; +} + +- (AVCaptureDevice *)_pickBestFrontCamera:(NSArray *)devices +{ + for (AVCaptureDevice *device in devices) { + if (device.position == AVCaptureDevicePositionFront) { + return device; + } + } + return nil; +} + +- (AVCaptureDevice *)_pickBestBackCamera:(NSArray *)devices +{ + // Look for dual camera first if needed. If dual camera not found, continue to look for wide angle camera. + if (SC_AT_LEAST_IOS_10_2 && SCCameraTweaksEnableDualCamera()) { + for (AVCaptureDevice *device in devices) { + if (device.position == AVCaptureDevicePositionBack && + device.deviceType == AVCaptureDeviceTypeBuiltInDualCamera) { + return device; + } + } + } + + for (AVCaptureDevice *device in devices) { + if (device.position == AVCaptureDevicePositionBack && + device.deviceType == AVCaptureDeviceTypeBuiltInWideAngleCamera) { + return device; + } + } + return nil; +} + +- (AVCaptureDevice *)findDualCamera +{ + if (SC_AT_LEAST_IOS_10_2) { + for (AVCaptureDevice *device in [_discoverySession.devices copy]) { + if (device.position == AVCaptureDevicePositionBack && + device.deviceType == AVCaptureDeviceTypeBuiltInDualCamera) { + return device; + } + } + } + + AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera + mediaType:AVMediaTypeVideo + position:AVCaptureDevicePositionBack]; + if (captureDevice) { + return captureDevice; + } + +#if !TARGET_IPHONE_SIMULATOR + // We do not return nil at the beginning of the function for simulator, because simulators of different IOS + // versions can check whether or not our camera device API access is correct. + SCAssertFail(@"No camera is found."); +#endif + return nil; +} + +@end diff --git a/ManagedCapturer/SCCaptureFaceDetectionParser.h b/ManagedCapturer/SCCaptureFaceDetectionParser.h new file mode 100644 index 0000000..3e4cf25 --- /dev/null +++ b/ManagedCapturer/SCCaptureFaceDetectionParser.h @@ -0,0 +1,43 @@ +// +// SCCaptureFaceDetectionParser.h +// Snapchat +// +// Created by Jiyang Zhu on 3/13/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// +// This class offers methods to parse face bounds from raw data, e.g., AVMetadataObject, CIFeature. + +#import + +#import +#import + +@interface SCCaptureFaceDetectionParser : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE; + +- (instancetype)initWithFaceBoundsAreaThreshold:(CGFloat)minimumArea; + +/** + Parse face bounds from AVMetadataObject. + + @param metadataObjects An array of AVMetadataObject. + @return A dictionary, value is faceBounds: CGRect, key is faceID: NSString. + */ +- (NSDictionary *)parseFaceBoundsByFaceIDFromMetadataObjects: + (NSArray<__kindof AVMetadataObject *> *)metadataObjects; + +/** + Parse face bounds from CIFeature. + + @param features An array of CIFeature. + @param imageSize Size of the image, where the feature are detected from. + @param imageOrientation Orientation of the image. + @return A dictionary, value is faceBounds: CGRect, key is faceID: NSString. 
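+ Note: in the implementation below, the keys are NSNumber-wrapped trackingIDs and the bounds are normalized to the [0, 1] image coordinate space.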
+ */ +- (NSDictionary *)parseFaceBoundsByFaceIDFromCIFeatures:(NSArray<__kindof CIFeature *> *)features + withImageSize:(CGSize)imageSize + imageOrientation: + (CGImagePropertyOrientation)imageOrientation; + +@end diff --git a/ManagedCapturer/SCCaptureFaceDetectionParser.m b/ManagedCapturer/SCCaptureFaceDetectionParser.m new file mode 100644 index 0000000..0e42585 --- /dev/null +++ b/ManagedCapturer/SCCaptureFaceDetectionParser.m @@ -0,0 +1,94 @@ +// +// SCCaptureFaceDetectionParser.m +// Snapchat +// +// Created by Jiyang Zhu on 3/13/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// + +#import "SCCaptureFaceDetectionParser.h" + +#import +#import +#import + +@implementation SCCaptureFaceDetectionParser { + CGFloat _minimumArea; +} + +- (instancetype)initWithFaceBoundsAreaThreshold:(CGFloat)minimumArea +{ + self = [super init]; + if (self) { + _minimumArea = minimumArea; + } + return self; +} + +- (NSDictionary *)parseFaceBoundsByFaceIDFromMetadataObjects: + (NSArray<__kindof AVMetadataObject *> *)metadataObjects +{ + SCTraceODPCompatibleStart(2); + NSMutableArray *faceObjects = [NSMutableArray array]; + [metadataObjects + enumerateObjectsUsingBlock:^(__kindof AVMetadataObject *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) { + if ([obj isKindOfClass:[AVMetadataFaceObject class]]) { + [faceObjects addObject:obj]; + } + }]; + + SC_GUARD_ELSE_RETURN_VALUE(faceObjects.count > 0, nil); + + NSMutableDictionary *faceBoundsByFaceID = + [NSMutableDictionary dictionaryWithCapacity:faceObjects.count]; + for (AVMetadataFaceObject *faceObject in faceObjects) { + CGRect bounds = faceObject.bounds; + if (CGRectGetWidth(bounds) * CGRectGetHeight(bounds) >= _minimumArea) { + [faceBoundsByFaceID setObject:[NSValue valueWithCGRect:bounds] forKey:@(faceObject.faceID)]; + } + } + return faceBoundsByFaceID; +} + +- (NSDictionary *)parseFaceBoundsByFaceIDFromCIFeatures:(NSArray<__kindof CIFeature *> *)features + withImageSize:(CGSize)imageSize + imageOrientation: + (CGImagePropertyOrientation)imageOrientation +{ + SCTraceODPCompatibleStart(2); + NSArray *faceFeatures = [features filteredArrayUsingBlock:^BOOL(id _Nonnull evaluatedObject) { + return [evaluatedObject isKindOfClass:[CIFaceFeature class]]; + }]; + + SC_GUARD_ELSE_RETURN_VALUE(faceFeatures.count > 0, nil); + + NSMutableDictionary *faceBoundsByFaceID = + [NSMutableDictionary dictionaryWithCapacity:faceFeatures.count]; + CGFloat width = imageSize.width; + CGFloat height = imageSize.height; + SCLogGeneralInfo(@"Face feature count:%d", faceFeatures.count); + for (CIFaceFeature *faceFeature in faceFeatures) { + SCLogGeneralInfo(@"Face feature: hasTrackingID:%d, bounds:%@", faceFeature.hasTrackingID, + NSStringFromCGRect(faceFeature.bounds)); + if (faceFeature.hasTrackingID) { + CGRect transferredBounds; + // Somehow the detected bounds for back camera is mirrored. 
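+            // Convert the pixel-space CIFaceFeature bounds into normalized [0, 1] coordinates;
+            // for kCGImagePropertyOrientationRight (back camera) the Y origin is flipped to undo the mirroring.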
+ if (imageOrientation == kCGImagePropertyOrientationRight) { + transferredBounds = CGRectMake( + CGRectGetMinX(faceFeature.bounds) / width, 1 - CGRectGetMaxY(faceFeature.bounds) / height, + CGRectGetWidth(faceFeature.bounds) / width, CGRectGetHeight(faceFeature.bounds) / height); + } else { + transferredBounds = CGRectMake( + CGRectGetMinX(faceFeature.bounds) / width, CGRectGetMinY(faceFeature.bounds) / height, + CGRectGetWidth(faceFeature.bounds) / width, CGRectGetHeight(faceFeature.bounds) / height); + } + if (CGRectGetWidth(transferredBounds) * CGRectGetHeight(transferredBounds) >= _minimumArea) { + [faceBoundsByFaceID setObject:[NSValue valueWithCGRect:transferredBounds] + forKey:@(faceFeature.trackingID)]; + } + } + } + return faceBoundsByFaceID; +} + +@end diff --git a/ManagedCapturer/SCCaptureFaceDetector.h b/ManagedCapturer/SCCaptureFaceDetector.h new file mode 100644 index 0000000..02b1550 --- /dev/null +++ b/ManagedCapturer/SCCaptureFaceDetector.h @@ -0,0 +1,31 @@ +// +// SCCaptureFaceDetector.h +// Snapchat +// +// Created by Jiyang Zhu on 3/27/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// +// This protocol declares properties and methods that are used for face detectors. + +#import + +@class SCCaptureResource; +@class SCQueuePerformer; +@class SCCaptureFaceDetectorTrigger; +@class SCCaptureFaceDetectionParser; + +@protocol SCCaptureFaceDetector + +@property (nonatomic, strong, readonly) SCCaptureFaceDetectorTrigger *trigger; + +@property (nonatomic, strong, readonly) SCCaptureFaceDetectionParser *parser; + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; + +- (SCQueuePerformer *)detectionPerformer; + +- (void)startDetection; + +- (void)stopDetection; + +@end diff --git a/ManagedCapturer/SCCaptureFaceDetectorTrigger.h b/ManagedCapturer/SCCaptureFaceDetectorTrigger.h new file mode 100644 index 0000000..2dd9d43 --- /dev/null +++ b/ManagedCapturer/SCCaptureFaceDetectorTrigger.h @@ -0,0 +1,22 @@ +// +// SCCaptureFaceDetectorTrigger.h +// Snapchat +// +// Created by Jiyang Zhu on 3/22/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// +// This class is used to control when should SCCaptureFaceDetector starts and stops. + +#import + +#import + +@protocol SCCaptureFaceDetector; + +@interface SCCaptureFaceDetectorTrigger : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE; + +- (instancetype)initWithDetector:(id)detector; + +@end diff --git a/ManagedCapturer/SCCaptureFaceDetectorTrigger.m b/ManagedCapturer/SCCaptureFaceDetectorTrigger.m new file mode 100644 index 0000000..23f625e --- /dev/null +++ b/ManagedCapturer/SCCaptureFaceDetectorTrigger.m @@ -0,0 +1,97 @@ +// +// SCCaptureFaceDetectorTrigger.m +// Snapchat +// +// Created by Jiyang Zhu on 3/22/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. 
+// + +#import "SCCaptureFaceDetectorTrigger.h" + +#import "SCCaptureFaceDetector.h" + +#import +#import +#import +#import +#import + +@interface SCCaptureFaceDetectorTrigger () { + id __weak _detector; +} +@end + +@implementation SCCaptureFaceDetectorTrigger + +- (instancetype)initWithDetector:(id)detector +{ + self = [super init]; + if (self) { + _detector = detector; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(_applicationDidBecomeActive) + name:kSCPostponedUIApplicationDidBecomeActiveNotification + object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(_applicationWillResignActive) + name:UIApplicationWillResignActiveNotification + object:nil]; + } + return self; +} + +#pragma mark - Internal Methods +- (void)_applicationWillResignActive +{ + SCTraceODPCompatibleStart(2); + [self _stopDetection]; +} + +- (void)_applicationDidBecomeActive +{ + SCTraceODPCompatibleStart(2); + [self _waitUntilAppStartCompleteToStartDetection]; +} + +- (void)_waitUntilAppStartCompleteToStartDetection +{ + SCTraceODPCompatibleStart(2); + @weakify(self); + + if (SCExperimentWithWaitUntilIdleReplacement()) { + [[SCTaskManager sharedManager] addTaskToRunWhenAppIdle:"SCCaptureFaceDetectorTrigger.startDetection" + performer:[_detector detectionPerformer] + block:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + + [self _startDetection]; + }]; + } else { + [[SCIdleMonitor sharedInstance] waitUntilIdleForTag:"SCCaptureFaceDetectorTrigger.startDetection" + callbackQueue:[_detector detectionPerformer].queue + block:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + [self _startDetection]; + }]; + } +} + +- (void)_startDetection +{ + SCTraceODPCompatibleStart(2); + [[_detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{ + [_detector startDetection]; + }]; +} + +- (void)_stopDetection +{ + SCTraceODPCompatibleStart(2); + [[_detector detectionPerformer] performImmediatelyIfCurrentPerformer:^{ + [_detector stopDetection]; + }]; +} + +@end diff --git a/ManagedCapturer/SCCaptureMetadataObjectParser.h b/ManagedCapturer/SCCaptureMetadataObjectParser.h new file mode 100644 index 0000000..42fd131 --- /dev/null +++ b/ManagedCapturer/SCCaptureMetadataObjectParser.h @@ -0,0 +1,23 @@ +// +// SCCaptureMetadataObjectParser.h +// Snapchat +// +// Created by Jiyang Zhu on 3/13/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// +// This class offers class methods to parse AVMetadataObject. + +#import + +@interface SCCaptureMetadataObjectParser : NSObject + +/** + Parse face bounds from AVMetadataObject. + + @param metadataObjects An array of AVMetadataObject. + @return A dictionary, value is faceBounds: CGRect, key is faceID: NSString. + */ +- (NSDictionary *)parseFaceBoundsByFaceIDFromMetadataObjects: + (NSArray<__kindof AVMetadataObject *> *)metadataObjects; + +@end diff --git a/ManagedCapturer/SCCaptureMetadataObjectParser.m b/ManagedCapturer/SCCaptureMetadataObjectParser.m new file mode 100644 index 0000000..7bb50d2 --- /dev/null +++ b/ManagedCapturer/SCCaptureMetadataObjectParser.m @@ -0,0 +1,38 @@ +// +// SCCaptureMetadataObjectParser.m +// Snapchat +// +// Created by Jiyang Zhu on 3/13/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. 
+// + +#import "SCCaptureMetadataObjectParser.h" + +#import + +@import UIKit; + +@implementation SCCaptureMetadataObjectParser + +- (NSDictionary *)parseFaceBoundsByFaceIDFromMetadataObjects: + (NSArray<__kindof AVMetadataObject *> *)metadataObjects +{ + NSMutableArray *faceObjects = [NSMutableArray array]; + [metadataObjects + enumerateObjectsUsingBlock:^(__kindof AVMetadataObject *_Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) { + if ([obj isKindOfClass:[AVMetadataFaceObject class]]) { + [faceObjects addObject:obj]; + } + }]; + + SC_GUARD_ELSE_RETURN_VALUE(faceObjects.count > 0, nil); + + NSMutableDictionary *faceBoundsByFaceID = + [NSMutableDictionary dictionaryWithCapacity:faceObjects.count]; + for (AVMetadataFaceObject *faceObject in faceObjects) { + [faceBoundsByFaceID setObject:[NSValue valueWithCGRect:faceObject.bounds] forKey:@(faceObject.faceID)]; + } + return faceBoundsByFaceID; +} + +@end diff --git a/ManagedCapturer/SCCaptureMetadataOutputDetector.h b/ManagedCapturer/SCCaptureMetadataOutputDetector.h new file mode 100644 index 0000000..532f773 --- /dev/null +++ b/ManagedCapturer/SCCaptureMetadataOutputDetector.h @@ -0,0 +1,19 @@ +// +// SCCaptureMetadataOutputDetector.h +// Snapchat +// +// Created by Jiyang Zhu on 12/21/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// +// This class is intended to detect faces in Camera. It receives AVMetadataFaceObjects, and announce the bounds and +// faceIDs. + +#import "SCCaptureFaceDetector.h" + +#import + +@interface SCCaptureMetadataOutputDetector : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE; + +@end diff --git a/ManagedCapturer/SCCaptureMetadataOutputDetector.m b/ManagedCapturer/SCCaptureMetadataOutputDetector.m new file mode 100644 index 0000000..901b2c2 --- /dev/null +++ b/ManagedCapturer/SCCaptureMetadataOutputDetector.m @@ -0,0 +1,175 @@ +// +// SCCaptureMetadataOutputDetector.m +// Snapchat +// +// Created by Jiyang Zhu on 12/21/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCCaptureMetadataOutputDetector.h" + +#import "SCCameraTweaks.h" +#import "SCCaptureFaceDetectionParser.h" +#import "SCCaptureFaceDetectorTrigger.h" +#import "SCCaptureResource.h" +#import "SCManagedCaptureSession.h" +#import "SCManagedCapturer.h" + +#import +#import +#import +#import +#import +#import +#import + +#define SCLogCaptureMetaDetectorInfo(fmt, ...) \ + SCLogCoreCameraInfo(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__) +#define SCLogCaptureMetaDetectorWarning(fmt, ...) \ + SCLogCoreCameraWarning(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__) +#define SCLogCaptureMetaDetectorError(fmt, ...) \ + SCLogCoreCameraError(@"[SCCaptureMetadataOutputDetector] " fmt, ##__VA_ARGS__) + +static char *const kSCCaptureMetadataOutputDetectorProcessQueue = + "com.snapchat.capture-metadata-output-detector-process"; + +static const NSInteger kDefaultNumberOfSequentialFramesWithFaces = -1; // -1 means no sequential frames with faces. 
+ +@interface SCCaptureMetadataOutputDetector () + +@end + +@implementation SCCaptureMetadataOutputDetector { + BOOL _isDetecting; + + AVCaptureMetadataOutput *_metadataOutput; + SCCaptureResource *_captureResource; + + SCCaptureFaceDetectionParser *_parser; + NSInteger _numberOfSequentialFramesWithFaces; + NSUInteger _detectionFrequency; + + SCQueuePerformer *_callbackPerformer; + SCQueuePerformer *_metadataProcessPerformer; + + SCCaptureFaceDetectorTrigger *_trigger; +} + +@synthesize trigger = _trigger; +@synthesize parser = _parser; + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + SCTraceODPCompatibleStart(2); + self = [super init]; + if (self) { + SCAssert(captureResource, @"SCCaptureResource should not be nil"); + SCAssert(captureResource.managedSession.avSession, @"AVCaptureSession should not be nil"); + SCAssert(captureResource.queuePerformer, @"SCQueuePerformer should not be nil"); + _metadataOutput = [AVCaptureMetadataOutput new]; + _callbackPerformer = captureResource.queuePerformer; + _captureResource = captureResource; + _detectionFrequency = SCExperimentWithFaceDetectionFrequency(); + + _parser = [[SCCaptureFaceDetectionParser alloc] + initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)]; + _metadataProcessPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureMetadataOutputDetectorProcessQueue + qualityOfService:QOS_CLASS_DEFAULT + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + if ([self _initDetection]) { + _trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self]; + } + } + return self; +} + +- (AVCaptureSession *)_captureSession +{ + // _captureResource.avSession may change, so we don't retain any specific AVCaptureSession. + return _captureResource.managedSession.avSession; +} + +- (BOOL)_initDetection +{ + BOOL success = NO; + if ([[self _captureSession] canAddOutput:_metadataOutput]) { + [[self _captureSession] addOutput:_metadataOutput]; + if ([_metadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) { + _numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces; + _metadataOutput.metadataObjectTypes = @[ AVMetadataObjectTypeFace ]; + success = YES; + SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully enabled."); + } else { + [[self _captureSession] removeOutput:_metadataOutput]; + success = NO; + SCLogCaptureMetaDetectorError(@"AVMetadataObjectTypeFace is not available for " + @"AVMetadataOutput[%@]", + _metadataOutput); + } + } else { + success = NO; + SCLogCaptureMetaDetectorError(@"AVCaptureSession[%@] cannot add AVMetadataOutput[%@] as an output", + [self _captureSession], _metadataOutput); + } + return success; +} + +- (void)startDetection +{ + SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -startDetection in an invalid queue."); + SC_GUARD_ELSE_RETURN(!_isDetecting); + [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{ + [_metadataOutput setMetadataObjectsDelegate:self queue:_metadataProcessPerformer.queue]; + _isDetecting = YES; + SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully enabled."); + }]; +} + +- (void)stopDetection +{ + SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -stopDetection in an invalid queue."); + SC_GUARD_ELSE_RETURN(_isDetecting); + [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{ + [_metadataOutput setMetadataObjectsDelegate:nil queue:NULL]; + 
_isDetecting = NO; + SCLogCaptureMetaDetectorInfo(@"AVMetadataObjectTypeFace detection successfully disabled."); + }]; +} + +- (SCQueuePerformer *)detectionPerformer +{ + return _captureResource.queuePerformer; +} + +#pragma mark - AVCaptureMetadataOutputObjectsDelegate +- (void)captureOutput:(AVCaptureOutput *)output + didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects + fromConnection:(AVCaptureConnection *)connection +{ + SCTraceODPCompatibleStart(2); + + BOOL shouldNotify = NO; + if (metadataObjects.count == 0 && + _numberOfSequentialFramesWithFaces != + kDefaultNumberOfSequentialFramesWithFaces) { // There were faces detected before, but there is no face right + // now, so send out the notification. + _numberOfSequentialFramesWithFaces = kDefaultNumberOfSequentialFramesWithFaces; + shouldNotify = YES; + } else if (metadataObjects.count > 0) { + _numberOfSequentialFramesWithFaces++; + shouldNotify = (_numberOfSequentialFramesWithFaces % _detectionFrequency == 0); + } + + SC_GUARD_ELSE_RETURN(shouldNotify); + + NSDictionary *faceBoundsByFaceID = + [_parser parseFaceBoundsByFaceIDFromMetadataObjects:metadataObjects]; + + [_callbackPerformer perform:^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didDetectFaceBounds:faceBoundsByFaceID]; + }]; +} + +@end diff --git a/ManagedCapturer/SCCapturer.h b/ManagedCapturer/SCCapturer.h new file mode 100644 index 0000000..47f7abc --- /dev/null +++ b/ManagedCapturer/SCCapturer.h @@ -0,0 +1,225 @@ +// +// SCManagedCapturer.h +// Snapchat +// +// Created by Liu Liu on 4/20/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCCaptureCommon.h" +#import "SCSnapCreationTriggers.h" + +#import + +#import +#import + +#define SCCapturerContext [NSString sc_stringWithFormat:@"%s/%d", __FUNCTION__, __LINE__] + +@class SCBlackCameraDetector; +@protocol SCManagedCapturerListener +, SCManagedCapturerLensAPI, SCDeviceMotionProvider, SCFileInputDecider, SCManagedCapturerARImageCaptureProvider, + SCManagedCapturerGLViewManagerAPI, SCManagedCapturerLensAPIProvider, SCManagedCapturerLSAComponentTrackerAPI, + SCManagedCapturePreviewLayerControllerDelegate; + +@protocol SCCapturer + +@property (nonatomic, readonly) SCBlackCameraDetector *blackCameraDetector; + +/** + * Returns id for the current capturer. + */ +- (id)lensProcessingCore; + +- (CMTime)firstWrittenAudioBufferDelay; +- (BOOL)audioQueueStarted; +- (BOOL)isLensApplied; +- (BOOL)isVideoMirrored; + +- (SCVideoCaptureSessionInfo)activeSession; + +#pragma mark - Outside resources + +- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector + deviceMotionProvider:(id)deviceMotionProvider + fileInputDecider:(id)fileInputDecider + arImageCaptureProvider:(id)arImageCaptureProvider + glviewManager:(id)glViewManager + lensAPIProvider:(id)lensAPIProvider + lsaComponentTracker:(id)lsaComponentTracker + managedCapturerPreviewLayerControllerDelegate: + (id)previewLayerControllerDelegate; + +#pragma mark - Setup, Start & Stop + +// setupWithDevicePositionAsynchronously will be called on the main thread, executed off the main thread, exactly once +- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +/** + * Important: Remember to call stopRunningAsynchronously to stop the capture session. Dismissing the view is not enough + * @param identifier is for knowing the callsite. 
Pass in the classname of the callsite is generally suggested. + * Currently it is used for debugging purposes. In other words the capture session will work without it. + */ +- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; +- (void)stopRunningAsynchronously:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)stopRunningAsynchronously:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + after:(NSTimeInterval)delay + context:(NSString *)context; + +- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController + context:(NSString *)context; + +#pragma mark - Recording / Capture + +- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler: + (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context; +/** + * Unlike captureStillImageAsynchronouslyWithAspectRatio, this captures a single frame from the ongoing video + * stream. This should be faster but lower quality (and smaller size), and does not play the shutter sound. + */ +- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler: + (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context + audioConfiguration:(SCAudioConfiguration *)configuration; +- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler: + (sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context; +- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context; +- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context; + +- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context; +- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context; +- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler + context:(NSString *)context; + +// AddTimedTask will schedule a task to run, it is thread safe API. Your task will run on main thread, so it is not +// recommended to add large amount of tasks which all have the same task target time. +- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context; + +// clearTimedTasks will cancel the tasks, it is thread safe API. 
+- (void)clearTimedTasksWithContext:(NSString *)context; + +#pragma mark - Utilities + +- (void)convertViewCoordinates:(CGPoint)viewCoordinates + completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler + context:(NSString *)context; + +- (void)detectLensCategoryOnNextFrame:(CGPoint)point + lenses:(NSArray *)lenses + completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion + context:(NSString *)context; + +#pragma mark - Configurations + +- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)setFlashActive:(BOOL)flashActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)setLensesActive:(BOOL)lensesActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)setLensesActive:(BOOL)lensesActive + filterFactory:(SCLookseryFilterFactory *)filterFactory + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)setLensesInTalkActive:(BOOL)lensesActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)setTorchActiveAsynchronously:(BOOL)torchActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)setNightModeActiveAsynchronously:(BOOL)active + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)lockZoomWithContext:(NSString *)context; + +- (void)unlockZoomWithContext:(NSString *)context; + +- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context; +- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor + devicePosition:(SCManagedCaptureDevicePosition)devicePosition + context:(NSString *)context; + +- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest + fromUser:(BOOL)fromUser + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context; + +// I need to call these three methods from SCAppDelegate explicitly so that I get the latest information. 
+- (void)applicationDidEnterBackground; +- (void)applicationWillEnterForeground; +- (void)applicationDidBecomeActive; +- (void)applicationWillResignActive; +- (void)mediaServicesWereReset; +- (void)mediaServicesWereLost; + +#pragma mark - Add / Remove Listener + +- (void)addListener:(id)listener; +- (void)removeListener:(id)listener; +- (void)addVideoDataSourceListener:(id)listener; +- (void)removeVideoDataSourceListener:(id)listener; +- (void)addDeviceCapacityAnalyzerListener:(id)listener; +- (void)removeDeviceCapacityAnalyzerListener:(id)listener; + +- (NSString *)debugInfo; + +- (id)currentVideoDataSource; + +- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback; + +// Need to be visible so that classes like SCCaptureSessionFixer can manage capture session +- (void)recreateAVCaptureSession; + +#pragma mark - Snap Creation triggers + +- (SCSnapCreationTriggers *)snapCreationTriggers; + +@optional + +- (BOOL)authorizedForVideoCapture; + +- (void)preloadVideoCaptureAuthorization; + +@end diff --git a/ManagedCapturer/SCCapturerBufferedVideoWriter.h b/ManagedCapturer/SCCapturerBufferedVideoWriter.h new file mode 100644 index 0000000..f0b7a05 --- /dev/null +++ b/ManagedCapturer/SCCapturerBufferedVideoWriter.h @@ -0,0 +1,44 @@ +// +// SCCapturerBufferedVideoWriter.h +// Snapchat +// +// Created by Chao Pang on 12/5/17. +// + +#import + +#import + +#import +#import + +@protocol SCCapturerBufferedVideoWriterDelegate + +- (void)videoWriterDidFailWritingWithError:(NSError *)error; + +@end + +@interface SCCapturerBufferedVideoWriter : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithPerformer:(id)performer + outputURL:(NSURL *)outputURL + delegate:(id)delegate + error:(NSError **)error; + +- (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings; + +- (void)startWritingAtSourceTime:(CMTime)sourceTime; + +- (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock; + +- (void)cancelWriting; + +- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer; + +- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer; + +- (void)cleanUp; + +@end diff --git a/ManagedCapturer/SCCapturerBufferedVideoWriter.m b/ManagedCapturer/SCCapturerBufferedVideoWriter.m new file mode 100644 index 0000000..6d0abe2 --- /dev/null +++ b/ManagedCapturer/SCCapturerBufferedVideoWriter.m @@ -0,0 +1,430 @@ +// +// SCCapturerBufferedVideoWriter.m +// Snapchat +// +// Created by Chao Pang on 12/5/17. 
+// + +#import "SCCapturerBufferedVideoWriter.h" + +#import "SCAudioCaptureSession.h" +#import "SCCaptureCommon.h" +#import "SCManagedCapturerUtils.h" + +#import +#import +#import +#import +#import + +#import + +@implementation SCCapturerBufferedVideoWriter { + SCQueuePerformer *_performer; + __weak id _delegate; + FBKVOController *_observeController; + + AVAssetWriter *_assetWriter; + AVAssetWriterInput *_audioWriterInput; + AVAssetWriterInput *_videoWriterInput; + AVAssetWriterInputPixelBufferAdaptor *_pixelBufferAdaptor; + CVPixelBufferPoolRef _defaultPixelBufferPool; + CVPixelBufferPoolRef _nightPixelBufferPool; + CVPixelBufferPoolRef _lensesPixelBufferPool; + CMBufferQueueRef _videoBufferQueue; + CMBufferQueueRef _audioBufferQueue; +} + +- (instancetype)initWithPerformer:(id)performer + outputURL:(NSURL *)outputURL + delegate:(id)delegate + error:(NSError **)error +{ + self = [super init]; + if (self) { + _performer = performer; + _delegate = delegate; + _observeController = [[FBKVOController alloc] initWithObserver:self]; + CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(), + &_videoBufferQueue); + CMBufferQueueCreate(kCFAllocatorDefault, 0, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(), + &_audioBufferQueue); + _assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:error]; + if (*error) { + self = nil; + return self; + } + } + return self; +} + +- (BOOL)prepareWritingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings +{ + SCTraceStart(); + SCAssert([_performer isCurrentPerformer], @""); + SCAssert(outputSettings, @"empty output setting"); + // Audio + SCTraceSignal(@"Derive audio output setting"); + NSDictionary *audioOutputSettings = @{ + AVFormatIDKey : @(kAudioFormatMPEG4AAC), + AVNumberOfChannelsKey : @(1), + AVSampleRateKey : @(kSCAudioCaptureSessionDefaultSampleRate), + AVEncoderBitRateKey : @(outputSettings.audioBitRate) + }; + _audioWriterInput = + [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings]; + _audioWriterInput.expectsMediaDataInRealTime = YES; + + // Video + SCTraceSignal(@"Derive video output setting"); + size_t outputWidth = outputSettings.width; + size_t outputHeight = outputSettings.height; + SCAssert(outputWidth > 0 && outputHeight > 0 && (outputWidth % 2 == 0) && (outputHeight % 2 == 0), + @"invalid output size"); + NSDictionary *videoCompressionSettings = @{ + AVVideoAverageBitRateKey : @(outputSettings.videoBitRate), + AVVideoMaxKeyFrameIntervalKey : @(outputSettings.keyFrameInterval) + }; + NSDictionary *videoOutputSettings = @{ + AVVideoCodecKey : AVVideoCodecH264, + AVVideoWidthKey : @(outputWidth), + AVVideoHeightKey : @(outputHeight), + AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill, + AVVideoCompressionPropertiesKey : videoCompressionSettings + }; + _videoWriterInput = + [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoOutputSettings]; + _videoWriterInput.expectsMediaDataInRealTime = YES; + CGAffineTransform transform = CGAffineTransformMakeTranslation(outputHeight, 0); + _videoWriterInput.transform = CGAffineTransformRotate(transform, M_PI_2); + _pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] + initWithAssetWriterInput:_videoWriterInput + sourcePixelBufferAttributes:@{ + (NSString *) + kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *) + kCVPixelBufferWidthKey : @(outputWidth), 
(NSString *) + kCVPixelBufferHeightKey : @(outputHeight) + }]; + + SCTraceSignal(@"Setup video writer input"); + if ([_assetWriter canAddInput:_videoWriterInput]) { + [_assetWriter addInput:_videoWriterInput]; + } else { + return NO; + } + + SCTraceSignal(@"Setup audio writer input"); + if ([_assetWriter canAddInput:_audioWriterInput]) { + [_assetWriter addInput:_audioWriterInput]; + } else { + return NO; + } + + return YES; +} + +- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + SCAssert([_performer isCurrentPerformer], @""); + SC_GUARD_ELSE_RETURN(sampleBuffer); + if (!CMBufferQueueIsEmpty(_videoBufferQueue)) { + // We need to drain the buffer queue in this case + while (_videoWriterInput.readyForMoreMediaData) { // TODO: also need to break out in case of errors + CMSampleBufferRef dequeuedSampleBuffer = + (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue); + if (dequeuedSampleBuffer == NULL) { + break; + } + [self _appendVideoSampleBuffer:dequeuedSampleBuffer]; + CFRelease(dequeuedSampleBuffer); + } + } + // Fast path, just append this sample buffer if ready + if (_videoWriterInput.readyForMoreMediaData) { + [self _appendVideoSampleBuffer:sampleBuffer]; + } else { + // It is not ready, queuing the sample buffer + CMBufferQueueEnqueue(_videoBufferQueue, sampleBuffer); + } +} + +- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + SCAssert([_performer isCurrentPerformer], @""); + SC_GUARD_ELSE_RETURN(sampleBuffer); + if (!CMBufferQueueIsEmpty(_audioBufferQueue)) { + // We need to drain the buffer queue in this case + while (_audioWriterInput.readyForMoreMediaData) { + CMSampleBufferRef dequeuedSampleBuffer = + (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue); + if (dequeuedSampleBuffer == NULL) { + break; + } + [_audioWriterInput appendSampleBuffer:sampleBuffer]; + CFRelease(dequeuedSampleBuffer); + } + } + // fast path, just append this sample buffer if ready + if ((_audioWriterInput.readyForMoreMediaData)) { + [_audioWriterInput appendSampleBuffer:sampleBuffer]; + } else { + // it is not ready, queuing the sample buffer + CMBufferQueueEnqueue(_audioBufferQueue, sampleBuffer); + } +} + +- (void)startWritingAtSourceTime:(CMTime)sourceTime +{ + SCTraceStart(); + SCAssert([_performer isCurrentPerformer], @""); + // To observe the status change on assetWriter because when assetWriter errors out, it only changes the + // status, no further delegate callbacks etc. 
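+    // -assetWriterStatusChanged: below forwards AVAssetWriterStatusFailed (and the writer's error) to the delegate.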
+ [_observeController observe:_assetWriter + keyPath:@keypath(_assetWriter, status) + options:NSKeyValueObservingOptionNew + action:@selector(assetWriterStatusChanged:)]; + [_assetWriter startWriting]; + [_assetWriter startSessionAtSourceTime:sourceTime]; +} + +- (void)cancelWriting +{ + SCTraceStart(); + SCAssert([_performer isCurrentPerformer], @""); + CMBufferQueueReset(_videoBufferQueue); + CMBufferQueueReset(_audioBufferQueue); + [_assetWriter cancelWriting]; +} + +- (void)finishWritingAtSourceTime:(CMTime)sourceTime withCompletionHanlder:(dispatch_block_t)completionBlock +{ + SCTraceStart(); + SCAssert([_performer isCurrentPerformer], @""); + + while (_audioWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_audioBufferQueue)) { + CMSampleBufferRef audioSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue); + if (audioSampleBuffer == NULL) { + break; + } + [_audioWriterInput appendSampleBuffer:audioSampleBuffer]; + CFRelease(audioSampleBuffer); + } + while (_videoWriterInput.readyForMoreMediaData && !CMBufferQueueIsEmpty(_videoBufferQueue)) { + CMSampleBufferRef videoSampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue); + if (videoSampleBuffer == NULL) { + break; + } + [_videoWriterInput appendSampleBuffer:videoSampleBuffer]; + CFRelease(videoSampleBuffer); + } + + dispatch_block_t finishWritingBlock = ^() { + [_assetWriter endSessionAtSourceTime:sourceTime]; + [_audioWriterInput markAsFinished]; + [_videoWriterInput markAsFinished]; + [_assetWriter finishWritingWithCompletionHandler:^{ + if (completionBlock) { + completionBlock(); + } + }]; + }; + + if (CMBufferQueueIsEmpty(_audioBufferQueue) && CMBufferQueueIsEmpty(_videoBufferQueue)) { + finishWritingBlock(); + } else { + // We need to drain the samples from the queues before finish writing + __block BOOL isAudioDone = NO; + __block BOOL isVideoDone = NO; + // Audio + [_audioWriterInput + requestMediaDataWhenReadyOnQueue:_performer.queue + usingBlock:^{ + if (!CMBufferQueueIsEmpty(_audioBufferQueue) && + _assetWriter.status == AVAssetWriterStatusWriting) { + CMSampleBufferRef audioSampleBuffer = + (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_audioBufferQueue); + if (audioSampleBuffer) { + [_audioWriterInput appendSampleBuffer:audioSampleBuffer]; + CFRelease(audioSampleBuffer); + } + } else if (!isAudioDone) { + isAudioDone = YES; + } + if (isAudioDone && isVideoDone) { + finishWritingBlock(); + } + }]; + + // Video + [_videoWriterInput + requestMediaDataWhenReadyOnQueue:_performer.queue + usingBlock:^{ + if (!CMBufferQueueIsEmpty(_videoBufferQueue) && + _assetWriter.status == AVAssetWriterStatusWriting) { + CMSampleBufferRef videoSampleBuffer = + (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(_videoBufferQueue); + if (videoSampleBuffer) { + [_videoWriterInput appendSampleBuffer:videoSampleBuffer]; + CFRelease(videoSampleBuffer); + } + } else if (!isVideoDone) { + isVideoDone = YES; + } + if (isAudioDone && isVideoDone) { + finishWritingBlock(); + } + }]; + } +} + +- (void)cleanUp +{ + _assetWriter = nil; + _videoWriterInput = nil; + _audioWriterInput = nil; + _pixelBufferAdaptor = nil; +} + +- (void)dealloc +{ + CFRelease(_videoBufferQueue); + CFRelease(_audioBufferQueue); + CVPixelBufferPoolRelease(_defaultPixelBufferPool); + CVPixelBufferPoolRelease(_nightPixelBufferPool); + CVPixelBufferPoolRelease(_lensesPixelBufferPool); + [_observeController unobserveAll]; +} + +- (void)assetWriterStatusChanged:(NSDictionary *)change +{ + SCTraceStart(); + if 
(_assetWriter.status == AVAssetWriterStatusFailed) { + SCTraceSignal(@"Asset writer status failed %@, error %@", change, _assetWriter.error); + [_delegate videoWriterDidFailWritingWithError:[_assetWriter.error copy]]; + } +} + +#pragma - Private methods + +- (CVImageBufferRef)_croppedPixelBufferWithInputPixelBuffer:(CVImageBufferRef)inputPixelBuffer +{ + SCAssertTrue([SCDeviceName isIphoneX]); + const size_t inputBufferWidth = CVPixelBufferGetWidth(inputPixelBuffer); + const size_t inputBufferHeight = CVPixelBufferGetHeight(inputPixelBuffer); + const size_t croppedBufferWidth = (size_t)(inputBufferWidth * kSCIPhoneXCapturedImageVideoCropRatio) / 2 * 2; + const size_t croppedBufferHeight = + (size_t)(croppedBufferWidth * SCManagedCapturedImageAndVideoAspectRatio()) / 2 * 2; + const size_t offsetPointX = inputBufferWidth - croppedBufferWidth; + const size_t offsetPointY = (inputBufferHeight - croppedBufferHeight) / 4 * 2; + + SC_GUARD_ELSE_RUN_AND_RETURN_VALUE((inputBufferWidth >= croppedBufferWidth) && + (inputBufferHeight >= croppedBufferHeight) && (offsetPointX % 2 == 0) && + (offsetPointY % 2 == 0) && + (inputBufferWidth >= croppedBufferWidth + offsetPointX) && + (inputBufferHeight >= croppedBufferHeight + offsetPointY), + SCLogGeneralError(@"Invalid cropping configuration"), NULL); + + CVPixelBufferRef croppedPixelBuffer = NULL; + CVPixelBufferPoolRef pixelBufferPool = + [self _pixelBufferPoolWithInputSize:CGSizeMake(inputBufferWidth, inputBufferHeight) + croppedSize:CGSizeMake(croppedBufferWidth, croppedBufferHeight)]; + + if (pixelBufferPool) { + CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &croppedPixelBuffer); + if ((result != kCVReturnSuccess) || (croppedPixelBuffer == NULL)) { + SCLogGeneralError(@"[SCCapturerVideoWriterInput] Error creating croppedPixelBuffer"); + return NULL; + } + } else { + SCAssertFail(@"[SCCapturerVideoWriterInput] PixelBufferPool is NULL with inputBufferWidth:%@, " + @"inputBufferHeight:%@, croppedBufferWidth:%@, croppedBufferHeight:%@", + @(inputBufferWidth), @(inputBufferHeight), @(croppedBufferWidth), @(croppedBufferHeight)); + return NULL; + } + CVPixelBufferLockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly); + CVPixelBufferLockBaseAddress(croppedPixelBuffer, 0); + + const size_t planesCount = CVPixelBufferGetPlaneCount(inputPixelBuffer); + for (int planeIndex = 0; planeIndex < planesCount; planeIndex++) { + size_t inPlaneHeight = CVPixelBufferGetHeightOfPlane(inputPixelBuffer, planeIndex); + size_t inPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(inputPixelBuffer, planeIndex); + uint8_t *inPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(inputPixelBuffer, planeIndex); + + size_t croppedPlaneHeight = CVPixelBufferGetHeightOfPlane(croppedPixelBuffer, planeIndex); + size_t croppedPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(croppedPixelBuffer, planeIndex); + uint8_t *croppedPlaneAdress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(croppedPixelBuffer, planeIndex); + + // Note that inPlaneBytesPerRow is not strictly 2x of inPlaneWidth for some devices (e.g. iPhone X). 
+ // However, since UV are packed together in memory, we can use offsetPointX for all planes + size_t offsetPlaneBytesX = offsetPointX; + size_t offsetPlaneBytesY = offsetPointY * inPlaneHeight / inputBufferHeight; + + inPlaneAdress = inPlaneAdress + offsetPlaneBytesY * inPlaneBytesPerRow + offsetPlaneBytesX; + size_t bytesToCopyPerRow = MIN(inPlaneBytesPerRow - offsetPlaneBytesX, croppedPlaneBytesPerRow); + for (int i = 0; i < croppedPlaneHeight; i++) { + memcpy(croppedPlaneAdress, inPlaneAdress, bytesToCopyPerRow); + inPlaneAdress += inPlaneBytesPerRow; + croppedPlaneAdress += croppedPlaneBytesPerRow; + } + } + CVPixelBufferUnlockBaseAddress(inputPixelBuffer, kCVPixelBufferLock_ReadOnly); + CVPixelBufferUnlockBaseAddress(croppedPixelBuffer, 0); + return croppedPixelBuffer; +} + +- (CVPixelBufferPoolRef)_pixelBufferPoolWithInputSize:(CGSize)inputSize croppedSize:(CGSize)croppedSize +{ + if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice defaultActiveFormatResolution])) { + if (_defaultPixelBufferPool == NULL) { + _defaultPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height]; + } + return _defaultPixelBufferPool; + } else if (CGSizeEqualToSize(inputSize, [SCManagedCaptureDevice nightModeActiveFormatResolution])) { + if (_nightPixelBufferPool == NULL) { + _nightPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height]; + } + return _nightPixelBufferPool; + } else { + if (_lensesPixelBufferPool == NULL) { + _lensesPixelBufferPool = [self _newPixelBufferPoolWithWidth:croppedSize.width height:croppedSize.height]; + } + return _lensesPixelBufferPool; + } +} + +- (CVPixelBufferPoolRef)_newPixelBufferPoolWithWidth:(size_t)width height:(size_t)height +{ + NSDictionary *attributes = @{ + (NSString *) kCVPixelBufferIOSurfacePropertiesKey : @{}, (NSString *) + kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange), (NSString *) + kCVPixelBufferWidthKey : @(width), (NSString *) + kCVPixelBufferHeightKey : @(height) + }; + CVPixelBufferPoolRef pixelBufferPool = NULL; + CVReturn result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, + (__bridge CFDictionaryRef _Nullable)(attributes), &pixelBufferPool); + if (result != kCVReturnSuccess) { + SCLogGeneralError(@"[SCCapturerBufferredVideoWriter] Error creating pixel buffer pool %i", result); + return NULL; + } + + return pixelBufferPool; +} + +- (void)_appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + SCAssert([_performer isCurrentPerformer], @""); + CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + CVImageBufferRef inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + if ([SCDeviceName isIphoneX]) { + CVImageBufferRef croppedPixelBuffer = [self _croppedPixelBufferWithInputPixelBuffer:inputPixelBuffer]; + if (croppedPixelBuffer) { + [_pixelBufferAdaptor appendPixelBuffer:croppedPixelBuffer withPresentationTime:presentationTime]; + CVPixelBufferRelease(croppedPixelBuffer); + } + } else { + [_pixelBufferAdaptor appendPixelBuffer:inputPixelBuffer withPresentationTime:presentationTime]; + } +} + +@end diff --git a/ManagedCapturer/SCCapturerDefines.h b/ManagedCapturer/SCCapturerDefines.h new file mode 100644 index 0000000..ff6974a --- /dev/null +++ b/ManagedCapturer/SCCapturerDefines.h @@ -0,0 +1,20 @@ +// +// SCCapturerDefines.h +// Snapchat +// +// Created by Chao Pang on 12/20/17. 
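+// Shared capture-pipeline definitions: lighting-condition levels and per-sample-buffer metadata (ISO speed, exposure time, brightness).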
+// + +#import + +typedef NS_ENUM(NSInteger, SCCapturerLightingConditionType) { + SCCapturerLightingConditionTypeNormal = 0, + SCCapturerLightingConditionTypeDark, + SCCapturerLightingConditionTypeExtremeDark, +}; + +typedef struct SampleBufferMetadata { + int isoSpeedRating; + float exposureTime; + float brightness; +} SampleBufferMetadata; diff --git a/ManagedCapturer/SCCapturerToken.h b/ManagedCapturer/SCCapturerToken.h new file mode 100644 index 0000000..3186b03 --- /dev/null +++ b/ManagedCapturer/SCCapturerToken.h @@ -0,0 +1,18 @@ +// +// SCCapturerToken.h +// Snapchat +// +// Created by Xishuo Liu on 3/24/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import + +@interface SCCapturerToken : NSObject + +- (instancetype)initWithIdentifier:(NSString *)identifier NS_DESIGNATED_INITIALIZER; + +- (instancetype)init __attribute__((unavailable("Use initWithIdentifier: instead."))); +- (instancetype) new __attribute__((unavailable("Use initWithIdentifier: instead."))); + +@end diff --git a/ManagedCapturer/SCCapturerToken.m b/ManagedCapturer/SCCapturerToken.m new file mode 100644 index 0000000..fb6a449 --- /dev/null +++ b/ManagedCapturer/SCCapturerToken.m @@ -0,0 +1,30 @@ +// +// SCCapturerToken.m +// Snapchat +// +// Created by Xishuo Liu on 3/24/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCCapturerToken.h" + +#import + +@implementation SCCapturerToken { + NSString *_identifier; +} + +- (instancetype)initWithIdentifier:(NSString *)identifier +{ + if (self = [super init]) { + _identifier = identifier.copy; + } + return self; +} + +- (NSString *)debugDescription +{ + return [NSString sc_stringWithFormat:@"%@_%@", _identifier, self]; +} + +@end diff --git a/ManagedCapturer/SCCapturerTokenProvider.h b/ManagedCapturer/SCCapturerTokenProvider.h new file mode 100644 index 0000000..5ce3d59 --- /dev/null +++ b/ManagedCapturer/SCCapturerTokenProvider.h @@ -0,0 +1,20 @@ +// +// Created by Aaron Levine on 10/16/17. +// + +#import + +#import + +@class SCCapturerToken; + +NS_ASSUME_NONNULL_BEGIN +@interface SCCapturerTokenProvider : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE ++ (instancetype)providerWithToken:(SCCapturerToken *)token; + +- (nullable SCCapturerToken *)getTokenAndInvalidate; + +@end +NS_ASSUME_NONNULL_END diff --git a/ManagedCapturer/SCCapturerTokenProvider.m b/ManagedCapturer/SCCapturerTokenProvider.m new file mode 100644 index 0000000..92e3da8 --- /dev/null +++ b/ManagedCapturer/SCCapturerTokenProvider.m @@ -0,0 +1,42 @@ +// +// Created by Aaron Levine on 10/16/17. +// + +#import "SCCapturerTokenProvider.h" + +#import "SCCapturerToken.h" + +#import +#import + +@implementation SCCapturerTokenProvider { + SCCapturerToken *_Nullable _token; +} + ++ (instancetype)providerWithToken:(SCCapturerToken *)token +{ + return [[self alloc] initWithToken:token]; +} + +- (instancetype)initWithToken:(SCCapturerToken *)token +{ + self = [super init]; + if (self) { + _token = token; + } + + return self; +} + +- (nullable SCCapturerToken *)getTokenAndInvalidate +{ + // ensure serial access by requiring calls be on the main thread + SCAssertMainThread(); + + let token = _token; + _token = nil; + + return token; +} + +@end diff --git a/ManagedCapturer/SCExposureState.h b/ManagedCapturer/SCExposureState.h new file mode 100644 index 0000000..f49341f --- /dev/null +++ b/ManagedCapturer/SCExposureState.h @@ -0,0 +1,18 @@ +// +// SCExposureState.h +// Snapchat +// +// Created by Derek Peirce on 4/10/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
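+// A minimal usage sketch (hypothetical variable names; assumes `device` is a configured AVCaptureDevice): +//   SCExposureState *exposureState = [[SCExposureState alloc] initWithDevice:device]; +//   // ... switch formats or reconfigure the session ... +//   [exposureState applyISOAndExposureDurationToDevice:device];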
+// + +#import +#import + +@interface SCExposureState : NSObject + +- (instancetype)initWithDevice:(AVCaptureDevice *)device; + +- (void)applyISOAndExposureDurationToDevice:(AVCaptureDevice *)device; + +@end diff --git a/ManagedCapturer/SCExposureState.m b/ManagedCapturer/SCExposureState.m new file mode 100644 index 0000000..ce42d47 --- /dev/null +++ b/ManagedCapturer/SCExposureState.m @@ -0,0 +1,47 @@ +// +// SCExposureState.m +// Snapchat +// +// Created by Derek Peirce on 4/10/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCExposureState.h" + +#import "AVCaptureDevice+ConfigurationLock.h" + +#import + +@import AVFoundation; + +@implementation SCExposureState { + float _ISO; + CMTime _exposureDuration; +} + +- (instancetype)initWithDevice:(AVCaptureDevice *)device +{ + if (self = [super init]) { + _ISO = device.ISO; + _exposureDuration = device.exposureDuration; + } + return self; +} + +- (void)applyISOAndExposureDurationToDevice:(AVCaptureDevice *)device +{ + if ([device isExposureModeSupported:AVCaptureExposureModeCustom]) { + [device runTask:@"set prior exposure" + withLockedConfiguration:^() { + CMTime exposureDuration = + CMTimeClampToRange(_exposureDuration, CMTimeRangeMake(device.activeFormat.minExposureDuration, + device.activeFormat.maxExposureDuration)); + [device setExposureModeCustomWithDuration:exposureDuration + ISO:SC_CLAMP(_ISO, device.activeFormat.minISO, + device.activeFormat.maxISO) + completionHandler:nil]; + }]; + } +} + +@end diff --git a/ManagedCapturer/SCFileAudioCaptureSession.h b/ManagedCapturer/SCFileAudioCaptureSession.h new file mode 100644 index 0000000..b4a61dd --- /dev/null +++ b/ManagedCapturer/SCFileAudioCaptureSession.h @@ -0,0 +1,19 @@ +// +// SCFileAudioCaptureSession.h +// Snapchat +// +// Created by Xiaomu Wu on 2/2/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCAudioCaptureSession.h" + +#import + +@interface SCFileAudioCaptureSession : NSObject + +// Linear PCM is required. +// To best mimic `SCAudioCaptureSession`, use an audio file recorded from it. +- (void)setFileURL:(NSURL *)fileURL; + +@end diff --git a/ManagedCapturer/SCFileAudioCaptureSession.m b/ManagedCapturer/SCFileAudioCaptureSession.m new file mode 100644 index 0000000..523b508 --- /dev/null +++ b/ManagedCapturer/SCFileAudioCaptureSession.m @@ -0,0 +1,243 @@ +// +// SCFileAudioCaptureSession.m +// Snapchat +// +// Created by Xiaomu Wu on 2/2/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
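+// Reads linear PCM from a file and replays it through the audio capture session delegate in ~0.2 s buffers, mimicking live capture.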
+// + +#import "SCFileAudioCaptureSession.h" + +#import +#import +#import +#import + +@import AudioToolbox; + +static float const kAudioBufferDurationInSeconds = 0.2; // same as SCAudioCaptureSession + +static char *const kSCFileAudioCaptureSessionQueueLabel = "com.snapchat.file-audio-capture-session"; + +@implementation SCFileAudioCaptureSession { + SCQueuePerformer *_performer; + SCSentinel *_sentinel; + + NSURL *_fileURL; + + AudioFileID _audioFile; // audio file + AudioStreamBasicDescription _asbd; // audio format (core audio) + CMAudioFormatDescriptionRef _formatDescription; // audio format (core media) + SInt64 _readCurPacket; // current packet index to read + UInt32 _readNumPackets; // number of packets to read every time + UInt32 _readNumBytes; // number of bytes to read every time + void *_readBuffer; // data buffer to hold read packets +} + +@synthesize delegate = _delegate; + +#pragma mark - Public + +- (instancetype)init +{ + self = [super init]; + if (self) { + _performer = [[SCQueuePerformer alloc] initWithLabel:kSCFileAudioCaptureSessionQueueLabel + qualityOfService:QOS_CLASS_UNSPECIFIED + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + _sentinel = [[SCSentinel alloc] init]; + } + return self; +} + +- (void)dealloc +{ + if (_audioFile) { + AudioFileClose(_audioFile); + } + if (_formatDescription) { + CFRelease(_formatDescription); + } + if (_readBuffer) { + free(_readBuffer); + } +} + +- (void)setFileURL:(NSURL *)fileURL +{ + [_performer perform:^{ + _fileURL = fileURL; + }]; +} + +#pragma mark - SCAudioCaptureSession + +- (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate // `sampleRate` ignored + completionHandler:(audio_capture_session_block)completionHandler +{ + [_performer perform:^{ + BOOL succeeded = [self _setup]; + int32_t sentinelValue = [_sentinel value]; + if (completionHandler) { + completionHandler(nil); + } + if (succeeded) { + [_performer perform:^{ + SC_GUARD_ELSE_RETURN([_sentinel value] == sentinelValue); + [self _read]; + } + after:kAudioBufferDurationInSeconds]; + } + }]; +} + +- (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler +{ + [_performer performAndWait:^{ + [self _teardown]; + if (completionHandler) { + completionHandler(); + } + }]; +} + +#pragma mark - Private + +- (BOOL)_setup +{ + SCAssert([_performer isCurrentPerformer], @""); + + [_sentinel increment]; + + OSStatus status = noErr; + + status = AudioFileOpenURL((__bridge CFURLRef)_fileURL, kAudioFileReadPermission, 0, &_audioFile); + if (noErr != status) { + SCLogGeneralError(@"Cannot open file at URL %@, error code %d", _fileURL, (int)status); + return NO; + } + + _asbd = (AudioStreamBasicDescription){0}; + UInt32 asbdSize = sizeof(_asbd); + status = AudioFileGetProperty(_audioFile, kAudioFilePropertyDataFormat, &asbdSize, &_asbd); + if (noErr != status) { + SCLogGeneralError(@"Cannot get audio data format, error code %d", (int)status); + AudioFileClose(_audioFile); + _audioFile = NULL; + return NO; + } + + if (kAudioFormatLinearPCM != _asbd.mFormatID) { + SCLogGeneralError(@"Linear PCM is required"); + AudioFileClose(_audioFile); + _audioFile = NULL; + _asbd = (AudioStreamBasicDescription){0}; + return NO; + } + + UInt32 aclSize = 0; + AudioChannelLayout *acl = NULL; + status = AudioFileGetPropertyInfo(_audioFile, kAudioFilePropertyChannelLayout, &aclSize, NULL); + if (noErr == status) { + acl = malloc(aclSize); + status = AudioFileGetProperty(_audioFile, kAudioFilePropertyChannelLayout, 
&aclSize, acl); + if (noErr != status) { + aclSize = 0; + free(acl); + acl = NULL; + } + } + + status = CMAudioFormatDescriptionCreate(NULL, &_asbd, aclSize, acl, 0, NULL, NULL, &_formatDescription); + if (acl) { + free(acl); + acl = NULL; + } + if (noErr != status) { + SCLogGeneralError(@"Cannot create format description, error code %d", (int)status); + AudioFileClose(_audioFile); + _audioFile = NULL; + _asbd = (AudioStreamBasicDescription){0}; + return NO; + } + + _readCurPacket = 0; + _readNumPackets = ceil(_asbd.mSampleRate * kAudioBufferDurationInSeconds); + _readNumBytes = _asbd.mBytesPerPacket * _readNumPackets; + _readBuffer = malloc(_readNumBytes); + + return YES; +} + +- (void)_read +{ + SCAssert([_performer isCurrentPerformer], @""); + + OSStatus status = noErr; + + UInt32 numBytes = _readNumBytes; + UInt32 numPackets = _readNumPackets; + status = AudioFileReadPacketData(_audioFile, NO, &numBytes, NULL, _readCurPacket, &numPackets, _readBuffer); + if (noErr != status) { + SCLogGeneralError(@"Cannot read audio data, error code %d", (int)status); + return; + } + if (0 == numPackets) { + return; + } + CMTime PTS = CMTimeMakeWithSeconds(_readCurPacket / _asbd.mSampleRate, 600); + + _readCurPacket += numPackets; + + CMBlockBufferRef dataBuffer = NULL; + status = CMBlockBufferCreateWithMemoryBlock(NULL, NULL, numBytes, NULL, NULL, 0, numBytes, 0, &dataBuffer); + if (kCMBlockBufferNoErr == status) { + if (dataBuffer) { + CMBlockBufferReplaceDataBytes(_readBuffer, dataBuffer, 0, numBytes); + CMSampleBufferRef sampleBuffer = NULL; + CMAudioSampleBufferCreateWithPacketDescriptions(NULL, dataBuffer, true, NULL, NULL, _formatDescription, + numPackets, PTS, NULL, &sampleBuffer); + if (sampleBuffer) { + [_delegate audioCaptureSession:self didOutputSampleBuffer:sampleBuffer]; + CFRelease(sampleBuffer); + } + CFRelease(dataBuffer); + } + } else { + SCLogGeneralError(@"Cannot create data buffer, error code %d", (int)status); + } + + int32_t sentinelValue = [_sentinel value]; + [_performer perform:^{ + SC_GUARD_ELSE_RETURN([_sentinel value] == sentinelValue); + [self _read]; + } + after:kAudioBufferDurationInSeconds]; +} + +- (void)_teardown +{ + SCAssert([_performer isCurrentPerformer], @""); + + [_sentinel increment]; + + if (_audioFile) { + AudioFileClose(_audioFile); + _audioFile = NULL; + } + _asbd = (AudioStreamBasicDescription){0}; + if (_formatDescription) { + CFRelease(_formatDescription); + _formatDescription = NULL; + } + _readCurPacket = 0; + _readNumPackets = 0; + _readNumBytes = 0; + if (_readBuffer) { + free(_readBuffer); + _readBuffer = NULL; + } +} + +@end diff --git a/ManagedCapturer/SCManagedAudioStreamer.h b/ManagedCapturer/SCManagedAudioStreamer.h new file mode 100644 index 0000000..9157d4a --- /dev/null +++ b/ManagedCapturer/SCManagedAudioStreamer.h @@ -0,0 +1,20 @@ +// +// SCManagedAudioStreamer.h +// Snapchat +// +// Created by Ricardo Sánchez-Sáez on 7/28/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import + +#import + +@interface SCManagedAudioStreamer : NSObject + ++ (instancetype)sharedInstance; + ++ (instancetype) new NS_UNAVAILABLE; +- (instancetype)init NS_UNAVAILABLE; + +@end diff --git a/ManagedCapturer/SCManagedAudioStreamer.m b/ManagedCapturer/SCManagedAudioStreamer.m new file mode 100644 index 0000000..4055396 --- /dev/null +++ b/ManagedCapturer/SCManagedAudioStreamer.m @@ -0,0 +1,115 @@ +// +// SCManagedAudioStreamer.m +// Snapchat +// +// Created by Ricardo Sánchez-Sáez on 7/28/16. +// Copyright © 2016 Snapchat, Inc. 
All rights reserved. +// + +#import "SCManagedAudioStreamer.h" + +#import "SCAudioCaptureSession.h" + +#import +#import +#import +#import + +#import +#import + +static char *const kSCManagedAudioStreamerQueueLabel = "com.snapchat.audioStreamerQueue"; + +@interface SCManagedAudioStreamer () + +@end + +@implementation SCManagedAudioStreamer { + SCAudioCaptureSession *_captureSession; + SCAudioConfigurationToken *_audioConfiguration; + SCManagedAudioDataSourceListenerAnnouncer *_announcer; + SCScopedAccess *_scopedMutableAudioSession; +} + +@synthesize performer = _performer; + ++ (instancetype)sharedInstance +{ + static dispatch_once_t onceToken; + static SCManagedAudioStreamer *managedAudioStreamer; + dispatch_once(&onceToken, ^{ + managedAudioStreamer = [[SCManagedAudioStreamer alloc] initSharedInstance]; + }); + return managedAudioStreamer; +} + +- (instancetype)initSharedInstance +{ + SCTraceStart(); + self = [super init]; + if (self) { + _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedAudioStreamerQueueLabel + qualityOfService:QOS_CLASS_USER_INTERACTIVE + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + _announcer = [[SCManagedAudioDataSourceListenerAnnouncer alloc] init]; + _captureSession = [[SCAudioCaptureSession alloc] init]; + _captureSession.delegate = self; + } + return self; +} + +- (BOOL)isStreaming +{ + return _audioConfiguration != nil; +} + +- (void)startStreamingWithAudioConfiguration:(SCAudioConfiguration *)configuration +{ + SCTraceStart(); + [_performer perform:^{ + if (!self.isStreaming) { + // Begin audio recording asynchronously. First we need to have the proper audio session category. + _audioConfiguration = [SCAudioSessionExperimentAdapter + configureWith:configuration + performer:_performer + completion:^(NSError *error) { + [_captureSession + beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate + completionHandler:NULL]; + + }]; + } + }]; +} + +- (void)stopStreaming +{ + [_performer perform:^{ + if (self.isStreaming) { + [_captureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL]; + [SCAudioSessionExperimentAdapter relinquishConfiguration:_audioConfiguration performer:nil completion:nil]; + _audioConfiguration = nil; + } + }]; +} + +- (void)addListener:(id)listener +{ + SCTraceStart(); + [_announcer addListener:listener]; +} + +- (void)removeListener:(id)listener +{ + SCTraceStart(); + [_announcer removeListener:listener]; +} + +- (void)audioCaptureSession:(SCAudioCaptureSession *)audioCaptureSession + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + [_announcer managedAudioDataSource:self didOutputSampleBuffer:sampleBuffer]; +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDevice+SCManagedCapturer.h b/ManagedCapturer/SCManagedCaptureDevice+SCManagedCapturer.h new file mode 100644 index 0000000..670e9be --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDevice+SCManagedCapturer.h @@ -0,0 +1,71 @@ +// +// SCManagedCaptureDevice+SCManagedCapturer.h +// Snapchat +// +// Created by Liu Liu on 5/9/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. 
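+// Category that exposes the underlying AVCaptureDevice, its input, and the session/format/flash/torch/zoom/focus configuration hooks to the managed capturer.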
+// + +#import "SCManagedCaptureDevice.h" + +#import + +@interface SCManagedCaptureDevice (SCManagedCapturer) + +@property (nonatomic, strong, readonly) AVCaptureDevice *device; + +@property (nonatomic, strong, readonly) AVCaptureDeviceInput *deviceInput; + +@property (nonatomic, copy, readonly) NSError *error; + +@property (nonatomic, assign, readonly) BOOL isConnected; + +@property (nonatomic, strong, readonly) AVCaptureDeviceFormat *activeFormat; + +// Setup and hook up with device + +- (BOOL)setDeviceAsInput:(AVCaptureSession *)session; + +- (void)removeDeviceAsInput:(AVCaptureSession *)session; + +- (void)resetDeviceAsInput; + +// Configurations + +@property (nonatomic, assign) BOOL flashActive; + +@property (nonatomic, assign) BOOL torchActive; + +@property (nonatomic, assign) float zoomFactor; + +@property (nonatomic, assign, readonly) BOOL liveVideoStreamingActive; + +@property (nonatomic, assign, readonly) BOOL isNightModeActive; + +@property (nonatomic, assign, readonly) BOOL isFlashSupported; + +@property (nonatomic, assign, readonly) BOOL isTorchSupported; + +- (void)setNightModeActive:(BOOL)nightModeActive session:(AVCaptureSession *)session; + +- (void)setLiveVideoStreaming:(BOOL)liveVideoStreaming session:(AVCaptureSession *)session; + +- (void)setCaptureDepthData:(BOOL)captureDepthData session:(AVCaptureSession *)session; + +- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser; + +- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest; + +- (void)continuousAutofocus; + +- (void)setRecording:(BOOL)recording; + +- (void)updateActiveFormatWithSession:(AVCaptureSession *)session; + +// Utilities + +- (CGPoint)convertViewCoordinates:(CGPoint)viewCoordinates + viewSize:(CGSize)viewSize + videoGravity:(NSString *)videoGravity; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h b/ManagedCapturer/SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h new file mode 100644 index 0000000..3bd7ccc --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h @@ -0,0 +1,17 @@ +// +// SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h +// Snapchat +// +// Created by Kam Sheffield on 10/29/15. +// Copyright © 2015 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCaptureDevice.h" + +#import + +@interface SCManagedCaptureDevice (SCManagedDeviceCapacityAnalyzer) + +@property (nonatomic, strong, readonly) AVCaptureDevice *device; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDevice.h b/ManagedCapturer/SCManagedCaptureDevice.h new file mode 100644 index 0000000..9562c7f --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDevice.h @@ -0,0 +1,60 @@ +// +// SCManagedCaptureDevice.h +// Snapchat +// +// Created by Liu Liu on 4/22/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. 
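+// A minimal usage sketch (hypothetical; error handling omitted): +//   SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:SCManagedCaptureDevicePositionBack]; +//   if (device.isAvailable) { +//       device.delegate = self; // self conforms to SCManagedCaptureDeviceDelegate +//   }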
+// + +#import +#import + +#import +#import + +extern CGFloat const kSCMaxVideoZoomFactor; +extern CGFloat const kSCMinVideoZoomFactor; + +@class SCManagedCaptureDevice; + +@protocol SCManagedCaptureDeviceDelegate + +@optional +- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeAdjustingExposure:(BOOL)adjustingExposure; +- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeExposurePoint:(CGPoint)exposurePoint; +- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeFocusPoint:(CGPoint)focusPoint; + +@end + +@interface SCManagedCaptureDevice : NSObject + +@property (nonatomic, weak) id delegate; + +// These two class methods are thread safe ++ (instancetype)front; + ++ (instancetype)back; + ++ (instancetype)dualCamera; + ++ (instancetype)deviceWithPosition:(SCManagedCaptureDevicePosition)position; + ++ (BOOL)is1080pSupported; + ++ (BOOL)isMixCaptureSupported; + ++ (BOOL)isNightModeSupported; + ++ (BOOL)isEnhancedNightModeSupported; + ++ (CGSize)defaultActiveFormatResolution; + ++ (CGSize)nightModeActiveFormatResolution; + +- (BOOL)softwareZoom; + +- (SCManagedCaptureDevicePosition)position; + +- (BOOL)isAvailable; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDevice.m b/ManagedCapturer/SCManagedCaptureDevice.m new file mode 100644 index 0000000..30e0330 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDevice.m @@ -0,0 +1,821 @@ +// +// SCManagedCaptureDevice.m +// Snapchat +// +// Created by Liu Liu on 4/22/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCManagedCaptureDevice.h" + +#import "AVCaptureDevice+ConfigurationLock.h" +#import "SCCameraTweaks.h" +#import "SCCaptureCommon.h" +#import "SCCaptureDeviceResolver.h" +#import "SCManagedCaptureDevice+SCManagedCapturer.h" +#import "SCManagedCaptureDeviceAutoExposureHandler.h" +#import "SCManagedCaptureDeviceAutoFocusHandler.h" +#import "SCManagedCaptureDeviceExposureHandler.h" +#import "SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h" +#import "SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h" +#import "SCManagedCaptureDeviceFocusHandler.h" +#import "SCManagedCapturer.h" +#import "SCManagedDeviceCapacityAnalyzer.h" + +#import +#import +#import + +#import + +static int32_t const kSCManagedCaptureDeviceMaximumHighFrameRate = 30; +static int32_t const kSCManagedCaptureDeviceMaximumLowFrameRate = 24; + +static float const kSCManagedCaptureDevicecSoftwareMaxZoomFactor = 8; + +CGFloat const kSCMaxVideoZoomFactor = 100; // the max videoZoomFactor acceptable +CGFloat const kSCMinVideoZoomFactor = 1; + +static NSDictionary *SCBestHRSIFormatsForHeights(NSArray *desiredHeights, NSArray *formats, BOOL shouldSupportDepth) +{ + NSMutableDictionary *bestHRSIHeights = [NSMutableDictionary dictionary]; + for (NSNumber *height in desiredHeights) { + bestHRSIHeights[height] = @0; + } + NSMutableDictionary *bestHRSIFormats = [NSMutableDictionary dictionary]; + for (AVCaptureDeviceFormat *format in formats) { + if (@available(ios 11.0, *)) { + if (shouldSupportDepth && format.supportedDepthDataFormats.count == 0) { + continue; + } + } + if (CMFormatDescriptionGetMediaSubType(format.formatDescription) != + kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { + continue; + } + CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + NSNumber *height = @(dimensions.height); + NSNumber *bestHRSI = bestHRSIHeights[height]; + if (bestHRSI) { + CMVideoDimensions hrsi = format.highResolutionStillImageDimensions; + // If we 
enabled HRSI, we are only interested in the formats with the best high-resolution still image dimensions. + if (hrsi.height > [bestHRSI intValue]) { + bestHRSIHeights[height] = @(hrsi.height); + bestHRSIFormats[height] = format; + } + } + } + return [bestHRSIFormats copy]; +} + +static inline float SCDegreesToRadians(float theta) +{ + return theta * (float)M_PI / 180.f; +} + +static inline float SCRadiansToDegrees(float theta) +{ + return theta * 180.f / (float)M_PI; +} + +@implementation SCManagedCaptureDevice { + AVCaptureDevice *_device; + AVCaptureDeviceInput *_deviceInput; + AVCaptureDeviceFormat *_defaultFormat; + AVCaptureDeviceFormat *_nightFormat; + AVCaptureDeviceFormat *_liveVideoStreamingFormat; + SCManagedCaptureDevicePosition _devicePosition; + + // Configurations on the device, shortcut to avoid re-configurations + id _exposureHandler; + id _focusHandler; + + FBKVOController *_observeController; + + // For the private category methods + NSError *_error; + BOOL _softwareZoom; + BOOL _isConnected; + BOOL _flashActive; + BOOL _torchActive; + BOOL _liveVideoStreamingActive; + float _zoomFactor; + BOOL _isNightModeActive; + BOOL _captureDepthData; +} +@synthesize fieldOfView = _fieldOfView; + ++ (instancetype)front +{ + SCTraceStart(); + static dispatch_once_t onceToken; + static SCManagedCaptureDevice *front; + static dispatch_semaphore_t semaphore; + dispatch_once(&onceToken, ^{ + semaphore = dispatch_semaphore_create(1); + }); + /* You can use the tweak below to intentionally kill camera in debug.
+ if (SCIsDebugBuild() && SCCameraTweaksKillBackCamera()) { + return nil; + } + */ + dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER); + if (!back) { + AVCaptureDevice *device = + [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack]; + if (device) { + back = [[SCManagedCaptureDevice alloc] initWithDevice:device + devicePosition:SCManagedCaptureDevicePositionBack]; + } + } + dispatch_semaphore_signal(semaphore); + return back; +} + ++ (SCManagedCaptureDevice *)dualCamera +{ + SCTraceStart(); + static dispatch_once_t onceToken; + static SCManagedCaptureDevice *dualCamera; + static dispatch_semaphore_t semaphore; + dispatch_once(&onceToken, ^{ + semaphore = dispatch_semaphore_create(1); + }); + dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER); + if (!dualCamera) { + AVCaptureDevice *device = [[SCCaptureDeviceResolver sharedInstance] findDualCamera]; + if (device) { + dualCamera = [[SCManagedCaptureDevice alloc] initWithDevice:device + devicePosition:SCManagedCaptureDevicePositionBackDualCamera]; + } + } + dispatch_semaphore_signal(semaphore); + return dualCamera; +} + ++ (instancetype)deviceWithPosition:(SCManagedCaptureDevicePosition)position +{ + switch (position) { + case SCManagedCaptureDevicePositionFront: + return [self front]; + case SCManagedCaptureDevicePositionBack: + return [self back]; + case SCManagedCaptureDevicePositionBackDualCamera: + return [self dualCamera]; + } +} + ++ (BOOL)is1080pSupported +{ + return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer]; +} + ++ (BOOL)isMixCaptureSupported +{ + return !![self front] && !![self back]; +} + ++ (BOOL)isNightModeSupported +{ + return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6orNewer]; +} + ++ (BOOL)isEnhancedNightModeSupported +{ + if (SC_AT_LEAST_IOS_11) { + return [SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer]; + } + return NO; +} + ++ (CGSize)defaultActiveFormatResolution +{ + if ([SCDeviceName isIphoneX]) { + return CGSizeMake(kSCManagedCapturerVideoActiveFormatWidth1080p, + kSCManagedCapturerVideoActiveFormatHeight1080p); + } + return CGSizeMake(kSCManagedCapturerDefaultVideoActiveFormatWidth, + kSCManagedCapturerDefaultVideoActiveFormatHeight); +} + ++ (CGSize)nightModeActiveFormatResolution +{ + if ([SCManagedCaptureDevice isEnhancedNightModeSupported]) { + return CGSizeMake(kSCManagedCapturerNightVideoHighResActiveFormatWidth, + kSCManagedCapturerNightVideoHighResActiveFormatHeight); + } + return CGSizeMake(kSCManagedCapturerNightVideoDefaultResActiveFormatWidth, + kSCManagedCapturerNightVideoDefaultResActiveFormatHeight); +} + +- (instancetype)initWithDevice:(AVCaptureDevice *)device devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + self = [super init]; + if (self) { + _device = device; + _devicePosition = devicePosition; + + if (SCCameraTweaksEnableFaceDetectionFocus(devicePosition)) { + _exposureHandler = [[SCManagedCaptureDeviceFaceDetectionAutoExposureHandler alloc] + initWithDevice:device + pointOfInterest:CGPointMake(0.5, 0.5) + managedCapturer:[SCManagedCapturer sharedInstance]]; + _focusHandler = [[SCManagedCaptureDeviceFaceDetectionAutoFocusHandler alloc] + initWithDevice:device + pointOfInterest:CGPointMake(0.5, 0.5) + managedCapturer:[SCManagedCapturer sharedInstance]]; + } else { + _exposureHandler = [[SCManagedCaptureDeviceAutoExposureHandler alloc] initWithDevice:device + pointOfInterest:CGPointMake(0.5, 0.5)]; + _focusHandler = 
[[SCManagedCaptureDeviceAutoFocusHandler alloc] initWithDevice:device + pointOfInterest:CGPointMake(0.5, 0.5)]; + } + _observeController = [[FBKVOController alloc] initWithObserver:self]; + [self _setAsExposureListenerForDevice:device]; + if (SCCameraTweaksEnableExposurePointObservation()) { + [self _observeExposurePointForDevice:device]; + } + if (SCCameraTweaksEnableFocusPointObservation()) { + [self _observeFocusPointForDevice:device]; + } + + _zoomFactor = 1.0; + [self _findSupportedFormats]; + } + return self; +} + +- (SCManagedCaptureDevicePosition)position +{ + return _devicePosition; +} + +#pragma mark - Setup and hook up with device + +- (BOOL)setDeviceAsInput:(AVCaptureSession *)session +{ + SCTraceStart(); + AVCaptureDeviceInput *deviceInput = [self deviceInput]; + if ([session canAddInput:deviceInput]) { + [session addInput:deviceInput]; + } else { + NSString *previousSessionPreset = session.sessionPreset; + session.sessionPreset = AVCaptureSessionPresetInputPriority; + // Now we surely can add input + if ([session canAddInput:deviceInput]) { + [session addInput:deviceInput]; + } else { + session.sessionPreset = previousSessionPreset; + return NO; + } + } + + [self _enableSubjectAreaChangeMonitoring]; + + [self _updateActiveFormatWithSession:session fallbackPreset:AVCaptureSessionPreset640x480]; + if (_device.activeFormat.videoMaxZoomFactor < 1 + 1e-5) { + _softwareZoom = YES; + } else { + _softwareZoom = NO; + if (_device.videoZoomFactor != _zoomFactor) { + // Reset the zoom factor + [self setZoomFactor:_zoomFactor]; + } + } + + [_exposureHandler setVisible:YES]; + [_focusHandler setVisible:YES]; + + _isConnected = YES; + + return YES; +} + +- (void)removeDeviceAsInput:(AVCaptureSession *)session +{ + SCTraceStart(); + if (_isConnected) { + [session removeInput:_deviceInput]; + [_exposureHandler setVisible:NO]; + [_focusHandler setVisible:NO]; + _isConnected = NO; + } +} + +- (void)resetDeviceAsInput +{ + _deviceInput = nil; + AVCaptureDevice *deviceFound; + switch (_devicePosition) { + case SCManagedCaptureDevicePositionFront: + deviceFound = [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionFront]; + break; + case SCManagedCaptureDevicePositionBack: + deviceFound = [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack]; + break; + case SCManagedCaptureDevicePositionBackDualCamera: + deviceFound = [[SCCaptureDeviceResolver sharedInstance] findDualCamera]; + break; + } + if (deviceFound) { + _device = deviceFound; + } +} + +#pragma mark - Configurations + +- (void)_findSupportedFormats +{ + NSInteger defaultHeight = [SCManagedCaptureDevice defaultActiveFormatResolution].height; + NSInteger nightHeight = [SCManagedCaptureDevice nightModeActiveFormatResolution].height; + NSInteger liveVideoStreamingHeight = kSCManagedCapturerLiveStreamingVideoActiveFormatHeight; + NSArray *heights = @[ @(nightHeight), @(defaultHeight), @(liveVideoStreamingHeight) ]; + BOOL formatsShouldSupportDepth = _devicePosition == SCManagedCaptureDevicePositionBackDualCamera; + NSDictionary *formats = SCBestHRSIFormatsForHeights(heights, _device.formats, formatsShouldSupportDepth); + _nightFormat = formats[@(nightHeight)]; + _defaultFormat = formats[@(defaultHeight)]; + _liveVideoStreamingFormat = formats[@(liveVideoStreamingHeight)]; +} + +- (AVCaptureDeviceFormat *)_bestSupportedFormat +{ + if (_isNightModeActive) { + return _nightFormat; + } + if (_liveVideoStreamingActive) { + return _liveVideoStreamingFormat; + } + return 
_defaultFormat; +} + +- (void)setNightModeActive:(BOOL)nightModeActive session:(AVCaptureSession *)session +{ + SCTraceStart(); + if (![SCManagedCaptureDevice isNightModeSupported]) { + return; + } + if (_isNightModeActive == nightModeActive) { + return; + } + _isNightModeActive = nightModeActive; + [self updateActiveFormatWithSession:session]; +} + +- (void)setLiveVideoStreaming:(BOOL)liveVideoStreaming session:(AVCaptureSession *)session +{ + SCTraceStart(); + if (_liveVideoStreamingActive == liveVideoStreaming) { + return; + } + _liveVideoStreamingActive = liveVideoStreaming; + [self updateActiveFormatWithSession:session]; +} + +- (void)setCaptureDepthData:(BOOL)captureDepthData session:(AVCaptureSession *)session +{ + SCTraceStart(); + _captureDepthData = captureDepthData; + [self _findSupportedFormats]; + [self updateActiveFormatWithSession:session]; +} + +- (void)updateActiveFormatWithSession:(AVCaptureSession *)session +{ + [self _updateActiveFormatWithSession:session fallbackPreset:AVCaptureSessionPreset640x480]; + if (_device.videoZoomFactor != _zoomFactor) { + [self setZoomFactor:_zoomFactor]; + } +} + +- (void)_updateActiveFormatWithSession:(AVCaptureSession *)session fallbackPreset:(NSString *)fallbackPreset +{ + AVCaptureDeviceFormat *nextFormat = [self _bestSupportedFormat]; + if (nextFormat && [session canSetSessionPreset:AVCaptureSessionPresetInputPriority]) { + session.sessionPreset = AVCaptureSessionPresetInputPriority; + if (nextFormat == _device.activeFormat) { + // Need to reconfigure frame rate though active format unchanged + [_device runTask:@"update frame rate" + withLockedConfiguration:^() { + [self _updateDeviceFrameRate]; + }]; + } else { + [_device runTask:@"update active format" + withLockedConfiguration:^() { + _device.activeFormat = nextFormat; + [self _updateDeviceFrameRate]; + }]; + } + } else { + session.sessionPreset = fallbackPreset; + } + [self _updateFieldOfView]; +} + +- (void)_updateDeviceFrameRate +{ + int32_t deviceFrameRate; + if (_liveVideoStreamingActive) { + deviceFrameRate = kSCManagedCaptureDeviceMaximumLowFrameRate; + } else { + deviceFrameRate = kSCManagedCaptureDeviceMaximumHighFrameRate; + } + CMTime frameDuration = CMTimeMake(1, deviceFrameRate); + if (@available(ios 11.0, *)) { + if (_captureDepthData) { + // Sync the video frame rate to the max depth frame rate (24 fps) + if (_device.activeDepthDataFormat.videoSupportedFrameRateRanges.firstObject) { + frameDuration = + _device.activeDepthDataFormat.videoSupportedFrameRateRanges.firstObject.minFrameDuration; + } + } + } + _device.activeVideoMaxFrameDuration = frameDuration; + _device.activeVideoMinFrameDuration = frameDuration; + if (_device.lowLightBoostSupported) { + _device.automaticallyEnablesLowLightBoostWhenAvailable = YES; + } +} + +- (void)setZoomFactor:(float)zoomFactor +{ + SCTraceStart(); + if (_softwareZoom) { + // Just remember the software zoom scale + if (zoomFactor <= kSCManagedCaptureDevicecSoftwareMaxZoomFactor && zoomFactor >= 1) { + _zoomFactor = zoomFactor; + } + } else { + [_device runTask:@"set zoom factor" + withLockedConfiguration:^() { + if (zoomFactor <= _device.activeFormat.videoMaxZoomFactor && zoomFactor >= 1) { + _zoomFactor = zoomFactor; + if (_device.videoZoomFactor != _zoomFactor) { + _device.videoZoomFactor = _zoomFactor; + } + } + }]; + } + [self _updateFieldOfView]; +} + +- (void)_updateFieldOfView +{ + float fieldOfView = _device.activeFormat.videoFieldOfView; + if (_zoomFactor > 1.f) { + // Adjust the field of view to take the zoom factor into 
account. + // Note: this assumes the zoom factor linearly affects the focal length. + fieldOfView = 2.f * SCRadiansToDegrees(atanf(tanf(SCDegreesToRadians(0.5f * fieldOfView)) / _zoomFactor)); + } + self.fieldOfView = fieldOfView; +} + +- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser +{ + [_exposureHandler setExposurePointOfInterest:pointOfInterest fromUser:fromUser]; +} + +// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot. +// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc), +// therefore we don't have to check _focusLock in this method. +- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest +{ + SCTraceStart(); + [_focusHandler setAutofocusPointOfInterest:pointOfInterest]; +} + +- (void)continuousAutofocus +{ + SCTraceStart(); + [_focusHandler continuousAutofocus]; +} + +- (void)setRecording:(BOOL)recording +{ + if (SCCameraTweaksSmoothAutoFocusWhileRecording() && [_device isSmoothAutoFocusSupported]) { + [self _setSmoothFocus:recording]; + } else { + [self _setFocusLock:recording]; + } + [_exposureHandler setStableExposure:recording]; +} + +- (void)_setFocusLock:(BOOL)focusLock +{ + SCTraceStart(); + [_focusHandler setFocusLock:focusLock]; +} + +- (void)_setSmoothFocus:(BOOL)smoothFocus +{ + SCTraceStart(); + [_focusHandler setSmoothFocus:smoothFocus]; +} + +- (void)setFlashActive:(BOOL)flashActive +{ + SCTraceStart(); + if (_flashActive != flashActive) { + if ([_device hasFlash]) { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + if (flashActive && [_device isFlashModeSupported:AVCaptureFlashModeOn]) { + [_device runTask:@"set flash active" + withLockedConfiguration:^() { + _device.flashMode = AVCaptureFlashModeOn; + }]; + } else if (!flashActive && [_device isFlashModeSupported:AVCaptureFlashModeOff]) { + [_device runTask:@"set flash off" + withLockedConfiguration:^() { + _device.flashMode = AVCaptureFlashModeOff; + }]; + } +#pragma clang diagnostic pop + _flashActive = flashActive; + } else { + _flashActive = NO; + } + } +} + +- (void)setTorchActive:(BOOL)torchActive +{ + SCTraceStart(); + if (_torchActive != torchActive) { + if ([_device hasTorch]) { + if (torchActive && [_device isTorchModeSupported:AVCaptureTorchModeOn]) { + [_device runTask:@"set torch active" + withLockedConfiguration:^() { + [_device setTorchMode:AVCaptureTorchModeOn]; + }]; + } else if (!torchActive && [_device isTorchModeSupported:AVCaptureTorchModeOff]) { + [_device runTask:@"set torch off" + withLockedConfiguration:^() { + _device.torchMode = AVCaptureTorchModeOff; + }]; + } + _torchActive = torchActive; + } else { + _torchActive = NO; + } + } +} + +#pragma mark - Utilities + +- (BOOL)isFlashSupported +{ + return _device.hasFlash; +} + +- (BOOL)isTorchSupported +{ + return _device.hasTorch; +} + +- (CGPoint)convertViewCoordinates:(CGPoint)viewCoordinates + viewSize:(CGSize)viewSize + videoGravity:(NSString *)videoGravity +{ + SCTraceStart(); + CGPoint pointOfInterest = CGPointMake(.5f, .5f); + CGRect cleanAperture; + AVCaptureDeviceInput *deviceInput = [self deviceInput]; + NSArray *ports = [deviceInput.ports copy]; + if ([videoGravity isEqualToString:AVLayerVideoGravityResize]) { + // Scale, switch x and y, and reverse x + return CGPointMake(viewCoordinates.y / viewSize.height, 1.f - (viewCoordinates.x / viewSize.width)); + } + for (AVCaptureInputPort *port in ports) { + if ([port mediaType] == AVMediaTypeVideo && 
port.formatDescription) { + cleanAperture = CMVideoFormatDescriptionGetCleanAperture(port.formatDescription, YES); + CGSize apertureSize = cleanAperture.size; + CGPoint point = viewCoordinates; + CGFloat apertureRatio = apertureSize.height / apertureSize.width; + CGFloat viewRatio = viewSize.width / viewSize.height; + CGFloat xc = .5f; + CGFloat yc = .5f; + if ([videoGravity isEqualToString:AVLayerVideoGravityResizeAspect]) { + if (viewRatio > apertureRatio) { + CGFloat y2 = viewSize.height; + CGFloat x2 = viewSize.height * apertureRatio; + CGFloat x1 = viewSize.width; + CGFloat blackBar = (x1 - x2) / 2; + // If point is inside letterboxed area, do coordinate conversion; otherwise, don't change the + // default value returned (.5,.5) + if (point.x >= blackBar && point.x <= blackBar + x2) { + // Scale (accounting for the letterboxing on the left and right of the video preview), + // switch x and y, and reverse x + xc = point.y / y2; + yc = 1.f - ((point.x - blackBar) / x2); + } + } else { + CGFloat y2 = viewSize.width / apertureRatio; + CGFloat y1 = viewSize.height; + CGFloat x2 = viewSize.width; + CGFloat blackBar = (y1 - y2) / 2; + // If point is inside letterboxed area, do coordinate conversion. Otherwise, don't change the + // default value returned (.5,.5) + if (point.y >= blackBar && point.y <= blackBar + y2) { + // Scale (accounting for the letterboxing on the top and bottom of the video preview), + // switch x and y, and reverse x + xc = ((point.y - blackBar) / y2); + yc = 1.f - (point.x / x2); + } + } + } else if ([videoGravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) { + // Scale, switch x and y, and reverse x + if (viewRatio > apertureRatio) { + CGFloat y2 = apertureSize.width * (viewSize.width / apertureSize.height); + xc = (point.y + ((y2 - viewSize.height) / 2.f)) / y2; // Account for cropped height + yc = (viewSize.width - point.x) / viewSize.width; + } else { + CGFloat x2 = apertureSize.height * (viewSize.height / apertureSize.width); + yc = 1.f - ((point.x + ((x2 - viewSize.width) / 2)) / x2); // Account for cropped width + xc = point.y / viewSize.height; + } + } + pointOfInterest = CGPointMake(xc, yc); + break; + } + } + return pointOfInterest; +} + +#pragma mark - SCManagedCapturer friendly methods + +- (AVCaptureDevice *)device +{ + return _device; +} + +- (AVCaptureDeviceInput *)deviceInput +{ + SCTraceStart(); + if (!_deviceInput) { + NSError *error = nil; + _deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:_device error:&error]; + if (!_deviceInput) { + _error = [error copy]; + } + } + return _deviceInput; +} + +- (NSError *)error +{ + return _error; +} + +- (BOOL)softwareZoom +{ + return _softwareZoom; +} + +- (BOOL)isConnected +{ + return _isConnected; +} + +- (BOOL)flashActive +{ + return _flashActive; +} + +- (BOOL)torchActive +{ + return _torchActive; +} + +- (float)zoomFactor +{ + return _zoomFactor; +} + +- (BOOL)isNightModeActive +{ + return _isNightModeActive; +} + +- (BOOL)liveVideoStreamingActive +{ + return _liveVideoStreamingActive; +} + +- (BOOL)isAvailable +{ + return [_device isConnected]; +} + +#pragma mark - Private methods + +- (void)_enableSubjectAreaChangeMonitoring +{ + SCTraceStart(); + [_device runTask:@"enable SubjectAreaChangeMonitoring" + withLockedConfiguration:^() { + _device.subjectAreaChangeMonitoringEnabled = YES; + }]; +} + +- (AVCaptureDeviceFormat *)activeFormat +{ + return _device.activeFormat; +} + +#pragma mark - Observe -adjustingExposure +- (void)_setAsExposureListenerForDevice:(AVCaptureDevice *)device +{ 
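+ // Observe the device's adjustingExposure flag via KVO; _adjustingExposureChanged: forwards the new value to the delegate.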
+ SCTraceStart(); + SCLogCoreCameraInfo(@"Set exposure adjustment KVO for device: %ld", (long)device.position); + [_observeController observe:device + keyPath:@keypath(device, adjustingExposure) + options:NSKeyValueObservingOptionNew + action:@selector(_adjustingExposureChanged:)]; +} + +- (void)_adjustingExposureChanged:(NSDictionary *)change +{ + SCTraceStart(); + BOOL adjustingExposure = [change[NSKeyValueChangeNewKey] boolValue]; + SCLogCoreCameraInfo(@"KVO exposure changed to %d", adjustingExposure); + if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeAdjustingExposure:)]) { + [self.delegate managedCaptureDevice:self didChangeAdjustingExposure:adjustingExposure]; + } +} + +#pragma mark - Observe -exposurePointOfInterest +- (void)_observeExposurePointForDevice:(AVCaptureDevice *)device +{ + SCTraceStart(); + SCLogCoreCameraInfo(@"Set exposure point KVO for device: %ld", (long)device.position); + [_observeController observe:device + keyPath:@keypath(device, exposurePointOfInterest) + options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew + action:@selector(_exposurePointOfInterestChanged:)]; +} + +- (void)_exposurePointOfInterestChanged:(NSDictionary *)change +{ + SCTraceStart(); + CGPoint exposurePoint = [change[NSKeyValueChangeNewKey] CGPointValue]; + SCLogCoreCameraInfo(@"KVO exposure point changed to %@", NSStringFromCGPoint(exposurePoint)); + if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeExposurePoint:)]) { + [self.delegate managedCaptureDevice:self didChangeExposurePoint:exposurePoint]; + } +} + +#pragma mark - Observe -focusPointOfInterest +- (void)_observeFocusPointForDevice:(AVCaptureDevice *)device +{ + SCTraceStart(); + SCLogCoreCameraInfo(@"Set focus point KVO for device: %ld", (long)device.position); + [_observeController observe:device + keyPath:@keypath(device, focusPointOfInterest) + options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew + action:@selector(_focusPointOfInterestChanged:)]; +} + +- (void)_focusPointOfInterestChanged:(NSDictionary *)change +{ + SCTraceStart(); + CGPoint focusPoint = [change[NSKeyValueChangeNewKey] CGPointValue]; + SCLogCoreCameraInfo(@"KVO focus point changed to %@", NSStringFromCGPoint(focusPoint)); + if ([self.delegate respondsToSelector:@selector(managedCaptureDevice:didChangeFocusPoint:)]) { + [self.delegate managedCaptureDevice:self didChangeFocusPoint:focusPoint]; + } +} + +- (void)dealloc +{ + [_observeController unobserveAll]; +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.h b/ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.h new file mode 100644 index 0000000..1ee516f --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.h @@ -0,0 +1,17 @@ +// +// SCManagedCaptureDeviceAutoExposureHandler.h +// Snapchat +// +// Created by Derek Peirce on 3/21/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
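+// This class is used to adjust exposure related parameters of camera, including exposure mode and exposure point.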
+// + +#import "SCManagedCaptureDeviceExposureHandler.h" + +#import + +@interface SCManagedCaptureDeviceAutoExposureHandler : NSObject + +- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.m b/ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.m new file mode 100644 index 0000000..179115b --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceAutoExposureHandler.m @@ -0,0 +1,63 @@ +// +// SCManagedCaptureDeviceAutoExposureHandler.m +// Snapchat +// +// Created by Derek Peirce on 3/21/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCaptureDeviceAutoExposureHandler.h" + +#import "AVCaptureDevice+ConfigurationLock.h" +#import "SCManagedCaptureDeviceExposureHandler.h" + +#import + +@import AVFoundation; + +@implementation SCManagedCaptureDeviceAutoExposureHandler { + CGPoint _exposurePointOfInterest; + AVCaptureDevice *_device; +} + +- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest +{ + if (self = [super init]) { + _device = device; + _exposurePointOfInterest = pointOfInterest; + } + return self; +} + +- (CGPoint)getExposurePointOfInterest +{ + return _exposurePointOfInterest; +} + +- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser +{ + SCTraceStart(); + if (!CGPointEqualToPoint(pointOfInterest, _exposurePointOfInterest)) { + if ([_device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] && + [_device isExposurePointOfInterestSupported]) { + [_device runTask:@"set exposure" + withLockedConfiguration:^() { + // Set exposure point before changing focus mode + // Be noticed that order does matter + _device.exposurePointOfInterest = pointOfInterest; + _device.exposureMode = AVCaptureExposureModeContinuousAutoExposure; + }]; + } + _exposurePointOfInterest = pointOfInterest; + } +} + +- (void)setStableExposure:(BOOL)stableExposure +{ +} + +- (void)setVisible:(BOOL)visible +{ +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.h b/ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.h new file mode 100644 index 0000000..df7e2f6 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.h @@ -0,0 +1,18 @@ +// +// SCManagedCaptureDeviceAutoFocusHandler.h +// Snapchat +// +// Created by Jiyang Zhu on 3/7/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// +// This class is used to adjust focus related parameters of camera, including focus mode and focus point. + +#import "SCManagedCaptureDeviceFocusHandler.h" + +#import + +@interface SCManagedCaptureDeviceAutoFocusHandler : NSObject + +- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.m b/ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.m new file mode 100644 index 0000000..ab2738c --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceAutoFocusHandler.m @@ -0,0 +1,131 @@ +// +// SCManagedCaptureDeviceAutoFocusHandler.m +// Snapchat +// +// Created by Jiyang Zhu on 3/7/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. 
+// + +#import "SCManagedCaptureDeviceAutoFocusHandler.h" + +#import "AVCaptureDevice+ConfigurationLock.h" + +#import +#import + +@import CoreGraphics; + +@interface SCManagedCaptureDeviceAutoFocusHandler () + +@property (nonatomic, assign) CGPoint focusPointOfInterest; +@property (nonatomic, strong) AVCaptureDevice *device; + +@property (nonatomic, assign) BOOL isContinuousAutofocus; +@property (nonatomic, assign) BOOL isFocusLock; + +@end + +@implementation SCManagedCaptureDeviceAutoFocusHandler + +- (instancetype)initWithDevice:(AVCaptureDevice *)device pointOfInterest:(CGPoint)pointOfInterest +{ + if (self = [super init]) { + _device = device; + _focusPointOfInterest = pointOfInterest; + _isContinuousAutofocus = YES; + _isFocusLock = NO; + } + return self; +} + +- (CGPoint)getFocusPointOfInterest +{ + return self.focusPointOfInterest; +} + +// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot. +// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc), +// therefore we don't have to check self.isFocusLock in this method. +- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) || self.isContinuousAutofocus) + // Do the setup immediately if the focus lock is off. + if ([self.device isFocusModeSupported:AVCaptureFocusModeAutoFocus] && + [self.device isFocusPointOfInterestSupported]) { + [self.device runTask:@"set autofocus" + withLockedConfiguration:^() { + // Set focus point before changing focus mode + // Be noticed that order does matter + self.device.focusPointOfInterest = pointOfInterest; + self.device.focusMode = AVCaptureFocusModeAutoFocus; + }]; + } + self.focusPointOfInterest = pointOfInterest; + self.isContinuousAutofocus = NO; +} + +- (void)continuousAutofocus +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(!self.isContinuousAutofocus); + if (!self.isFocusLock) { + // Do the setup immediately if the focus lock is off. + if ([self.device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] && + [self.device isFocusPointOfInterestSupported]) { + [self.device runTask:@"set continuous autofocus" + withLockedConfiguration:^() { + // Set focus point before changing focus mode + // Be noticed that order does matter + self.device.focusPointOfInterest = CGPointMake(0.5, 0.5); + self.device.focusMode = AVCaptureFocusModeContinuousAutoFocus; + }]; + } + } + self.focusPointOfInterest = CGPointMake(0.5, 0.5); + self.isContinuousAutofocus = YES; +} + +- (void)setFocusLock:(BOOL)focusLock +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(self.isFocusLock != focusLock); + // This is the old lock, we only do focus lock on back camera + if (focusLock) { + if ([self.device isFocusModeSupported:AVCaptureFocusModeLocked]) { + [self.device runTask:@"set focus lock on" + withLockedConfiguration:^() { + self.device.focusMode = AVCaptureFocusModeLocked; + }]; + } + } else { + // Restore to previous autofocus configurations + if ([self.device isFocusModeSupported:(self.isContinuousAutofocus ? AVCaptureFocusModeContinuousAutoFocus + : AVCaptureFocusModeAutoFocus)] && + [self.device isFocusPointOfInterestSupported]) { + [self.device runTask:@"set focus lock on" + withLockedConfiguration:^() { + self.device.focusPointOfInterest = self.focusPointOfInterest; + self.device.focusMode = self.isContinuousAutofocus ? 
AVCaptureFocusModeContinuousAutoFocus + : AVCaptureFocusModeAutoFocus; + }]; + } + } + self.isFocusLock = focusLock; +} + +- (void)setSmoothFocus:(BOOL)smoothFocus +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(smoothFocus != self.device.smoothAutoFocusEnabled); + [self.device runTask:@"set smooth autofocus" + withLockedConfiguration:^() { + [self.device setSmoothAutoFocusEnabled:smoothFocus]; + }]; +} + +- (void)setVisible:(BOOL)visible +{ +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.h b/ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.h new file mode 100644 index 0000000..93b5409 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.h @@ -0,0 +1,25 @@ +// +// SCManagedCaptureDeviceDefaultZoomHandler.h +// Snapchat +// +// Created by Yu-Kuan Lai on 4/12/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import + +#import +#import + +@class SCManagedCaptureDevice; +@class SCCaptureResource; + +@interface SCManagedCaptureDeviceDefaultZoomHandler : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; + +- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately; +- (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.m b/ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.m new file mode 100644 index 0000000..38b9876 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler.m @@ -0,0 +1,93 @@ +// +// SCManagedCaptureDeviceDefaultZoomHandler.m +// Snapchat +// +// Created by Yu-Kuan Lai on 4/12/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
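+// Applies zoom to the capture device on the capture queue; for software-zoom devices it scales the preview layer on the main queue instead, then publishes the new state through the announcer.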
+// + +#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h" + +#import "SCCaptureResource.h" +#import "SCManagedCaptureDevice+SCManagedCapturer.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerStateBuilder.h" +#import "SCMetalUtils.h" + +#import +#import +#import +#import + +@implementation SCManagedCaptureDeviceDefaultZoomHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + _captureResource = captureResource; + } + + return self; +} + +- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately +{ + [self _setZoomFactor:zoomFactor forManagedCaptureDevice:device]; +} + +- (void)softwareZoomWithDevice:(SCManagedCaptureDevice *)device +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer] || + [[SCQueuePerformer mainQueuePerformer] isCurrentPerformer], + @""); + SCAssert(device.softwareZoom, @"Only do software zoom for software zoom device"); + + SC_GUARD_ELSE_RETURN(!SCDeviceSupportsMetal()); + float zoomFactor = device.zoomFactor; + SCLogCapturerInfo(@"Adjusting software zoom factor to: %f", zoomFactor); + AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer; + [[SCQueuePerformer mainQueuePerformer] perform:^{ + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + // I end up need to change its superlayer transform to get the zoom effect + videoPreviewLayer.superlayer.affineTransform = CGAffineTransformMakeScale(zoomFactor, zoomFactor); + [CATransaction commit]; + }]; +} + +- (void)_setZoomFactor:(CGFloat)zoomFactor forManagedCaptureDevice:(SCManagedCaptureDevice *)device +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (device) { + SCLogCapturerInfo(@"Set zoom factor: %f -> %f", _captureResource.state.zoomFactor, zoomFactor); + [device setZoomFactor:zoomFactor]; + BOOL zoomFactorChanged = NO; + // If the device is our current device, send the notification, update the + // state. + if (device.isConnected && device == _captureResource.device) { + if (device.softwareZoom) { + [self softwareZoomWithDevice:device]; + } + _captureResource.state = [[[SCManagedCapturerStateBuilder + withManagedCapturerState:_captureResource.state] setZoomFactor:zoomFactor] build]; + zoomFactorChanged = YES; + } + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + if (zoomFactorChanged) { + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didChangeState:state]; + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didChangeZoomFactor:state]; + } + }); + } + }]; +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler_Private.h b/ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler_Private.h new file mode 100644 index 0000000..912d4b4 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceDefaultZoomHandler_Private.h @@ -0,0 +1,17 @@ +// +// SCManagedCaptureDeviceDefaultZoomHandler_Private.h +// Snapchat +// +// Created by Joe Qiao on 04/01/2018. 
+// + +#import "SCManagedCaptureDeviceDefaultZoomHandler.h" + +@interface SCManagedCaptureDeviceDefaultZoomHandler () + +@property (nonatomic, weak) SCCaptureResource *captureResource; +@property (nonatomic, weak) SCManagedCaptureDevice *currentDevice; + +- (void)_setZoomFactor:(CGFloat)zoomFactor forManagedCaptureDevice:(SCManagedCaptureDevice *)device; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceExposureHandler.h b/ManagedCapturer/SCManagedCaptureDeviceExposureHandler.h new file mode 100644 index 0000000..be68731 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceExposureHandler.h @@ -0,0 +1,22 @@ +// +// SCManagedCaptureDeviceExposureHandler.h +// Snapchat +// +// Created by Derek Peirce on 3/21/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import +#import + +@protocol SCManagedCaptureDeviceExposureHandler + +- (CGPoint)getExposurePointOfInterest; + +- (void)setStableExposure:(BOOL)stableExposure; + +- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser; + +- (void)setVisible:(BOOL)visible; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h b/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h new file mode 100644 index 0000000..7d93270 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h @@ -0,0 +1,28 @@ +// +// SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h +// Snapchat +// +// Created by Jiyang Zhu on 3/6/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// +// This class is used to +// 1. adjust exposure related parameters of camera, including exposure mode and exposure point. +// 2. receive detected face bounds, and set exposure point to a preferred face if needed. + +#import "SCManagedCaptureDeviceExposureHandler.h" + +#import + +#import + +@protocol SCCapturer; + +@interface SCManagedCaptureDeviceFaceDetectionAutoExposureHandler : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithDevice:(AVCaptureDevice *)device + pointOfInterest:(CGPoint)pointOfInterest + managedCapturer:(id)managedCapturer; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m b/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m new file mode 100644 index 0000000..ebabdf1 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m @@ -0,0 +1,121 @@ +// +// SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.m +// Snapchat +// +// Created by Jiyang Zhu on 3/6/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. 
+// + +#import "SCManagedCaptureDeviceFaceDetectionAutoExposureHandler.h" + +#import "AVCaptureDevice+ConfigurationLock.h" +#import "SCCameraTweaks.h" +#import "SCManagedCaptureDeviceExposureHandler.h" +#import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerListener.h" + +#import +#import +#import + +@import AVFoundation; + +@interface SCManagedCaptureDeviceFaceDetectionAutoExposureHandler () + +@property (nonatomic, strong) AVCaptureDevice *device; +@property (nonatomic, weak) id managedCapturer; +@property (nonatomic, assign) CGPoint exposurePointOfInterest; +@property (nonatomic, assign) BOOL isVisible; + +@property (nonatomic, copy) NSDictionary *faceBoundsByFaceID; +@property (nonatomic, strong) SCManagedCaptureFaceDetectionAdjustingPOIResource *resource; + +@end + +@implementation SCManagedCaptureDeviceFaceDetectionAutoExposureHandler + +- (instancetype)initWithDevice:(AVCaptureDevice *)device + pointOfInterest:(CGPoint)pointOfInterest + managedCapturer:(id)managedCapturer +{ + if (self = [super init]) { + SCAssert(device, @"AVCaptureDevice should not be nil."); + SCAssert(managedCapturer, @"id should not be nil."); + _device = device; + _exposurePointOfInterest = pointOfInterest; + SCManagedCaptureDevicePosition position = + (device.position == AVCaptureDevicePositionFront ? SCManagedCaptureDevicePositionFront + : SCManagedCaptureDevicePositionBack); + _resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc] + initWithDefaultPointOfInterest:pointOfInterest + shouldTargetOnFaceAutomatically:SCCameraTweaksTurnOnFaceDetectionFocusByDefault(position)]; + _managedCapturer = managedCapturer; + } + return self; +} + +- (void)dealloc +{ + [_managedCapturer removeListener:self]; +} + +- (CGPoint)getExposurePointOfInterest +{ + return self.exposurePointOfInterest; +} + +- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser +{ + SCTraceODPCompatibleStart(2); + + pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:pointOfInterest fromUser:fromUser]; + + [self _actuallySetExposurePointOfInterestIfNeeded:pointOfInterest]; +} + +- (void)_actuallySetExposurePointOfInterestIfNeeded:(CGPoint)pointOfInterest +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.exposurePointOfInterest)); + if ([self.device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure] && + [self.device isExposurePointOfInterestSupported]) { + [self.device runTask:@"set exposure" + withLockedConfiguration:^() { + // Set exposure point before changing exposure mode + // Be noticed that order does matter + self.device.exposurePointOfInterest = pointOfInterest; + self.device.exposureMode = AVCaptureExposureModeContinuousAutoExposure; + }]; + } + self.exposurePointOfInterest = pointOfInterest; +} + +- (void)setStableExposure:(BOOL)stableExposure +{ +} + +- (void)setVisible:(BOOL)visible +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(_isVisible != visible); + _isVisible = visible; + if (visible) { + [self.managedCapturer addListener:self]; + } else { + [self.managedCapturer removeListener:self]; + [self.resource reset]; + } +} + +#pragma mark - SCManagedCapturerListener +- (void)managedCapturer:(id)managedCapturer + didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(self.isVisible); + CGPoint pointOfInterest = [self.resource 
updateWithNewDetectedFaceBounds:faceBoundsByFaceID]; + [self _actuallySetExposurePointOfInterestIfNeeded:pointOfInterest]; +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h b/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h new file mode 100644 index 0000000..710a6dc --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h @@ -0,0 +1,28 @@ +// +// SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h +// Snapchat +// +// Created by Jiyang Zhu on 3/7/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// +// This class is used to +// 1. adjust focus related parameters of camera, including focus mode and focus point. +// 2. receive detected face bounds, and focus to a preferred face if needed. + +#import "SCManagedCaptureDeviceFocusHandler.h" + +#import + +#import + +@protocol SCCapturer; + +@interface SCManagedCaptureDeviceFaceDetectionAutoFocusHandler : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithDevice:(AVCaptureDevice *)device + pointOfInterest:(CGPoint)pointOfInterest + managedCapturer:(id)managedCapturer; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m b/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m new file mode 100644 index 0000000..80cd3e4 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m @@ -0,0 +1,153 @@ +// +// SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.m +// Snapchat +// +// Created by Jiyang Zhu on 3/7/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCaptureDeviceFaceDetectionAutoFocusHandler.h" + +#import "AVCaptureDevice+ConfigurationLock.h" +#import "SCCameraTweaks.h" +#import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerListener.h" + +#import +#import +#import + +@interface SCManagedCaptureDeviceFaceDetectionAutoFocusHandler () + +@property (nonatomic, strong) AVCaptureDevice *device; +@property (nonatomic, weak) id managedCapturer; +@property (nonatomic, assign) CGPoint focusPointOfInterest; + +@property (nonatomic, assign) BOOL isVisible; +@property (nonatomic, assign) BOOL isContinuousAutofocus; +@property (nonatomic, assign) BOOL focusLock; + +@property (nonatomic, copy) NSDictionary *faceBoundsByFaceID; +@property (nonatomic, strong) SCManagedCaptureFaceDetectionAdjustingPOIResource *resource; + +@end + +@implementation SCManagedCaptureDeviceFaceDetectionAutoFocusHandler + +- (instancetype)initWithDevice:(AVCaptureDevice *)device + pointOfInterest:(CGPoint)pointOfInterest + managedCapturer:(id)managedCapturer +{ + if (self = [super init]) { + SCAssert(device, @"AVCaptureDevice should not be nil."); + SCAssert(managedCapturer, @"id should not be nil."); + _device = device; + _focusPointOfInterest = pointOfInterest; + SCManagedCaptureDevicePosition position = + (device.position == AVCaptureDevicePositionFront ? 
SCManagedCaptureDevicePositionFront + : SCManagedCaptureDevicePositionBack); + _resource = [[SCManagedCaptureFaceDetectionAdjustingPOIResource alloc] + initWithDefaultPointOfInterest:pointOfInterest + shouldTargetOnFaceAutomatically:SCCameraTweaksTurnOnFaceDetectionFocusByDefault(position)]; + _managedCapturer = managedCapturer; + } + return self; +} + +- (CGPoint)getFocusPointOfInterest +{ + return self.focusPointOfInterest; +} + +// called when user taps on a point on screen, to re-adjust camera focus onto that tapped spot. +// this re-adjustment is always necessary, regardless of scenarios (recording video, taking photo, etc), +// therefore we don't have to check self.focusLock in this method. +- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest +{ + SCTraceODPCompatibleStart(2); + pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:pointOfInterest fromUser:YES]; + SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) || + self.isContinuousAutofocus); + [self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest + withFocusMode:AVCaptureFocusModeAutoFocus + taskName:@"set autofocus"]; +} + +- (void)continuousAutofocus +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(!self.isContinuousAutofocus); + CGPoint pointOfInterest = [self.resource updateWithNewProposedPointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO]; + [self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest + withFocusMode:AVCaptureFocusModeContinuousAutoFocus + taskName:@"set continuous autofocus"]; +} + +- (void)setFocusLock:(BOOL)focusLock +{ + // Disabled focus lock for face detection and focus handler. +} + +- (void)setSmoothFocus:(BOOL)smoothFocus +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(smoothFocus != self.device.smoothAutoFocusEnabled); + [self.device runTask:@"set smooth autofocus" + withLockedConfiguration:^() { + [self.device setSmoothAutoFocusEnabled:smoothFocus]; + }]; +} + +- (void)setVisible:(BOOL)visible +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(_isVisible != visible); + self.isVisible = visible; + if (visible) { + [[SCManagedCapturer sharedInstance] addListener:self]; + } else { + [[SCManagedCapturer sharedInstance] removeListener:self]; + [self.resource reset]; + } +} + +- (void)_actuallySetFocusPointOfInterestIfNeeded:(CGPoint)pointOfInterest + withFocusMode:(AVCaptureFocusMode)focusMode + taskName:(NSString *)taskName +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(!CGPointEqualToPoint(pointOfInterest, self.focusPointOfInterest) && + [self.device isFocusModeSupported:focusMode] && [self.device isFocusPointOfInterestSupported]); + [self.device runTask:taskName + withLockedConfiguration:^() { + // Set focus point before changing focus mode + // Be noticed that order does matter + self.device.focusPointOfInterest = pointOfInterest; + self.device.focusMode = focusMode; + }]; + + self.focusPointOfInterest = pointOfInterest; + self.isContinuousAutofocus = (focusMode == AVCaptureFocusModeContinuousAutoFocus); +} + +#pragma mark - SCManagedCapturerListener +- (void)managedCapturer:(id)managedCapturer + didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(self.isVisible); + CGPoint pointOfInterest = [self.resource updateWithNewDetectedFaceBounds:faceBoundsByFaceID]; + // If pointOfInterest is equal to CGPointMake(0.5, 0.5), it means no valid face is found, so that we should reset to + // AVCaptureFocusModeContinuousAutoFocus. 
Otherwise, focus on the point and set the mode as + // AVCaptureFocusModeAutoFocus. + // TODO(Jiyang): Refactor SCManagedCaptureFaceDetectionAdjustingPOIResource to include focusMode and exposureMode. + AVCaptureFocusMode focusMode = CGPointEqualToPoint(pointOfInterest, CGPointMake(0.5, 0.5)) + ? AVCaptureFocusModeContinuousAutoFocus + : AVCaptureFocusModeAutoFocus; + [self _actuallySetFocusPointOfInterestIfNeeded:pointOfInterest + withFocusMode:focusMode + taskName:@"set autofocus from face detection"]; +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceFocusHandler.h b/ManagedCapturer/SCManagedCaptureDeviceFocusHandler.h new file mode 100644 index 0000000..4a14bb9 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceFocusHandler.h @@ -0,0 +1,28 @@ +// +// SCManagedCaptureDeviceFocusHandler.h +// Snapchat +// +// Created by Jiyang Zhu on 3/7/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// + +#import +#import + +@protocol SCManagedCaptureDeviceFocusHandler + +- (CGPoint)getFocusPointOfInterest; + +/// Called when subject area changes. +- (void)continuousAutofocus; + +/// Called when user taps. +- (void)setAutofocusPointOfInterest:(CGPoint)pointOfInterest; + +- (void)setSmoothFocus:(BOOL)smoothFocus; + +- (void)setFocusLock:(BOOL)focusLock; + +- (void)setVisible:(BOOL)visible; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceHandler.h b/ManagedCapturer/SCManagedCaptureDeviceHandler.h new file mode 100644 index 0000000..95de114 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceHandler.h @@ -0,0 +1,23 @@ +// +// SCManagedCaptureDeviceHandler.h +// Snapchat +// +// Created by Jiyang Zhu on 3/8/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCaptureDevice.h" + +#import + +#import + +@class SCCaptureResource; + +@interface SCManagedCaptureDeviceHandler : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceHandler.m b/ManagedCapturer/SCManagedCaptureDeviceHandler.m new file mode 100644 index 0000000..d43bdd1 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceHandler.m @@ -0,0 +1,77 @@ +// +// SCManagedCaptureDeviceHandler.m +// Snapchat +// +// Created by Jiyang Zhu on 3/8/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. 
+// + +#import "SCManagedCaptureDeviceHandler.h" + +#import "SCCaptureResource.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerState.h" +#import "SCManagedCapturerStateBuilder.h" + +#import +#import +#import +#import + +@interface SCManagedCaptureDeviceHandler () + +@property (nonatomic, weak) SCCaptureResource *captureResource; + +@end + +@implementation SCManagedCaptureDeviceHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + SCAssert(captureResource, @"SCCaptureResource should not be nil."); + _captureResource = captureResource; + } + return self; +} + +- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeAdjustingExposure:(BOOL)adjustingExposure +{ + SC_GUARD_ELSE_RETURN(device == _captureResource.device); + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"KVO Changes adjustingExposure %d", adjustingExposure); + [_captureResource.queuePerformer perform:^{ + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setAdjustingExposure:adjustingExposure] build]; + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state]; + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didChangeAdjustingExposure:state]; + }); + }]; +} + +- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeExposurePoint:(CGPoint)exposurePoint +{ + SC_GUARD_ELSE_RETURN(device == self.captureResource.device); + SCTraceODPCompatibleStart(2); + runOnMainThreadAsynchronously(^{ + [self.captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didChangeExposurePoint:exposurePoint]; + }); +} + +- (void)managedCaptureDevice:(SCManagedCaptureDevice *)device didChangeFocusPoint:(CGPoint)focusPoint +{ + SC_GUARD_ELSE_RETURN(device == self.captureResource.device); + SCTraceODPCompatibleStart(2); + runOnMainThreadAsynchronously(^{ + [self.captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didChangeFocusPoint:focusPoint]; + }); +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.h b/ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.h new file mode 100644 index 0000000..cca35f8 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.h @@ -0,0 +1,12 @@ +// +// SCManagedCaptureDeviceLinearInterpolationZoomHandler.h +// Snapchat +// +// Created by Joe Qiao on 03/01/2018. +// + +#import "SCManagedCaptureDeviceDefaultZoomHandler.h" + +@interface SCManagedCaptureDeviceLinearInterpolationZoomHandler : SCManagedCaptureDeviceDefaultZoomHandler + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.m b/ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.m new file mode 100644 index 0000000..b73de51 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceLinearInterpolationZoomHandler.m @@ -0,0 +1,190 @@ +// +// SCManagedCaptureDeviceLinearInterpolationZoomHandler.m +// Snapchat +// +// Created by Joe Qiao on 03/01/2018. 
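+//
+// How the smoothing below works: when a zoom update arrives noticeably later than the previous one and
+// differs from the current factor by more than a threshold, intermediate factors are emitted from a
+// CADisplayLink instead of jumping straight to the target. The per-frame step is
+// ABS(diff) / (delayTolerantTime * framesPerSecond), clamped to a configured minimum step length.
+// Worked example with illustrative numbers only (the real values come from SCCameraTweaks): diff = 0.9,
+// delayTolerantTime = 0.3s and 30 fps give a step of 0.1, so the zoom advances by 0.1 per frame until it
+// reaches the target.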
+//
+
+#import "SCManagedCaptureDeviceLinearInterpolationZoomHandler.h"
+
+#import "SCCameraTweaks.h"
+#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h"
+#import "SCManagedCapturerLogging.h"
+
+#import
+#import
+
+@interface SCManagedCaptureDeviceLinearInterpolationZoomHandler ()
+
+@property (nonatomic, strong) CADisplayLink *displayLink;
+@property (nonatomic, assign) double timestamp;
+@property (nonatomic, assign) float targetFactor;
+@property (nonatomic, assign) float intermediateFactor;
+@property (nonatomic, assign) int trend;
+@property (nonatomic, assign) float stepLength;
+
+@end
+
+@implementation SCManagedCaptureDeviceLinearInterpolationZoomHandler
+
+- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
+{
+    self = [super initWithCaptureResource:captureResource];
+    if (self) {
+        _timestamp = -1.0;
+        _targetFactor = 1.0;
+        _intermediateFactor = _targetFactor;
+        _trend = 1;
+        _stepLength = 0.0;
+    }
+
+    return self;
+}
+
+- (void)dealloc
+{
+    [self _invalidate];
+}
+
+- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately
+{
+    if (self.currentDevice != device) {
+        if (_displayLink) {
+            // If the device changed, interrupt the smoothing process
+            // and reset to the target zoom factor immediately.
+            [self _resetToZoomFactor:_targetFactor];
+        }
+        self.currentDevice = device;
+        immediately = YES;
+    }
+
+    if (immediately) {
+        [self _resetToZoomFactor:zoomFactor];
+    } else {
+        [self _addTargetZoomFactor:zoomFactor];
+    }
+}
+
+#pragma mark - Configurable
+// Smooth only if the update time interval is greater than this threshold.
+- (double)_thresholdTimeIntervalToSmoothen
+{
+    return SCCameraTweaksSmoothZoomThresholdTime();
+}
+
+- (double)_thresholdFactorDiffToSmoothen
+{
+    return SCCameraTweaksSmoothZoomThresholdFactor();
+}
+
+- (int)_intermediateFactorFramesPerSecond
+{
+    return SCCameraTweaksSmoothZoomIntermediateFramesPerSecond();
+}
+
+- (double)_delayTolerantTime
+{
+    return SCCameraTweaksSmoothZoomDelayTolerantTime();
+}
+
+// Minimum step length between two intermediate factors;
+// the greater the better, as long as it still provides a 'smooth experience' during the smoothing process.
+- (float)_minimumStepLength
+{
+    return SCCameraTweaksSmoothZoomMinStepLength();
+}
+
+#pragma mark - Private methods
+- (void)_addTargetZoomFactor:(float)factor
+{
+    SCAssertMainThread();
+
+    SCLogCapturerInfo(@"Smooth Zoom - [1] t=%f zf=%f", CACurrentMediaTime(), factor);
+    if (SCFloatEqual(factor, _targetFactor)) {
+        return;
+    }
+    _targetFactor = factor;
+
+    float diff = _targetFactor - _intermediateFactor;
+    if ([self _isDuringSmoothingProcess]) {
+        // During smoothing, only update the data.
+        [self _updateDataWithDiff:diff];
+    } else {
+        double curTimestamp = CACurrentMediaTime();
+        if (!SCFloatEqual(_timestamp, -1.0) && (curTimestamp - _timestamp) > [self _thresholdTimeIntervalToSmoothen] &&
+            ABS(diff) > [self _thresholdFactorDiffToSmoothen]) {
+            // Smoothing is needed.
+            [self _updateDataWithDiff:diff];
+            if ([self _nextStep]) {
+                // Use a display link to interpolate intermediate factors and avoid a sharp jump.
+                _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(_nextStep)];
+                _displayLink.preferredFramesPerSecond = [self _intermediateFactorFramesPerSecond];
+                [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
+            }
+        } else {
+            _timestamp = curTimestamp;
+            _intermediateFactor = factor;
+
+            SCLogCapturerInfo(@"Smooth Zoom - [2] t=%f zf=%f", CACurrentMediaTime(), _intermediateFactor);
+            
[self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice]; + } + } +} + +- (void)_resetToZoomFactor:(float)factor +{ + [self _invalidate]; + + _timestamp = -1.0; + _targetFactor = factor; + _intermediateFactor = _targetFactor; + + [self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice]; +} + +- (BOOL)_nextStep +{ + _timestamp = CACurrentMediaTime(); + _intermediateFactor += (_trend * _stepLength); + + BOOL hasNext = YES; + if (_trend < 0.0) { + _intermediateFactor = MAX(_intermediateFactor, _targetFactor); + } else { + _intermediateFactor = MIN(_intermediateFactor, _targetFactor); + } + + SCLogCapturerInfo(@"Smooth Zoom - [3] t=%f zf=%f", CACurrentMediaTime(), _intermediateFactor); + [self _setZoomFactor:_intermediateFactor forManagedCaptureDevice:self.currentDevice]; + + if (SCFloatEqual(_intermediateFactor, _targetFactor)) { + // finish smoothening + [self _invalidate]; + hasNext = NO; + } + + return hasNext; +} + +- (void)_invalidate +{ + [_displayLink invalidate]; + _displayLink = nil; + _trend = 1; + _stepLength = 0.0; +} + +- (void)_updateDataWithDiff:(CGFloat)diff +{ + _trend = diff < 0.0 ? -1 : 1; + _stepLength = + MAX(_stepLength, MAX([self _minimumStepLength], + ABS(diff) / ([self _delayTolerantTime] * [self _intermediateFactorFramesPerSecond]))); +} + +- (BOOL)_isDuringSmoothingProcess +{ + return (_displayLink ? YES : NO); +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.h b/ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.h new file mode 100644 index 0000000..c78738d --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.h @@ -0,0 +1,20 @@ +// +// SCManagedCaptureDeviceLockOnRecordExposureHandler.h +// Snapchat +// +// Created by Derek Peirce on 3/24/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCaptureDeviceExposureHandler.h" + +#import + +// An exposure handler that prevents any changes in exposure as soon as recording begins +@interface SCManagedCaptureDeviceLockOnRecordExposureHandler : NSObject + +- (instancetype)initWithDevice:(AVCaptureDevice *)device + pointOfInterest:(CGPoint)pointOfInterest + allowTap:(BOOL)allowTap; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.m b/ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.m new file mode 100644 index 0000000..6d42977 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceLockOnRecordExposureHandler.m @@ -0,0 +1,90 @@ +// +// SCManagedCaptureDeviceLockOnRecordExposureHandler.m +// Snapchat +// +// Created by Derek Peirce on 3/24/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
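+//
+// Overview (derived from the implementation below): -setStableExposure: locks the exposure mode once
+// recording starts; -setVisible: snapshots the current ISO/exposure duration into an SCExposureState when
+// the handler becomes invisible and re-applies it when it becomes visible again while exposure is still
+// locked or custom. User taps can still move the exposure point only if the handler was created with
+// allowTap:YES.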
+// + +#import "SCManagedCaptureDeviceLockOnRecordExposureHandler.h" + +#import "AVCaptureDevice+ConfigurationLock.h" +#import "SCExposureState.h" +#import "SCManagedCaptureDeviceExposureHandler.h" + +#import + +@import AVFoundation; + +@implementation SCManagedCaptureDeviceLockOnRecordExposureHandler { + CGPoint _exposurePointOfInterest; + AVCaptureDevice *_device; + // allows the exposure to change when the user taps to refocus + BOOL _allowTap; + SCExposureState *_exposureState; +} + +- (instancetype)initWithDevice:(AVCaptureDevice *)device + pointOfInterest:(CGPoint)pointOfInterest + allowTap:(BOOL)allowTap +{ + if (self = [super init]) { + _device = device; + _exposurePointOfInterest = pointOfInterest; + _allowTap = allowTap; + } + return self; +} + +- (CGPoint)getExposurePointOfInterest +{ + return _exposurePointOfInterest; +} + +- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser +{ + SCTraceStart(); + BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked || + _device.exposureMode == AVCaptureExposureModeCustom || + _device.exposureMode == AVCaptureExposureModeAutoExpose; + if (!locked || (fromUser && _allowTap)) { + AVCaptureExposureMode exposureMode = + (locked ? AVCaptureExposureModeAutoExpose : AVCaptureExposureModeContinuousAutoExposure); + if ([_device isExposureModeSupported:exposureMode] && [_device isExposurePointOfInterestSupported]) { + [_device runTask:@"set exposure point" + withLockedConfiguration:^() { + // Set exposure point before changing focus mode + // Be noticed that order does matter + _device.exposurePointOfInterest = pointOfInterest; + _device.exposureMode = exposureMode; + }]; + } + _exposurePointOfInterest = pointOfInterest; + } +} + +- (void)setStableExposure:(BOOL)stableExposure +{ + AVCaptureExposureMode exposureMode = + stableExposure ? AVCaptureExposureModeLocked : AVCaptureExposureModeContinuousAutoExposure; + if ([_device isExposureModeSupported:exposureMode]) { + [_device runTask:@"set stable exposure" + withLockedConfiguration:^() { + _device.exposureMode = exposureMode; + }]; + } +} + +- (void)setVisible:(BOOL)visible +{ + if (visible) { + if (_device.exposureMode == AVCaptureExposureModeLocked || + _device.exposureMode == AVCaptureExposureModeCustom) { + [_exposureState applyISOAndExposureDurationToDevice:_device]; + } + } else { + _exposureState = [[SCExposureState alloc] initWithDevice:_device]; + } +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h b/ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h new file mode 100644 index 0000000..05e7a61 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h @@ -0,0 +1,13 @@ +// +// SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h +// Snapchat +// +// Created by Yu-Kuan Lai on 4/12/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCaptureDeviceDefaultZoomHandler.h" + +@interface SCManagedCaptureDeviceSavitzkyGolayZoomHandler : SCManagedCaptureDeviceDefaultZoomHandler + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m b/ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m new file mode 100644 index 0000000..468e104 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m @@ -0,0 +1,95 @@ +// +// SCManagedCaptureDeviceSavitzkyGolayZoomHandler.m +// Snapchat +// +// Created by Yu-Kuan Lai on 4/12/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
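+//
+// For reference, the weights used in _savitzkyGolayFilteredZoomFactor below are the standard 9-point
+// quadratic/cubic Savitzky-Golay smoothing coefficients (-21, 14, 39, 54, 59, 54, 39, 14, -21) with
+// normalization 231. They sum to 231, so a constant zoom factor passes through unchanged; for example,
+// nine samples of 2.0 filter to (59 + 2*(54 + 39 + 14 - 21)) * 2.0 / 231 = 2.0, while isolated jitter in
+// the window is averaged away.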
+// https://en.wikipedia.org/wiki/Savitzky%E2%80%93Golay_filter +// + +#import "SCManagedCaptureDeviceSavitzkyGolayZoomHandler.h" + +#import "SCManagedCaptureDevice.h" +#import "SCManagedCaptureDeviceDefaultZoomHandler_Private.h" + +#import +#import + +static NSUInteger const kSCSavitzkyGolayWindowSize = 9; +static CGFloat const kSCUpperSharpZoomThreshold = 1.15; + +@interface SCManagedCaptureDeviceSavitzkyGolayZoomHandler () + +@property (nonatomic, strong) NSMutableArray *zoomFactorHistoryArray; + +@end + +@implementation SCManagedCaptureDeviceSavitzkyGolayZoomHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super initWithCaptureResource:captureResource]; + if (self) { + _zoomFactorHistoryArray = [[NSMutableArray alloc] init]; + } + + return self; +} + +- (void)setZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device immediately:(BOOL)immediately +{ + if (self.currentDevice != device) { + // reset if device changed + self.currentDevice = device; + [self _resetZoomFactor:zoomFactor forDevice:self.currentDevice]; + return; + } + + if (immediately || zoomFactor == 1 || _zoomFactorHistoryArray.count == 0) { + // reset if zoomFactor is 1 or this is the first data point + [self _resetZoomFactor:zoomFactor forDevice:device]; + return; + } + + CGFloat lastVal = [[_zoomFactorHistoryArray lastObject] floatValue]; + CGFloat upperThreshold = lastVal * kSCUpperSharpZoomThreshold; + if (zoomFactor > upperThreshold) { + // sharp change in zoomFactor, reset + [self _resetZoomFactor:zoomFactor forDevice:device]; + return; + } + + [_zoomFactorHistoryArray addObject:@(zoomFactor)]; + if ([_zoomFactorHistoryArray count] > kSCSavitzkyGolayWindowSize) { + [_zoomFactorHistoryArray removeObjectAtIndex:0]; + } + + float filteredZoomFactor = + SC_CLAMP([self _savitzkyGolayFilteredZoomFactor], kSCMinVideoZoomFactor, kSCMaxVideoZoomFactor); + [self _setZoomFactor:filteredZoomFactor forManagedCaptureDevice:device]; +} + +- (CGFloat)_savitzkyGolayFilteredZoomFactor +{ + if ([_zoomFactorHistoryArray count] == kSCSavitzkyGolayWindowSize) { + CGFloat filteredZoomFactor = + 59 * [_zoomFactorHistoryArray[4] floatValue] + + 54 * ([_zoomFactorHistoryArray[3] floatValue] + [_zoomFactorHistoryArray[5] floatValue]) + + 39 * ([_zoomFactorHistoryArray[2] floatValue] + [_zoomFactorHistoryArray[6] floatValue]) + + 14 * ([_zoomFactorHistoryArray[1] floatValue] + [_zoomFactorHistoryArray[7] floatValue]) - + 21 * ([_zoomFactorHistoryArray[0] floatValue] + [_zoomFactorHistoryArray[8] floatValue]); + filteredZoomFactor /= 231; + return filteredZoomFactor; + } else { + return [[_zoomFactorHistoryArray lastObject] floatValue]; // use zoomFactor directly if we have less than 9 + } +} + +- (void)_resetZoomFactor:(CGFloat)zoomFactor forDevice:(SCManagedCaptureDevice *)device +{ + [_zoomFactorHistoryArray removeAllObjects]; + [_zoomFactorHistoryArray addObject:@(zoomFactor)]; + [self _setZoomFactor:zoomFactor forManagedCaptureDevice:device]; +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.h b/ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.h new file mode 100644 index 0000000..825da29 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.h @@ -0,0 +1,23 @@ +// +// SCManagedCaptureDeviceSubjectAreaHandler.h +// Snapchat +// +// Created by Xiaokang Liu on 19/03/2018. +// +// This class is used to handle the AVCaptureDeviceSubjectAreaDidChangeNotification notification for SCManagedCapturer. 
+// To reset device's settings when the subject area changed + +#import + +#import + +@class SCCaptureResource; +@protocol SCCapturer; + +@interface SCManagedCaptureDeviceSubjectAreaHandler : NSObject +SC_INIT_AND_NEW_UNAVAILABLE +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER; + +- (void)stopObserving; +- (void)startObserving; +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.m b/ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.m new file mode 100644 index 0000000..5fe08e7 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceSubjectAreaHandler.m @@ -0,0 +1,67 @@ +// +// SCManagedCaptureDeviceSubjectAreaHandler.m +// Snapchat +// +// Created by Xiaokang Liu on 19/03/2018. +// + +#import "SCManagedCaptureDeviceSubjectAreaHandler.h" + +#import "SCCameraTweaks.h" +#import "SCCaptureResource.h" +#import "SCCaptureWorker.h" +#import "SCManagedCaptureDevice+SCManagedCapturer.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerState.h" + +#import +#import + +@interface SCManagedCaptureDeviceSubjectAreaHandler () { + __weak SCCaptureResource *_captureResource; +} +@end + +@implementation SCManagedCaptureDeviceSubjectAreaHandler +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + SCAssert(captureResource, @""); + _captureResource = captureResource; + } + return self; +} + +- (void)stopObserving +{ + [[NSNotificationCenter defaultCenter] removeObserver:self + name:AVCaptureDeviceSubjectAreaDidChangeNotification + object:nil]; +} + +- (void)startObserving +{ + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(_subjectAreaDidChange:) + name:AVCaptureDeviceSubjectAreaDidChangeNotification + object:nil]; +} + +#pragma mark - Private methods +- (void)_subjectAreaDidChange:(NSDictionary *)notification +{ + [_captureResource.queuePerformer perform:^{ + if (_captureResource.device.isConnected && !_captureResource.state.arSessionActive) { + // Reset to continuous autofocus when the subject area changed + [_captureResource.device continuousAutofocus]; + [_captureResource.device setExposurePointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO]; + if (SCCameraTweaksEnablePortraitModeAutofocus()) { + [SCCaptureWorker setPortraitModePointOfInterestAsynchronously:CGPointMake(0.5, 0.5) + completionHandler:nil + resource:_captureResource]; + } + } + }]; +} +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.h b/ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.h new file mode 100644 index 0000000..5d1db73 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.h @@ -0,0 +1,19 @@ +// +// SCManagedCaptureDeviceThresholdExposureHandler.h +// Snapchat +// +// Created by Derek Peirce on 4/11/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
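+//
+// Summary of the implementation in the .m: the handler KVO-observes exposureTargetOffset while the device
+// is in custom exposure mode; once the offset drifts past the given threshold it runs a one-shot
+// AVCaptureExposureModeAutoExpose on the center point, and when that completes (the exposure mode settles
+// back to Locked) it captures the new ISO/exposure duration via SCExposureState and re-applies it as the
+// custom exposure.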
+// + +#import "SCManagedCaptureDeviceExposureHandler.h" + +#import + +@interface SCManagedCaptureDeviceThresholdExposureHandler : NSObject + +- (instancetype)initWithDevice:(AVCaptureDevice *)device + pointOfInterest:(CGPoint)pointOfInterest + threshold:(CGFloat)threshold; + +@end diff --git a/ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.m b/ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.m new file mode 100644 index 0000000..7487405 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureDeviceThresholdExposureHandler.m @@ -0,0 +1,133 @@ +// +// SCManagedCaptureDeviceThresholdExposureHandler.m +// Snapchat +// +// Created by Derek Peirce on 4/11/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCaptureDeviceThresholdExposureHandler.h" + +#import "AVCaptureDevice+ConfigurationLock.h" +#import "SCCameraTweaks.h" +#import "SCExposureState.h" +#import "SCManagedCaptureDeviceExposureHandler.h" + +#import + +#import + +@import AVFoundation; + +@implementation SCManagedCaptureDeviceThresholdExposureHandler { + AVCaptureDevice *_device; + CGPoint _exposurePointOfInterest; + CGFloat _threshold; + // allows the exposure to change when the user taps to refocus + SCExposureState *_exposureState; + FBKVOController *_kvoController; +} + +- (instancetype)initWithDevice:(AVCaptureDevice *)device + pointOfInterest:(CGPoint)pointOfInterest + threshold:(CGFloat)threshold +{ + if (self = [super init]) { + _device = device; + _exposurePointOfInterest = pointOfInterest; + _threshold = threshold; + _kvoController = [FBKVOController controllerWithObserver:self]; + @weakify(self); + [_kvoController observe:device + keyPath:NSStringFromSelector(@selector(exposureMode)) + options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew + block:^(id observer, id object, NSDictionary *change) { + @strongify(self); + AVCaptureExposureMode old = + (AVCaptureExposureMode)[(NSNumber *)change[NSKeyValueChangeOldKey] intValue]; + AVCaptureExposureMode new = + (AVCaptureExposureMode)[(NSNumber *)change[NSKeyValueChangeNewKey] intValue]; + if (old == AVCaptureExposureModeAutoExpose && new == AVCaptureExposureModeLocked) { + // auto expose is done, go back to custom + self->_exposureState = [[SCExposureState alloc] initWithDevice:self->_device]; + [self->_exposureState applyISOAndExposureDurationToDevice:self->_device]; + } + }]; + [_kvoController observe:device + keyPath:NSStringFromSelector(@selector(exposureTargetOffset)) + options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew + block:^(id observer, id object, NSDictionary *change) { + @strongify(self); + if (self->_device.exposureMode == AVCaptureExposureModeCustom) { + CGFloat offset = [(NSNumber *)change[NSKeyValueChangeOldKey] floatValue]; + if (fabs(offset) > self->_threshold) { + [self->_device runTask:@"set exposure point" + withLockedConfiguration:^() { + // Set exposure point before changing focus mode + // Be noticed that order does matter + self->_device.exposurePointOfInterest = CGPointMake(0.5, 0.5); + self->_device.exposureMode = AVCaptureExposureModeAutoExpose; + }]; + } + } + }]; + } + return self; +} + +- (CGPoint)getExposurePointOfInterest +{ + return _exposurePointOfInterest; +} + +- (void)setExposurePointOfInterest:(CGPoint)pointOfInterest fromUser:(BOOL)fromUser +{ + SCTraceStart(); + BOOL locked = _device.exposureMode == AVCaptureExposureModeLocked || + _device.exposureMode == AVCaptureExposureModeCustom || + _device.exposureMode == AVCaptureExposureModeAutoExpose; + if 
(!locked || fromUser) { + AVCaptureExposureMode exposureMode = + (locked ? AVCaptureExposureModeAutoExpose : AVCaptureExposureModeContinuousAutoExposure); + if ([_device isExposureModeSupported:exposureMode] && [_device isExposurePointOfInterestSupported]) { + [_device runTask:@"set exposure point" + withLockedConfiguration:^() { + // Set exposure point before changing focus mode + // Be noticed that order does matter + _device.exposurePointOfInterest = pointOfInterest; + _device.exposureMode = exposureMode; + }]; + } + _exposurePointOfInterest = pointOfInterest; + } +} + +- (void)setStableExposure:(BOOL)stableExposure +{ + if (stableExposure) { + _exposureState = [[SCExposureState alloc] initWithDevice:_device]; + [_exposureState applyISOAndExposureDurationToDevice:_device]; + } else { + AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure; + if ([_device isExposureModeSupported:exposureMode]) { + [_device runTask:@"set exposure point" + withLockedConfiguration:^() { + _device.exposureMode = AVCaptureExposureModeContinuousAutoExposure; + }]; + } + } +} + +- (void)setVisible:(BOOL)visible +{ + if (visible) { + if (_device.exposureMode == AVCaptureExposureModeLocked || + _device.exposureMode == AVCaptureExposureModeCustom) { + [_exposureState applyISOAndExposureDurationToDevice:_device]; + } + } else { + _exposureState = [[SCExposureState alloc] initWithDevice:_device]; + } +} + +@end diff --git a/ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.h b/ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.h new file mode 100644 index 0000000..f1fa9c3 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.h @@ -0,0 +1,61 @@ +// +// SCManagedCaptureFaceDetectionAdjustingPOIResource.h +// Snapchat +// +// Created by Jiyang Zhu on 3/7/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// +// This class is used to keep several properties for face detection and focus/exposure. It provides methods to help +// FaceDetectionAutoFocusHandler and FaceDetectionAutoExposureHandler to deal with the point of interest setting events +// from user taps, subject area changes, and face detection, by updating itself and return the actual point of +// interest. + +#import +#import + +typedef NS_ENUM(NSInteger, SCManagedCaptureFaceDetectionAdjustingPOIMode) { + SCManagedCaptureFaceDetectionAdjustingPOIModeNone = 0, + SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace, + SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace, +}; + +@interface SCManagedCaptureFaceDetectionAdjustingPOIResource : NSObject + +@property (nonatomic, assign) CGPoint pointOfInterest; + +@property (nonatomic, strong) NSDictionary *faceBoundsByFaceID; +@property (nonatomic, assign) SCManagedCaptureFaceDetectionAdjustingPOIMode adjustingPOIMode; +@property (nonatomic, assign) BOOL shouldTargetOnFaceAutomatically; +@property (nonatomic, strong) NSNumber *targetingFaceID; +@property (nonatomic, assign) CGRect targetingFaceBounds; + +- (instancetype)initWithDefaultPointOfInterest:(CGPoint)pointOfInterest + shouldTargetOnFaceAutomatically:(BOOL)shouldTargetOnFaceAutomatically; + +- (void)reset; + +/** + Update SCManagedCaptureFaceDetectionAdjustingPOIResource when a new POI adjustment comes. It will find the face that + the proposedPoint belongs to, return the center of the face, if the adjustingPOIMode and fromUser meets the + requirements. + + @param proposedPoint + The point of interest that upper level wants to set. 
+ @param fromUser + Whether the setting is from user's tap or not. + @return + The actual point of interest that should be applied. + */ +- (CGPoint)updateWithNewProposedPointOfInterest:(CGPoint)proposedPoint fromUser:(BOOL)fromUser; + +/** + Update SCManagedCaptureFaceDetectionAdjustingPOIResource when new detected face bounds comes. + + @param faceBoundsByFaceID + A dictionary. Key: FaceID as NSNumber. Value: FaceBounds as CGRect. + @return + The actual point of interest that should be applied. + */ +- (CGPoint)updateWithNewDetectedFaceBounds:(NSDictionary *)faceBoundsByFaceID; + +@end diff --git a/ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.m b/ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.m new file mode 100644 index 0000000..935a3a8 --- /dev/null +++ b/ManagedCapturer/SCManagedCaptureFaceDetectionAdjustingPOIResource.m @@ -0,0 +1,232 @@ +// +// SCManagedCaptureFaceDetectionAdjustingPOIResource.m +// Snapchat +// +// Created by Jiyang Zhu on 3/7/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCaptureFaceDetectionAdjustingPOIResource.h" + +#import +#import +#import + +@implementation SCManagedCaptureFaceDetectionAdjustingPOIResource { + CGPoint _defaultPointOfInterest; +} + +#pragma mark - Public Methods + +- (instancetype)initWithDefaultPointOfInterest:(CGPoint)pointOfInterest + shouldTargetOnFaceAutomatically:(BOOL)shouldTargetOnFaceAutomatically +{ + if (self = [super init]) { + _pointOfInterest = pointOfInterest; + _defaultPointOfInterest = pointOfInterest; + _shouldTargetOnFaceAutomatically = shouldTargetOnFaceAutomatically; + } + return self; +} + +- (void)reset +{ + SCTraceODPCompatibleStart(2); + self.adjustingPOIMode = SCManagedCaptureFaceDetectionAdjustingPOIModeNone; + self.targetingFaceID = nil; + self.targetingFaceBounds = CGRectZero; + self.faceBoundsByFaceID = nil; + self.pointOfInterest = _defaultPointOfInterest; +} + +- (CGPoint)updateWithNewProposedPointOfInterest:(CGPoint)proposedPoint fromUser:(BOOL)fromUser +{ + SCTraceODPCompatibleStart(2); + if (fromUser) { + NSNumber *faceID = + [self _getFaceIDOfFaceBoundsContainingPoint:proposedPoint fromFaceBounds:self.faceBoundsByFaceID]; + if (faceID && [faceID integerValue] >= 0) { + CGPoint point = [self _getPointOfInterestWithFaceID:faceID fromFaceBounds:self.faceBoundsByFaceID]; + if ([self _isPointOfInterestValid:point]) { + [self _setPointOfInterest:point + targetingFaceID:faceID + adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace]; + } else { + [self _setPointOfInterest:proposedPoint + targetingFaceID:nil + adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace]; + } + } else { + [self _setPointOfInterest:proposedPoint + targetingFaceID:nil + adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace]; + } + } else { + [self _setPointOfInterest:proposedPoint + targetingFaceID:nil + adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeNone]; + } + return self.pointOfInterest; +} + +- (CGPoint)updateWithNewDetectedFaceBounds:(NSDictionary *)faceBoundsByFaceID +{ + SCTraceODPCompatibleStart(2); + self.faceBoundsByFaceID = faceBoundsByFaceID; + switch (self.adjustingPOIMode) { + case SCManagedCaptureFaceDetectionAdjustingPOIModeNone: { + if (self.shouldTargetOnFaceAutomatically) { + [self _focusOnPreferredFaceInFaceBounds:self.faceBoundsByFaceID]; + } + } break; + case SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace: { + BOOL 
isFocusingOnCurrentTargetingFaceSuccess = + [self _focusOnFaceWithTargetFaceID:self.targetingFaceID inFaceBounds:self.faceBoundsByFaceID]; + if (!isFocusingOnCurrentTargetingFaceSuccess && self.shouldTargetOnFaceAutomatically) { + // If the targeted face has disappeared, and shouldTargetOnFaceAutomatically is YES, automatically target on + // the next preferred face. + [self _focusOnPreferredFaceInFaceBounds:self.faceBoundsByFaceID]; + } + } break; + case SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithoutFace: + // The point of interest should be fixed at a non-face point where user tapped before. + break; + } + return self.pointOfInterest; +} + +#pragma mark - Internal Methods + +- (BOOL)_focusOnPreferredFaceInFaceBounds:(NSDictionary *)faceBoundsByFaceID +{ + SCTraceODPCompatibleStart(2); + NSNumber *preferredFaceID = [self _getPreferredFaceIDFromFaceBounds:faceBoundsByFaceID]; + return [self _focusOnFaceWithTargetFaceID:preferredFaceID inFaceBounds:faceBoundsByFaceID]; +} + +- (BOOL)_focusOnFaceWithTargetFaceID:(NSNumber *)preferredFaceID + inFaceBounds:(NSDictionary *)faceBoundsByFaceID +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN_VALUE(preferredFaceID, NO); + NSValue *faceBoundsValue = [faceBoundsByFaceID objectForKey:preferredFaceID]; + if (faceBoundsValue) { + CGRect faceBounds = [faceBoundsValue CGRectValue]; + CGPoint proposedPoint = CGPointMake(CGRectGetMidX(faceBounds), CGRectGetMidY(faceBounds)); + if ([self _isPointOfInterestValid:proposedPoint]) { + if ([self _shouldChangeToNewPoint:proposedPoint withNewFaceID:preferredFaceID newFaceBounds:faceBounds]) { + [self _setPointOfInterest:proposedPoint + targetingFaceID:preferredFaceID + adjustingPOIMode:SCManagedCaptureFaceDetectionAdjustingPOIModeFixedOnPointWithFace]; + } + return YES; + } + } + [self reset]; + return NO; +} + +- (void)_setPointOfInterest:(CGPoint)pointOfInterest + targetingFaceID:(NSNumber *)targetingFaceID + adjustingPOIMode:(SCManagedCaptureFaceDetectionAdjustingPOIMode)adjustingPOIMode +{ + SCTraceODPCompatibleStart(2); + self.pointOfInterest = pointOfInterest; + self.targetingFaceID = targetingFaceID; + if (targetingFaceID) { // If targetingFaceID exists, record the current face bounds. + self.targetingFaceBounds = [[self.faceBoundsByFaceID objectForKey:targetingFaceID] CGRectValue]; + } else { // Otherwise, reset targetingFaceBounds to zero. + self.targetingFaceBounds = CGRectZero; + } + self.adjustingPOIMode = adjustingPOIMode; +} + +- (BOOL)_isPointOfInterestValid:(CGPoint)pointOfInterest +{ + return (pointOfInterest.x >= 0 && pointOfInterest.x <= 1 && pointOfInterest.y >= 0 && pointOfInterest.y <= 1); +} + +- (NSNumber *)_getPreferredFaceIDFromFaceBounds:(NSDictionary *)faceBoundsByFaceID +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN_VALUE(faceBoundsByFaceID.count > 0, nil); + + // Find out the bounds with the max area. 
+
+    __block NSNumber *preferredFaceID = nil;
+    __block CGFloat maxArea = 0;
+    [faceBoundsByFaceID
+        enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {
+            CGRect faceBounds = [obj CGRectValue];
+            CGFloat area = CGRectGetWidth(faceBounds) * CGRectGetHeight(faceBounds);
+            if (area > maxArea) {
+                preferredFaceID = key;
+                maxArea = area;
+            }
+        }];
+
+    return preferredFaceID;
+}
+
+- (CGPoint)_getPointOfInterestWithFaceID:(NSNumber *)faceID
+                          fromFaceBounds:(NSDictionary *)faceBoundsByFaceID
+{
+    SCTraceODPCompatibleStart(2);
+    NSValue *faceBoundsValue = [faceBoundsByFaceID objectForKey:faceID];
+    if (faceBoundsValue) {
+        CGRect faceBounds = [faceBoundsValue CGRectValue];
+        CGPoint point = CGPointMake(CGRectGetMidX(faceBounds), CGRectGetMidY(faceBounds));
+        return point;
+    } else {
+        return CGPointMake(-1, -1); // An invalid point.
+    }
+}
+
+/**
+ Setting a new focus/exposure point requires expensive device configuration, so we only set a new POI when we have to.
+ This method returns whether setting the new point is necessary.
+ If not, there is no need to change the POI.
+ */
+- (BOOL)_shouldChangeToNewPoint:(CGPoint)newPoint
+                  withNewFaceID:(NSNumber *)newFaceID
+                  newFaceBounds:(CGRect)newFaceBounds
+{
+    SCTraceODPCompatibleStart(2);
+    BOOL shouldChange = NO;
+    if (!newFaceID || !self.targetingFaceID ||
+        ![newFaceID isEqualToNumber:self.targetingFaceID]) { // Return YES if it is a new face.
+        shouldChange = YES;
+    } else if (CGRectEqualToRect(self.targetingFaceBounds, CGRectZero) ||
+               !CGRectContainsPoint(self.targetingFaceBounds,
+                                    newPoint)) { // Return YES if the new point is outside the current face bounds.
+        shouldChange = YES;
+    } else {
+        CGFloat currentBoundsArea =
+            CGRectGetWidth(self.targetingFaceBounds) * CGRectGetHeight(self.targetingFaceBounds);
+        CGFloat newBoundsArea = CGRectGetWidth(newFaceBounds) * CGRectGetHeight(newFaceBounds);
+        if (newBoundsArea >= currentBoundsArea * 1.2 ||
+            newBoundsArea <=
+                currentBoundsArea *
+                    0.8) { // Return YES if the area of the new bounds is more than 20% larger or smaller than the current one.
+            shouldChange = YES;
+        }
+    }
+    return shouldChange;
+}
+
+- (NSNumber *)_getFaceIDOfFaceBoundsContainingPoint:(CGPoint)point
+                                      fromFaceBounds:(NSDictionary *)faceBoundsByFaceID
+{
+    SC_GUARD_ELSE_RETURN_VALUE(faceBoundsByFaceID.count > 0, nil);
+    __block NSNumber *faceID = nil;
+    [faceBoundsByFaceID
+        enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) {
+            CGRect faceBounds = [obj CGRectValue];
+            if (CGRectContainsPoint(faceBounds, point)) {
+                faceID = key;
+                *stop = YES;
+            }
+        }];
+    return faceID;
+}
+
+@end
diff --git a/ManagedCapturer/SCManagedCapturePreviewLayerController.h b/ManagedCapturer/SCManagedCapturePreviewLayerController.h
new file mode 100644
index 0000000..9b639d6
--- /dev/null
+++ b/ManagedCapturer/SCManagedCapturePreviewLayerController.h
@@ -0,0 +1,80 @@
+//
+// SCManagedCapturePreviewLayerController.h
+// Snapchat
+//
+// Created by Liu Liu on 5/5/15.
+// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
+// + +#import +#import +#import + +#import +#import +#import +#import + +@protocol SCCapturer; +@class LSAGLView, SCBlackCameraDetector, SCManagedCapturePreviewLayerController; + +@protocol SCManagedCapturePreviewLayerControllerDelegate + +- (SCBlackCameraDetector *)blackCameraDetectorForManagedCapturePreviewLayerController: + (SCManagedCapturePreviewLayerController *)controller; +- (sc_create_g2s_ticket_f)g2sTicketForManagedCapturePreviewLayerController: + (SCManagedCapturePreviewLayerController *)controller; + +@end + +/** + * SCManagedCapturePreviewLayerController controls display of frame in a view. The controller has 3 + * different methods for this. + * AVCaptureVideoPreviewLayer: This is a feed coming straight from the camera and does not allow any + * image processing or modification of the frames displayed. + * LSAGLView: OpenGL based video for displaying video that is being processed (Lenses etc.) + * CAMetalLayer: Metal layer drawing textures on a vertex quad for display on screen. + */ +@interface SCManagedCapturePreviewLayerController : NSObject + +@property (nonatomic, strong, readonly) UIView *view; + +@property (nonatomic, strong, readonly) AVCaptureVideoPreviewLayer *videoPreviewLayer; + +@property (nonatomic, strong, readonly) LSAGLView *videoPreviewGLView; + +@property (nonatomic, weak) id delegate; + ++ (instancetype)sharedInstance; + +- (void)pause; + +- (void)resume; + +- (UIView *)newStandInViewWithRect:(CGRect)rect; + +- (void)setManagedCapturer:(id)managedCapturer; + +// This method returns a token that you can hold on to. As long as the token is hold, +// an outdated view will be hold unless the app backgrounded. +- (NSString *)keepDisplayingOutdatedPreview; + +// End displaying the outdated frame with an issued keep token. If there is no one holds +// any token any more, this outdated view will be flushed. +- (void)endDisplayingOutdatedPreview:(NSString *)keepToken; + +// Create views for Metal, this method need to be called on the main thread. +- (void)setupPreviewLayer; + +// Create render pipeline state, setup shaders for Metal, this need to be called off the main thread. +- (void)setupRenderPipeline; + +- (void)applicationDidEnterBackground; + +- (void)applicationWillEnterForeground; + +- (void)applicationWillResignActive; + +- (void)applicationDidBecomeActive; + +@end diff --git a/ManagedCapturer/SCManagedCapturePreviewLayerController.m b/ManagedCapturer/SCManagedCapturePreviewLayerController.m new file mode 100644 index 0000000..2678b0a --- /dev/null +++ b/ManagedCapturer/SCManagedCapturePreviewLayerController.m @@ -0,0 +1,563 @@ +// +// SCManagedCapturePreviewLayerController.m +// Snapchat +// +// Created by Liu Liu on 5/5/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCapturePreviewLayerController.h" + +#import "SCBlackCameraDetector.h" +#import "SCCameraTweaks.h" +#import "SCManagedCapturePreviewView.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerListener.h" +#import "SCManagedCapturerUtils.h" +#import "SCMetalUtils.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import + +#import + +#define SCLogPreviewLayerInfo(fmt, ...) SCLogCoreCameraInfo(@"[PreviewLayerController] " fmt, ##__VA_ARGS__) +#define SCLogPreviewLayerWarning(fmt, ...) SCLogCoreCameraWarning(@"[PreviewLayerController] " fmt, ##__VA_ARGS__) +#define SCLogPreviewLayerError(fmt, ...) 
SCLogCoreCameraError(@"[PreviewLayerController] " fmt, ##__VA_ARGS__) + +const static CGSize kSCManagedCapturePreviewDefaultRenderSize = { + .width = 720, .height = 1280, +}; + +const static CGSize kSCManagedCapturePreviewRenderSize1080p = { + .width = 1080, .height = 1920, +}; + +#if !TARGET_IPHONE_SIMULATOR + +static NSInteger const kSCMetalCannotAcquireDrawableLimit = 2; + +@interface CAMetalLayer (SCSecretFature) + +// Call discardContents. +- (void)sc_secretFeature; + +@end + +@implementation CAMetalLayer (SCSecretFature) + +- (void)sc_secretFeature +{ + // "discardContents" + char buffer[] = {0x9b, 0x96, 0x8c, 0x9c, 0x9e, 0x8d, 0x9b, 0xbc, 0x90, 0x91, 0x8b, 0x9a, 0x91, 0x8b, 0x8c, 0}; + unsigned long len = strlen(buffer); + for (unsigned idx = 0; idx < len; ++idx) { + buffer[idx] = ~buffer[idx]; + } + SEL selector = NSSelectorFromString([NSString stringWithUTF8String:buffer]); + if ([self respondsToSelector:selector]) { + NSMethodSignature *signature = [self methodSignatureForSelector:selector]; + NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:signature]; + [invocation setTarget:self]; + [invocation setSelector:selector]; + [invocation invoke]; + } + // For anyone curious, here is the actual implementation for discardContents in 10.3 (With Hopper v4, arm64) + // From glance, this seems pretty safe to call. + // void -[CAMetalLayer(CAMetalLayerPrivate) discardContents](int arg0) + // { + // *(r31 + 0xffffffffffffffe0) = r20; + // *(0xfffffffffffffff0 + r31) = r19; + // r31 = r31 + 0xffffffffffffffe0; + // *(r31 + 0x10) = r29; + // *(0x20 + r31) = r30; + // r29 = r31 + 0x10; + // r19 = *(arg0 + sign_extend_64(*(int32_t *)0x1a6300510)); + // if (r19 != 0x0) { + // r0 = loc_1807079dc(*0x1a7811fc8, r19); + // r0 = _CAImageQueueConsumeUnconsumed(*(r19 + 0x10)); + // r0 = _CAImageQueueFlush(*(r19 + 0x10)); + // r29 = *(r31 + 0x10); + // r30 = *(0x20 + r31); + // r20 = *r31; + // r19 = *(r31 + 0x10); + // r31 = r31 + 0x20; + // r0 = loc_1807079dc(*0x1a7811fc8, zero_extend_64(0x0)); + // } else { + // r29 = *(r31 + 0x10); + // r30 = *(0x20 + r31); + // r20 = *r31; + // r19 = *(r31 + 0x10); + // r31 = r31 + 0x20; + // } + // return; + // } +} + +@end + +#endif + +@interface SCManagedCapturePreviewLayerController () + +@property (nonatomic) BOOL renderSuspended; + +@end + +@implementation SCManagedCapturePreviewLayerController { + SCManagedCapturePreviewView *_view; + CGSize _drawableSize; + SCQueuePerformer *_performer; + FBKVOController *_renderingKVO; +#if !TARGET_IPHONE_SIMULATOR + CAMetalLayer *_metalLayer; + id _commandQueue; + id _renderPipelineState; + CVMetalTextureCacheRef _textureCache; + dispatch_semaphore_t _commandBufferSemaphore; + // If the current view contains an outdated display (or any display) + BOOL _containOutdatedPreview; + // If we called empty outdated display already, but for some reason, hasn't emptied it yet. + BOOL _requireToFlushOutdatedPreview; + NSMutableSet *_tokenSet; + NSUInteger _cannotAcquireDrawable; +#endif +} + ++ (instancetype)sharedInstance +{ + static dispatch_once_t onceToken; + static SCManagedCapturePreviewLayerController *managedCapturePreviewLayerController; + dispatch_once(&onceToken, ^{ + managedCapturePreviewLayerController = [[SCManagedCapturePreviewLayerController alloc] init]; + }); + return managedCapturePreviewLayerController; +} + +- (instancetype)init +{ + self = [super init]; + if (self) { +#if !TARGET_IPHONE_SIMULATOR + // We only allow one renders at a time (Sorry, no double / triple buffering). 
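+        // How this single-render gate works (see -enqueueSampleBuffer: below): the semaphore is
+        // created with a count of 1, enqueueSampleBuffer: does a non-blocking
+        // dispatch_semaphore_wait(_commandBufferSemaphore, DISPATCH_TIME_NOW) and simply drops the
+        // frame if a render is still in flight, and the command buffer's completion handler signals it.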
+ // It has to be created early here, otherwise integrity of other parts of the code is not + // guaranteed. + // TODO: I need to reason more about the initialization sequence. + _commandBufferSemaphore = dispatch_semaphore_create(1); + // Set _renderSuspended to be YES so that we won't render until it is fully setup. + _renderSuspended = YES; + _tokenSet = [NSMutableSet set]; +#endif + // If the screen is less than default size, we should fallback. + CGFloat nativeScale = [UIScreen mainScreen].nativeScale; + CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; + CGSize renderSize = [SCDeviceName isIphoneX] ? kSCManagedCapturePreviewRenderSize1080p + : kSCManagedCapturePreviewDefaultRenderSize; + if (screenSize.width * nativeScale < renderSize.width) { + _drawableSize = CGSizeMake(screenSize.width * nativeScale, screenSize.height * nativeScale); + } else { + _drawableSize = SCSizeIntegral( + SCSizeCropToAspectRatio(renderSize, SCSizeGetAspectRatio(SCManagedCapturerAllScreenSize()))); + } + _performer = [[SCQueuePerformer alloc] initWithLabel:"SCManagedCapturePreviewLayerController" + qualityOfService:QOS_CLASS_USER_INITIATED + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCoreCamera]; + + _renderingKVO = [[FBKVOController alloc] initWithObserver:self]; + [_renderingKVO observe:self + keyPath:@keypath(self, renderSuspended) + options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld + block:^(id observer, id object, NSDictionary *change) { + BOOL oldValue = [change[NSKeyValueChangeOldKey] boolValue]; + BOOL newValue = [change[NSKeyValueChangeNewKey] boolValue]; + if (oldValue != newValue) { + [[_delegate blackCameraDetectorForManagedCapturePreviewLayerController:self] + capturePreviewDidBecomeVisible:!newValue]; + } + }]; + } + return self; +} + +- (void)pause +{ +#if !TARGET_IPHONE_SIMULATOR + SCTraceStart(); + SCLogPreviewLayerInfo(@"pause Metal rendering performer waiting"); + [_performer performAndWait:^() { + self.renderSuspended = YES; + }]; + SCLogPreviewLayerInfo(@"pause Metal rendering performer finished"); +#endif +} + +- (void)resume +{ +#if !TARGET_IPHONE_SIMULATOR + SCTraceStart(); + SCLogPreviewLayerInfo(@"resume Metal rendering performer waiting"); + [_performer performAndWait:^() { + self.renderSuspended = NO; + }]; + SCLogPreviewLayerInfo(@"resume Metal rendering performer finished"); +#endif +} + +- (void)setupPreviewLayer +{ +#if !TARGET_IPHONE_SIMULATOR + SCTraceStart(); + SCAssertMainThread(); + SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); + + if (!_metalLayer) { + _metalLayer = [CAMetalLayer new]; + SCLogPreviewLayerInfo(@"setup metalLayer:%@", _metalLayer); + + if (!_view) { + // Create capture preview view and setup the metal layer + [self view]; + } else { + [_view setupMetalLayer:_metalLayer]; + } + } +#endif +} + +- (UIView *)newStandInViewWithRect:(CGRect)rect +{ + return [self.view resizableSnapshotViewFromRect:rect afterScreenUpdates:YES withCapInsets:UIEdgeInsetsZero]; +} + +- (void)setupRenderPipeline +{ +#if !TARGET_IPHONE_SIMULATOR + SCTraceStart(); + SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); + SCAssertNotMainThread(); + id device = SCGetManagedCaptureMetalDevice(); + id shaderLibrary = [device newDefaultLibrary]; + _commandQueue = [device newCommandQueue]; + MTLRenderPipelineDescriptor *renderPipelineDescriptor = [MTLRenderPipelineDescriptor new]; + renderPipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormatBGRA8Unorm; + renderPipelineDescriptor.vertexFunction = [shaderLibrary 
newFunctionWithName:@"yuv_vertex_reshape"]; + renderPipelineDescriptor.fragmentFunction = [shaderLibrary newFunctionWithName:@"yuv_fragment_texture"]; + MTLVertexDescriptor *vertexDescriptor = [MTLVertexDescriptor vertexDescriptor]; + vertexDescriptor.attributes[0].format = MTLVertexFormatFloat2; // position + vertexDescriptor.attributes[0].offset = 0; + vertexDescriptor.attributes[0].bufferIndex = 0; + vertexDescriptor.attributes[1].format = MTLVertexFormatFloat2; // texCoords + vertexDescriptor.attributes[1].offset = 2 * sizeof(float); + vertexDescriptor.attributes[1].bufferIndex = 0; + vertexDescriptor.layouts[0].stepRate = 1; + vertexDescriptor.layouts[0].stepFunction = MTLVertexStepFunctionPerVertex; + vertexDescriptor.layouts[0].stride = 4 * sizeof(float); + renderPipelineDescriptor.vertexDescriptor = vertexDescriptor; + _renderPipelineState = [device newRenderPipelineStateWithDescriptor:renderPipelineDescriptor error:nil]; + CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &_textureCache); + _metalLayer.device = device; + _metalLayer.drawableSize = _drawableSize; + _metalLayer.pixelFormat = MTLPixelFormatBGRA8Unorm; + _metalLayer.framebufferOnly = YES; // It is default to Yes. + [_performer performAndWait:^() { + self.renderSuspended = NO; + }]; + SCLogPreviewLayerInfo(@"did setup render pipeline"); +#endif +} + +- (UIView *)view +{ + SCTraceStart(); + SCAssertMainThread(); + if (!_view) { +#if TARGET_IPHONE_SIMULATOR + _view = [[SCManagedCapturePreviewView alloc] initWithFrame:[UIScreen mainScreen].fixedCoordinateSpace.bounds + aspectRatio:SCSizeGetAspectRatio(_drawableSize) + metalLayer:nil]; +#else + _view = [[SCManagedCapturePreviewView alloc] initWithFrame:[UIScreen mainScreen].fixedCoordinateSpace.bounds + aspectRatio:SCSizeGetAspectRatio(_drawableSize) + metalLayer:_metalLayer]; + SCLogPreviewLayerInfo(@"created SCManagedCapturePreviewView:%@", _view); +#endif + } + return _view; +} + +- (void)setManagedCapturer:(id)managedCapturer +{ + SCTraceStart(); + SCLogPreviewLayerInfo(@"setManagedCapturer:%@", managedCapturer); + if (SCDeviceSupportsMetal()) { + [managedCapturer addSampleBufferDisplayController:self context:SCCapturerContext]; + } + [managedCapturer addListener:self]; +} + +- (void)applicationDidEnterBackground +{ +#if !TARGET_IPHONE_SIMULATOR + SCTraceStart(); + SCAssertMainThread(); + SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); + SCLogPreviewLayerInfo(@"applicationDidEnterBackground waiting for performer"); + [_performer performAndWait:^() { + CVMetalTextureCacheFlush(_textureCache, 0); + [_tokenSet removeAllObjects]; + self.renderSuspended = YES; + }]; + SCLogPreviewLayerInfo(@"applicationDidEnterBackground signal performer finishes"); +#endif +} + +- (void)applicationWillResignActive +{ + SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); + SCTraceStart(); + SCAssertMainThread(); +#if !TARGET_IPHONE_SIMULATOR + SCLogPreviewLayerInfo(@"pause Metal rendering"); + [_performer performAndWait:^() { + self.renderSuspended = YES; + }]; +#endif +} + +- (void)applicationDidBecomeActive +{ + SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); + SCTraceStart(); + SCAssertMainThread(); +#if !TARGET_IPHONE_SIMULATOR + SCLogPreviewLayerInfo(@"resume Metal rendering waiting for performer"); + [_performer performAndWait:^() { + self.renderSuspended = NO; + }]; + SCLogPreviewLayerInfo(@"resume Metal rendering performer finished"); +#endif +} + +- (void)applicationWillEnterForeground +{ +#if !TARGET_IPHONE_SIMULATOR + SCTraceStart(); + SCAssertMainThread(); + 
SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); + SCLogPreviewLayerInfo(@"applicationWillEnterForeground waiting for performer"); + [_performer performAndWait:^() { + self.renderSuspended = NO; + if (_containOutdatedPreview && _tokenSet.count == 0) { + [self _flushOutdatedPreview]; + } + }]; + SCLogPreviewLayerInfo(@"applicationWillEnterForeground performer finished"); +#endif +} + +- (NSString *)keepDisplayingOutdatedPreview +{ + SCTraceStart(); + NSString *token = [NSData randomBase64EncodedStringOfLength:8]; +#if !TARGET_IPHONE_SIMULATOR + SCLogPreviewLayerInfo(@"keepDisplayingOutdatedPreview waiting for performer"); + [_performer performAndWait:^() { + [_tokenSet addObject:token]; + }]; + SCLogPreviewLayerInfo(@"keepDisplayingOutdatedPreview performer finished"); +#endif + return token; +} + +- (void)endDisplayingOutdatedPreview:(NSString *)keepToken +{ +#if !TARGET_IPHONE_SIMULATOR + SC_GUARD_ELSE_RETURN(SCDeviceSupportsMetal()); + // I simply use a lock for this. If it becomes a bottleneck, I can figure something else out. + SCTraceStart(); + SCLogPreviewLayerInfo(@"endDisplayingOutdatedPreview waiting for performer"); + [_performer performAndWait:^() { + [_tokenSet removeObject:keepToken]; + if (_tokenSet.count == 0 && _requireToFlushOutdatedPreview && _containOutdatedPreview && !_renderSuspended) { + [self _flushOutdatedPreview]; + } + }]; + SCLogPreviewLayerInfo(@"endDisplayingOutdatedPreview performer finished"); +#endif +} + +#pragma mark - SCManagedSampleBufferDisplayController + +- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ +#if !TARGET_IPHONE_SIMULATOR + // Just drop the frame if it is rendering. + SC_GUARD_ELSE_RUN_AND_RETURN_VALUE(dispatch_semaphore_wait(_commandBufferSemaphore, DISPATCH_TIME_NOW) == 0, + SCLogPreviewLayerInfo(@"waiting for commandBufferSemaphore signaled"), ); + // Just drop the frame, simple. + [_performer performAndWait:^() { + if (_renderSuspended) { + SCLogGeneralInfo(@"Preview rendering suspends and current sample buffer is dropped"); + dispatch_semaphore_signal(_commandBufferSemaphore); + return; + } + @autoreleasepool { + const BOOL isFirstPreviewFrame = !_containOutdatedPreview; + if (isFirstPreviewFrame) { + // Signal that we receieved the first frame (otherwise this will be YES already). + SCGhostToSnappableSignalDidReceiveFirstPreviewFrame(); + sc_create_g2s_ticket_f func = [_delegate g2sTicketForManagedCapturePreviewLayerController:self]; + SCG2SActivateManiphestTicketQueueWithTicketCreationFunction(func); + } + CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + + CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly); + size_t pixelWidth = CVPixelBufferGetWidth(imageBuffer); + size_t pixelHeight = CVPixelBufferGetHeight(imageBuffer); + id yTexture = + SCMetalTextureFromPixelBuffer(imageBuffer, 0, MTLPixelFormatR8Unorm, _textureCache); + id cbCrTexture = + SCMetalTextureFromPixelBuffer(imageBuffer, 1, MTLPixelFormatRG8Unorm, _textureCache); + CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly); + + SC_GUARD_ELSE_RUN_AND_RETURN(yTexture && cbCrTexture, dispatch_semaphore_signal(_commandBufferSemaphore)); + id commandBuffer = _commandQueue.commandBuffer; + id drawable = _metalLayer.nextDrawable; + if (!drawable) { + // Count how many times I cannot acquire drawable. 
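+                // Once we fail kSCMetalCannotAcquireDrawableLimit times in a row, sc_secretFeature
+                // (the obfuscated discardContents call defined above) is invoked to flush the layer's
+                // image queue so that drawables can be acquired again.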
+ ++_cannotAcquireDrawable; + if (_cannotAcquireDrawable >= kSCMetalCannotAcquireDrawableLimit) { + // Calling [_metalLayer discardContents] to flush the CAImageQueue + SCLogGeneralInfo(@"Cannot acquire drawable, reboot Metal .."); + [_metalLayer sc_secretFeature]; + } + dispatch_semaphore_signal(_commandBufferSemaphore); + return; + } + _cannotAcquireDrawable = 0; // Reset to 0 in case we can acquire drawable. + MTLRenderPassDescriptor *renderPassDescriptor = [MTLRenderPassDescriptor new]; + renderPassDescriptor.colorAttachments[0].texture = drawable.texture; + id renderEncoder = + [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor]; + [renderEncoder setRenderPipelineState:_renderPipelineState]; + [renderEncoder setFragmentTexture:yTexture atIndex:0]; + [renderEncoder setFragmentTexture:cbCrTexture atIndex:1]; + // TODO: Prob this out of the image buffer. + // 90 clock-wise rotated texture coordinate. + // Also do aspect fill. + float normalizedHeight, normalizedWidth; + if (pixelWidth * _drawableSize.width > _drawableSize.height * pixelHeight) { + normalizedHeight = 1.0; + normalizedWidth = pixelWidth * (_drawableSize.width / pixelHeight) / _drawableSize.height; + } else { + normalizedHeight = pixelHeight * (_drawableSize.height / pixelWidth) / _drawableSize.width; + normalizedWidth = 1.0; + } + const float vertices[] = { + -normalizedHeight, -normalizedWidth, 1, 1, // lower left -> upper right + normalizedHeight, -normalizedWidth, 1, 0, // lower right -> lower right + -normalizedHeight, normalizedWidth, 0, 1, // upper left -> upper left + normalizedHeight, normalizedWidth, 0, 0, // upper right -> lower left + }; + [renderEncoder setVertexBytes:vertices length:sizeof(vertices) atIndex:0]; + [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4]; + [renderEncoder endEncoding]; + // I need to set a minimum duration for the drawable. + // There is a bug on iOS 10.3, if I present as soon as I can, I am keeping the GPU + // at 30fps even you swipe between views, that causes undesirable visual jarring. + // By set a minimum duration, even it is incrediably small (I tried 10ms, and here 60fps works), + // the OS seems can adjust the frame rate much better when swiping. + // This is an iOS 10.3 new method. + if ([commandBuffer respondsToSelector:@selector(presentDrawable:afterMinimumDuration:)]) { + [(id)commandBuffer presentDrawable:drawable afterMinimumDuration:(1.0 / 60)]; + } else { + [commandBuffer presentDrawable:drawable]; + } + [commandBuffer addCompletedHandler:^(id commandBuffer) { + dispatch_semaphore_signal(_commandBufferSemaphore); + }]; + if (isFirstPreviewFrame) { + if ([drawable respondsToSelector:@selector(addPresentedHandler:)] && + [drawable respondsToSelector:@selector(presentedTime)]) { + [(id)drawable addPresentedHandler:^(id presentedDrawable) { + SCGhostToSnappableSignalDidRenderFirstPreviewFrame([(id)presentedDrawable presentedTime]); + }]; + } else { + [commandBuffer addCompletedHandler:^(id commandBuffer) { + // Using CACurrentMediaTime to approximate. + SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime()); + }]; + } + } + // We enqueued an sample buffer to display, therefore, it contains an outdated display (to be clean up). + _containOutdatedPreview = YES; + [commandBuffer commit]; + } + }]; +#endif +} + +- (void)flushOutdatedPreview +{ + SCTraceStart(); +#if !TARGET_IPHONE_SIMULATOR + // This method cannot drop frames (otherwise we will have residual on the screen). 
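+    // The flush itself is deferred: _requireToFlushOutdatedPreview is always set, but the layer is
+    // only cleared when rendering is not suspended and no keep-tokens are outstanding; otherwise the
+    // flush is retried from -endDisplayingOutdatedPreview: or -applicationWillEnterForeground.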
+ SCLogPreviewLayerInfo(@"flushOutdatedPreview waiting for performer"); + [_performer performAndWait:^() { + _requireToFlushOutdatedPreview = YES; + SC_GUARD_ELSE_RETURN(!_renderSuspended); + // Have to make sure we have no token left before return. + SC_GUARD_ELSE_RETURN(_tokenSet.count == 0); + [self _flushOutdatedPreview]; + }]; + SCLogPreviewLayerInfo(@"flushOutdatedPreview performer finished"); +#endif +} + +- (void)_flushOutdatedPreview +{ + SCTraceStart(); + SCAssertPerformer(_performer); +#if !TARGET_IPHONE_SIMULATOR + SCLogPreviewLayerInfo(@"flushOutdatedPreview containOutdatedPreview:%d", _containOutdatedPreview); + // I don't care if this has renderSuspended or not, assuming I did the right thing. + // Emptied, no need to do this any more on foregrounding. + SC_GUARD_ELSE_RETURN(_containOutdatedPreview); + _containOutdatedPreview = NO; + _requireToFlushOutdatedPreview = NO; + [_metalLayer sc_secretFeature]; +#endif +} + +#pragma mark - SCManagedCapturerListener + +- (void)managedCapturer:(id)managedCapturer + didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer +{ + SCTraceStart(); + SCAssertMainThread(); + // Force to load the view + [self view]; + _view.videoPreviewLayer = videoPreviewLayer; + SCLogPreviewLayerInfo(@"didChangeVideoPreviewLayer:%@", videoPreviewLayer); +} + +- (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView +{ + SCTraceStart(); + SCAssertMainThread(); + // Force to load the view + [self view]; + _view.videoPreviewGLView = videoPreviewGLView; + SCLogPreviewLayerInfo(@"didChangeVideoPreviewGLView:%@", videoPreviewGLView); +} + +@end diff --git a/ManagedCapturer/SCManagedCapturePreviewView.h b/ManagedCapturer/SCManagedCapturePreviewView.h new file mode 100644 index 0000000..43fd14d --- /dev/null +++ b/ManagedCapturer/SCManagedCapturePreviewView.h @@ -0,0 +1,25 @@ +// +// SCManagedCapturePreviewView.h +// Snapchat +// +// Created by Liu Liu on 5/5/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. +// + +#import +#import + +@class LSAGLView; + +@interface SCManagedCapturePreviewView : UIView + +- (instancetype)initWithFrame:(CGRect)frame NS_UNAVAILABLE; + +- (instancetype)initWithFrame:(CGRect)frame aspectRatio:(CGFloat)aspectRatio metalLayer:(CALayer *)metalLayer; +// This method is called only once in case the metalLayer is nil previously. +- (void)setupMetalLayer:(CALayer *)metalLayer; + +@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer; +@property (nonatomic, strong) LSAGLView *videoPreviewGLView; + +@end diff --git a/ManagedCapturer/SCManagedCapturePreviewView.m b/ManagedCapturer/SCManagedCapturePreviewView.m new file mode 100644 index 0000000..6da2f03 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturePreviewView.m @@ -0,0 +1,173 @@ +// +// SCManagedCapturePreviewView.m +// Snapchat +// +// Created by Liu Liu on 5/5/15. +// Copyright (c) 2015 Snapchat, Inc. All rights reserved. 
+// + +#import "SCManagedCapturePreviewView.h" + +#import "SCCameraTweaks.h" +#import "SCManagedCapturePreviewLayerController.h" +#import "SCManagedCapturePreviewViewDebugView.h" +#import "SCMetalUtils.h" + +#import +#import +#import + +#import + +@implementation SCManagedCapturePreviewView { + CGFloat _aspectRatio; + CALayer *_containerLayer; + CALayer *_metalLayer; + SCManagedCapturePreviewViewDebugView *_debugView; +} + +- (instancetype)initWithFrame:(CGRect)frame aspectRatio:(CGFloat)aspectRatio metalLayer:(CALayer *)metalLayer +{ + SCTraceStart(); + SCAssertMainThread(); + self = [super initWithFrame:frame]; + if (self) { + _aspectRatio = aspectRatio; + if (SCDeviceSupportsMetal()) { + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + _metalLayer = metalLayer; + _metalLayer.frame = [self _layerFrame]; + [self.layer insertSublayer:_metalLayer below:[self.layer sublayers][0]]; + [CATransaction commit]; + } else { + _containerLayer = [[CALayer alloc] init]; + _containerLayer.frame = [self _layerFrame]; + // Using a container layer such that the software zooming is happening on this layer + [self.layer insertSublayer:_containerLayer below:[self.layer sublayers][0]]; + } + if ([self _shouldShowDebugView]) { + _debugView = [[SCManagedCapturePreviewViewDebugView alloc] init]; + [self addSubview:_debugView]; + } + } + return self; +} + +- (void)_layoutVideoPreviewLayer +{ + SCAssertMainThread(); + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + if (SCDeviceSupportsMetal()) { + _metalLayer.frame = [self _layerFrame]; + } else { + if (_videoPreviewLayer) { + SCLogGeneralInfo(@"container layer frame %@, video preview layer frame %@", + NSStringFromCGRect(_containerLayer.frame), NSStringFromCGRect(_videoPreviewLayer.frame)); + } + // Using bounds because we don't really care about the position at this point. + _containerLayer.frame = [self _layerFrame]; + _videoPreviewLayer.frame = _containerLayer.bounds; + _videoPreviewLayer.position = + CGPointMake(CGRectGetWidth(_containerLayer.bounds) * 0.5, CGRectGetHeight(_containerLayer.bounds) * 0.5); + } + [CATransaction commit]; +} + +- (void)_layoutVideoPreviewGLView +{ + SCCAssertMainThread(); + _videoPreviewGLView.frame = [self _layerFrame]; +} + +- (CGRect)_layerFrame +{ + CGRect frame = SCRectMakeWithCenterAndSize( + SCRectGetMid(self.bounds), SCSizeIntegral(SCSizeExpandToAspectRatio(self.bounds.size, _aspectRatio))); + + CGFloat x = frame.origin.x; + x = isnan(x) ? 0.0 : (isfinite(x) ? x : INFINITY); + + CGFloat y = frame.origin.y; + y = isnan(y) ? 0.0 : (isfinite(y) ? y : INFINITY); + + CGFloat width = frame.size.width; + width = isnan(width) ? 0.0 : (isfinite(width) ? width : INFINITY); + + CGFloat height = frame.size.height; + height = isnan(height) ? 0.0 : (isfinite(height) ? 
height : INFINITY); + + return CGRectMake(x, y, width, height); +} + +- (void)setVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer +{ + SCAssertMainThread(); + if (_videoPreviewLayer != videoPreviewLayer) { + [_videoPreviewLayer removeFromSuperlayer]; + _videoPreviewLayer = videoPreviewLayer; + [_containerLayer addSublayer:_videoPreviewLayer]; + [self _layoutVideoPreviewLayer]; + } +} + +- (void)setupMetalLayer:(CALayer *)metalLayer +{ + SCAssert(!_metalLayer, @"_metalLayer should be nil."); + SCAssert(metalLayer, @"metalLayer must exists."); + SCAssertMainThread(); + _metalLayer = metalLayer; + [self.layer insertSublayer:_metalLayer below:[self.layer sublayers][0]]; + [self _layoutVideoPreviewLayer]; +} + +- (void)setVideoPreviewGLView:(LSAGLView *)videoPreviewGLView +{ + SCAssertMainThread(); + if (_videoPreviewGLView != videoPreviewGLView) { + [_videoPreviewGLView removeFromSuperview]; + _videoPreviewGLView = videoPreviewGLView; + [self addSubview:_videoPreviewGLView]; + [self _layoutVideoPreviewGLView]; + } +} + +#pragma mark - Overridden methods + +- (void)layoutSubviews +{ + SCAssertMainThread(); + [super layoutSubviews]; + [self _layoutVideoPreviewLayer]; + [self _layoutVideoPreviewGLView]; + [self _layoutDebugViewIfNeeded]; +} + +- (void)setHidden:(BOOL)hidden +{ + SCAssertMainThread(); + [super setHidden:hidden]; + if (hidden) { + SCLogGeneralInfo(@"[SCManagedCapturePreviewView] - isHidden is being set to YES"); + } +} + +#pragma mark - Debug View + +- (BOOL)_shouldShowDebugView +{ + // Only show debug view in internal builds and tweak settings are turned on. + return SCIsInternalBuild() && + (SCCameraTweaksEnableFocusPointObservation() || SCCameraTweaksEnableExposurePointObservation()); +} + +- (void)_layoutDebugViewIfNeeded +{ + SCAssertMainThread(); + SC_GUARD_ELSE_RETURN([self _shouldShowDebugView]); + _debugView.frame = self.bounds; + [self bringSubviewToFront:_debugView]; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturePreviewViewDebugView.h b/ManagedCapturer/SCManagedCapturePreviewViewDebugView.h new file mode 100644 index 0000000..0e3110c --- /dev/null +++ b/ManagedCapturer/SCManagedCapturePreviewViewDebugView.h @@ -0,0 +1,14 @@ +// +// SCManagedCapturePreviewViewDebugView.h +// Snapchat +// +// Created by Jiyang Zhu on 1/19/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. +// + +#import +#import + +@interface SCManagedCapturePreviewViewDebugView : UIView + +@end diff --git a/ManagedCapturer/SCManagedCapturePreviewViewDebugView.m b/ManagedCapturer/SCManagedCapturePreviewViewDebugView.m new file mode 100644 index 0000000..946ed88 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturePreviewViewDebugView.m @@ -0,0 +1,204 @@ +// +// SCManagedCapturePreviewViewDebugView.m +// Snapchat +// +// Created by Jiyang Zhu on 1/19/18. +// Copyright © 2018 Snapchat, Inc. All rights reserved. 
+// + +#import "SCManagedCapturePreviewViewDebugView.h" + +#import "SCManagedCapturer.h" +#import "SCManagedCapturerListener.h" + +#import +#import +#import + +@import CoreText; + +static CGFloat const kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth = 1.0; +static CGFloat const kSCManagedCapturePreviewViewDebugViewCrossHairWidth = 20.0; + +@interface SCManagedCapturePreviewViewDebugView () + +@property (assign, nonatomic) CGPoint focusPoint; +@property (assign, nonatomic) CGPoint exposurePoint; +@property (strong, nonatomic) NSDictionary *faceBoundsByFaceID; + +@end + +@implementation SCManagedCapturePreviewViewDebugView + +- (instancetype)initWithFrame:(CGRect)frame +{ + self = [super initWithFrame:frame]; + if (self) { + self.userInteractionEnabled = NO; + self.backgroundColor = [UIColor clearColor]; + _focusPoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)]; + _exposurePoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)]; + [[SCManagedCapturer sharedInstance] addListener:self]; + } + return self; +} + +- (void)drawRect:(CGRect)rect +{ + CGContextRef context = UIGraphicsGetCurrentContext(); + + if (self.focusPoint.x > 0 || self.focusPoint.y > 0) { + [self _drawCrossHairAtPoint:self.focusPoint inContext:context withColor:[UIColor greenColor] isXShaped:YES]; + } + + if (self.exposurePoint.x > 0 || self.exposurePoint.y > 0) { + [self _drawCrossHairAtPoint:self.exposurePoint inContext:context withColor:[UIColor yellowColor] isXShaped:NO]; + } + + if (self.faceBoundsByFaceID.count > 0) { + [self.faceBoundsByFaceID + enumerateKeysAndObjectsUsingBlock:^(NSNumber *_Nonnull key, NSValue *_Nonnull obj, BOOL *_Nonnull stop) { + CGRect faceRect = [obj CGRectValue]; + NSInteger faceID = [key integerValue]; + [self _drawRectangle:faceRect + text:[NSString sc_stringWithFormat:@"ID: %@", key] + inContext:context + withColor:[UIColor colorWithRed:((faceID % 3) == 0) + green:((faceID % 3) == 1) + blue:((faceID % 3) == 2) + alpha:1.0]]; + }]; + } +} + +- (void)dealloc +{ + [[SCManagedCapturer sharedInstance] removeListener:self]; +} + +/** + Draw a crosshair with center point, context, color and shape. + + @param isXShaped "X" or "+" + */ +- (void)_drawCrossHairAtPoint:(CGPoint)center + inContext:(CGContextRef)context + withColor:(UIColor *)color + isXShaped:(BOOL)isXShaped +{ + CGFloat width = kSCManagedCapturePreviewViewDebugViewCrossHairWidth; + + CGContextSetStrokeColorWithColor(context, color.CGColor); + CGContextSetLineWidth(context, kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth); + CGContextBeginPath(context); + + if (isXShaped) { + CGContextMoveToPoint(context, center.x - width / 2, center.y - width / 2); + CGContextAddLineToPoint(context, center.x + width / 2, center.y + width / 2); + CGContextMoveToPoint(context, center.x + width / 2, center.y - width / 2); + CGContextAddLineToPoint(context, center.x - width / 2, center.y + width / 2); + } else { + CGContextMoveToPoint(context, center.x - width / 2, center.y); + CGContextAddLineToPoint(context, center.x + width / 2, center.y); + CGContextMoveToPoint(context, center.x, center.y - width / 2); + CGContextAddLineToPoint(context, center.x, center.y + width / 2); + } + + CGContextStrokePath(context); +} + +/** + Draw a rectangle, with a text on the top left. 
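+
+ @param rect    The rectangle to stroke, in the view's coordinate space.
+ @param text    The label drawn at the top left of the rectangle (here, the face ID).
+ @param context The Core Graphics context to draw into.
+ @param color   The stroke and text color.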
+ */ +- (void)_drawRectangle:(CGRect)rect text:(NSString *)text inContext:(CGContextRef)context withColor:(UIColor *)color +{ + CGContextSetStrokeColorWithColor(context, color.CGColor); + CGContextSetLineWidth(context, kSCManagedCapturePreviewViewDebugViewCrossHairLineWidth); + CGContextBeginPath(context); + + CGContextMoveToPoint(context, CGRectGetMinX(rect), CGRectGetMinY(rect)); + CGContextAddLineToPoint(context, CGRectGetMinX(rect), CGRectGetMaxY(rect)); + CGContextAddLineToPoint(context, CGRectGetMaxX(rect), CGRectGetMaxY(rect)); + CGContextAddLineToPoint(context, CGRectGetMaxX(rect), CGRectGetMinY(rect)); + CGContextAddLineToPoint(context, CGRectGetMinX(rect), CGRectGetMinY(rect)); + + NSMutableParagraphStyle *textStyle = [[NSMutableParagraphStyle alloc] init]; + textStyle.alignment = NSTextAlignmentLeft; + NSDictionary *attributes = @{ + NSFontAttributeName : [UIFont boldSystemFontOfSize:16], + NSForegroundColorAttributeName : color, + NSParagraphStyleAttributeName : textStyle + }; + + [text drawInRect:rect withAttributes:attributes]; + + CGContextStrokePath(context); +} + +- (CGPoint)_convertPointOfInterest:(CGPoint)point +{ + SCAssertMainThread(); + CGPoint convertedPoint = + CGPointMake((1 - point.y) * CGRectGetWidth(self.bounds), point.x * CGRectGetHeight(self.bounds)); + if ([[SCManagedCapturer sharedInstance] isVideoMirrored]) { + convertedPoint.x = CGRectGetWidth(self.bounds) - convertedPoint.x; + } + return convertedPoint; +} + +- (NSDictionary *)_convertFaceBounds:(NSDictionary *)faceBoundsByFaceID +{ + SCAssertMainThread(); + NSMutableDictionary *convertedFaceBoundsByFaceID = + [NSMutableDictionary dictionaryWithCapacity:faceBoundsByFaceID.count]; + for (NSNumber *key in faceBoundsByFaceID.allKeys) { + CGRect faceBounds = [[faceBoundsByFaceID objectForKey:key] CGRectValue]; + CGRect convertedBounds = CGRectMake(CGRectGetMinY(faceBounds) * CGRectGetWidth(self.bounds), + CGRectGetMinX(faceBounds) * CGRectGetHeight(self.bounds), + CGRectGetHeight(faceBounds) * CGRectGetWidth(self.bounds), + CGRectGetWidth(faceBounds) * CGRectGetHeight(self.bounds)); + if (![[SCManagedCapturer sharedInstance] isVideoMirrored]) { + convertedBounds.origin.x = CGRectGetWidth(self.bounds) - CGRectGetMaxX(convertedBounds); + } + [convertedFaceBoundsByFaceID setObject:[NSValue valueWithCGRect:convertedBounds] forKey:key]; + } + return convertedFaceBoundsByFaceID; +} + +#pragma mark - SCManagedCapturerListener +- (void)managedCapturer:(id)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint +{ + runOnMainThreadAsynchronouslyIfNecessary(^{ + self.exposurePoint = [self _convertPointOfInterest:exposurePoint]; + [self setNeedsDisplay]; + }); +} + +- (void)managedCapturer:(id)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint +{ + runOnMainThreadAsynchronouslyIfNecessary(^{ + self.focusPoint = [self _convertPointOfInterest:focusPoint]; + [self setNeedsDisplay]; + }); +} + +- (void)managedCapturer:(id)managedCapturer + didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID +{ + runOnMainThreadAsynchronouslyIfNecessary(^{ + self.faceBoundsByFaceID = [self _convertFaceBounds:faceBoundsByFaceID]; + [self setNeedsDisplay]; + }); +} + +- (void)managedCapturer:(id)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state +{ + runOnMainThreadAsynchronouslyIfNecessary(^{ + self.faceBoundsByFaceID = nil; + self.focusPoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)]; + self.exposurePoint = [self _convertPointOfInterest:CGPointMake(0.5, 0.5)]; + [self setNeedsDisplay]; + 
});
+}
+
+@end
diff --git a/ManagedCapturer/SCManagedCaptureSession.h b/ManagedCapturer/SCManagedCaptureSession.h
new file mode 100644
index 0000000..9d5f1ee
--- /dev/null
+++ b/ManagedCapturer/SCManagedCaptureSession.h
@@ -0,0 +1,67 @@
+//
+// SCManagedCaptureSession.h
+// Snapchat
+//
+// Created by Derek Wang on 02/03/2018.
+//
+
+#import
+
+#import
+#import
+
+/**
+ `SCManagedCaptureSession` is a wrapper class of `AVCaptureSession`. The purpose of this class is to provide additional
+ functionalities to `AVCaptureSession`.
+ For example, for black camera detection, we need to monitor when some method is called. Another example is that we can
+ treat it as a more stable version of `AVCaptureSession` by moving some `AVCaptureSession` fixing logic to this class,
+ so that it provides reliable interfaces to the outside. That would be the next step.
+ It also tries to mimic `AVCaptureSession` by implementing some of its methods. The original methods
+ on `AVCaptureSession` should not be used anymore.
+ */
+
+@class SCBlackCameraDetector;
+
+NS_ASSUME_NONNULL_BEGIN
+@interface SCManagedCaptureSession : NSObject
+
+/**
+ Expose the wrapped avSession property.
+ */
+@property (nonatomic, strong, readonly) AVCaptureSession *avSession;
+
+/**
+ Expose the avSession isRunning property for convenience.
+ */
+@property (nonatomic, readonly, assign) BOOL isRunning;
+
+/**
+ Wrap and monitor [AVCaptureSession startRunning]. [AVCaptureSession startRunning] should not be
+ called directly.
+ */
+- (void)startRunning;
+/**
+ Wrap and monitor [AVCaptureSession stopRunning]. [AVCaptureSession stopRunning] should not be
+ called directly.
+ */
+- (void)stopRunning;
+
+/**
+ Wrap and monitor [AVCaptureSession beginConfiguration].
+ */
+- (void)beginConfiguration;
+/**
+ Wrap and monitor [AVCaptureSession commitConfiguration].
+ */
+- (void)commitConfiguration;
+/**
+ Configure the internal AVCaptureSession with a block.
+ @param block The configuration block, executed between beginConfiguration and commitConfiguration.
+ */
+- (void)performConfiguration:(void (^)(void))block;
+
+- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector NS_DESIGNATED_INITIALIZER;
+SC_INIT_AND_NEW_UNAVAILABLE
+
+@end
+NS_ASSUME_NONNULL_END
diff --git a/ManagedCapturer/SCManagedCaptureSession.m b/ManagedCapturer/SCManagedCaptureSession.m
new file mode 100644
index 0000000..076c31b
--- /dev/null
+++ b/ManagedCapturer/SCManagedCaptureSession.m
@@ -0,0 +1,74 @@
+//
+// SCManagedCaptureSession.m
+// Snapchat
+//
+// Created by Derek Wang on 02/03/2018.
+// + +#import "SCManagedCaptureSession.h" + +#import "SCBlackCameraDetector.h" + +#import + +@interface SCManagedCaptureSession () { + SCBlackCameraDetector *_blackCameraDetector; +} + +@end + +@implementation SCManagedCaptureSession + +- (instancetype)initWithBlackCameraDetector:(SCBlackCameraDetector *)detector +{ + self = [super init]; + if (self) { + _avSession = [[AVCaptureSession alloc] init]; + _blackCameraDetector = detector; + } + return self; +} + +- (void)startRunning +{ + SCTraceODPCompatibleStart(2); + [_blackCameraDetector sessionWillCallStartRunning]; + [_avSession startRunning]; + [_blackCameraDetector sessionDidCallStartRunning]; +} + +- (void)stopRunning +{ + SCTraceODPCompatibleStart(2); + [_blackCameraDetector sessionWillCallStopRunning]; + [_avSession stopRunning]; + [_blackCameraDetector sessionDidCallStopRunning]; +} + +- (void)performConfiguration:(nonnull void (^)(void))block +{ + SC_GUARD_ELSE_RETURN(block); + [self beginConfiguration]; + block(); + [self commitConfiguration]; +} + +- (void)beginConfiguration +{ + [_avSession beginConfiguration]; +} + +- (void)commitConfiguration +{ + SCTraceODPCompatibleStart(2); + [_blackCameraDetector sessionWillCommitConfiguration]; + [_avSession commitConfiguration]; + [_blackCameraDetector sessionDidCommitConfiguration]; +} + +- (BOOL)isRunning +{ + return _avSession.isRunning; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturer.h b/ManagedCapturer/SCManagedCapturer.h new file mode 100644 index 0000000..bb1cc1e --- /dev/null +++ b/ManagedCapturer/SCManagedCapturer.h @@ -0,0 +1,23 @@ +// SCManagedCapturer.h +// Snapchat +// +// Created by Liu Liu on 4/20/15. + +#import "SCCapturer.h" +#import "SCManagedCapturerListener.h" +#import "SCManagedCapturerUtils.h" + +#import + +/* + SCManagedCapturer is a shell class. Its job is to provide an singleton instance which follows protocol of + SCManagedCapturerImpl. The reason we use this pattern is because we are building SCManagedCapturerV2. This setup + offers + possbility for us to code V2 without breaking the existing app, and can test the new implementation via Tweak. + */ + +@interface SCManagedCapturer : NSObject + ++ (id)sharedInstance; + +@end diff --git a/ManagedCapturer/SCManagedCapturer.m b/ManagedCapturer/SCManagedCapturer.m new file mode 100644 index 0000000..d009045 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturer.m @@ -0,0 +1,26 @@ +// +// SCManagedCapturer.m +// Snapchat +// +// Created by Lin Jia on 9/28/17. +// + +#import "SCManagedCapturer.h" + +#import "SCCameraTweaks.h" +#import "SCCaptureCore.h" +#import "SCManagedCapturerV1.h" + +@implementation SCManagedCapturer + ++ (id)sharedInstance +{ + static dispatch_once_t onceToken; + static id managedCapturer; + dispatch_once(&onceToken, ^{ + managedCapturer = [[SCCaptureCore alloc] init]; + }); + return managedCapturer; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturerARSessionHandler.h b/ManagedCapturer/SCManagedCapturerARSessionHandler.h new file mode 100644 index 0000000..fcf357c --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerARSessionHandler.h @@ -0,0 +1,26 @@ +// +// SCManagedCapturerARSessionHandler.h +// Snapchat +// +// Created by Xiaokang Liu on 16/03/2018. +// +// This class is used to handle the AVCaptureSession event when ARSession is enabled. +// The stopARSessionRunning will be blocked till the AVCaptureSessionDidStopRunningNotification event has been received +// successfully, +// after then we can restart AVCaptureSession gracefully. 
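+//
+// A rough usage sketch (the variable names below are illustrative, not part of this API); the call
+// must happen on the capture queue:
+//
+//     if (@available(iOS 11.0, *)) {
+//         [arSessionHandler stopARSessionRunning]; // blocks for up to ~2 seconds waiting for the
+//                                                  // ARSession's capture session to report it stopped
+//     }
+//     [captureResource.managedSession startRunning];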
+ +#import + +#import + +@class SCCaptureResource; + +@interface SCManagedCapturerARSessionHandler : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource NS_DESIGNATED_INITIALIZER; + +- (void)stopObserving; + +- (void)stopARSessionRunning NS_AVAILABLE_IOS(11_0); +@end diff --git a/ManagedCapturer/SCManagedCapturerARSessionHandler.m b/ManagedCapturer/SCManagedCapturerARSessionHandler.m new file mode 100644 index 0000000..e262085 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerARSessionHandler.m @@ -0,0 +1,76 @@ +// +// SCManagedCapturerARSessionHandler.m +// Snapchat +// +// Created by Xiaokang Liu on 16/03/2018. +// + +#import "SCManagedCapturerARSessionHandler.h" + +#import "SCCaptureResource.h" +#import "SCManagedCaptureSession.h" + +#import +#import +#import + +@import ARKit; + +static CGFloat const kSCManagedCapturerARKitShutdownTimeoutDuration = 2; + +@interface SCManagedCapturerARSessionHandler () { + SCCaptureResource *__weak _captureResource; + dispatch_semaphore_t _arSesssionShutdownSemaphore; +} + +@end + +@implementation SCManagedCapturerARSessionHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + SCAssert(captureResource, @""); + _captureResource = captureResource; + _arSesssionShutdownSemaphore = dispatch_semaphore_create(0); + } + return self; +} + +- (void)stopObserving +{ + [[NSNotificationCenter defaultCenter] removeObserver:self + name:AVCaptureSessionDidStopRunningNotification + object:nil]; +} + +- (void)stopARSessionRunning +{ + SCAssertPerformer(_captureResource.queuePerformer); + SCAssert(SC_AT_LEAST_IOS_11, @"Shoule be only call from iOS 11+"); + if (@available(iOS 11.0, *)) { + // ARSession stops its internal AVCaptureSession asynchronously. We listen for its callback and actually restart + // our own capture session once it's finished shutting down so the two ARSessions don't conflict. + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(_completeARSessionShutdown:) + name:AVCaptureSessionDidStopRunningNotification + object:nil]; + [_captureResource.arSession pause]; + dispatch_semaphore_wait( + _arSesssionShutdownSemaphore, + dispatch_time(DISPATCH_TIME_NOW, (int64_t)(kSCManagedCapturerARKitShutdownTimeoutDuration * NSEC_PER_SEC))); + } +} + +- (void)_completeARSessionShutdown:(NSNotification *)note +{ + // This notification is only registered for IMMEDIATELY before arkit shutdown. + // Explicitly guard that the notification object IS NOT the main session's. + SC_GUARD_ELSE_RETURN(![note.object isEqual:_captureResource.managedSession.avSession]); + [[NSNotificationCenter defaultCenter] removeObserver:self + name:AVCaptureSessionDidStopRunningNotification + object:nil]; + dispatch_semaphore_signal(_arSesssionShutdownSemaphore); +} +@end diff --git a/ManagedCapturer/SCManagedCapturerListener.h b/ManagedCapturer/SCManagedCapturerListener.h new file mode 100644 index 0000000..288c201 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerListener.h @@ -0,0 +1,135 @@ +//#!announcer.rb +// +// SCManagedCaptuerListener +// Snapchat +// +// Created by Liu Liu on 4/23/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. 
+// + +#import "SCCapturer.h" +#import "SCManagedCaptureDevice.h" +#import "SCManagedRecordedVideo.h" +#import "SCVideoCaptureSessionInfo.h" + +#import + +#import +#import + +@class SCManagedCapturer; +@class SCManagedCapturerState; +@class LSAGLView; +@class SCManagedCapturerSampleMetadata; + +@protocol SCManagedCapturerListener + +@optional + +// All these calbacks are invoked on main queue + +// Start / stop / reset + +- (void)managedCapturer:(id)managedCapturer didStartRunning:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didStopRunning:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state; + +// Change state methods + +- (void)managedCapturer:(id)managedCapturer didChangeState:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer + didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state; + +- (void)managedCapturer:(id)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state; + +// The video preview layer is not maintained as a state, therefore, its change is not related to the state of +// the camera at all, listener show only manage the setup of the videoPreviewLayer. +// Since the AVCaptureVideoPreviewLayer can only attach to one AVCaptureSession per app, it is recommended you +// have a view and controller which manages the video preview layer, and for upper layer, only manage that view +// or view controller, which maintains the pointer consistency. The video preview layer is required to recreate +// every now and then because otherwise we will have cases that the old video preview layer may contain +// residual images. 
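+// (SCManagedCapturePreviewLayerController, for example, implements the two callbacks below and simply
+// re-attaches the new layer / GL view to its SCManagedCapturePreviewView.)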
+ +- (void)managedCapturer:(id)managedCapturer + didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer; + +- (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView; + +// Video recording-related methods + +- (void)managedCapturer:(id)managedCapturer + didBeginVideoRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session; + +- (void)managedCapturer:(id)managedCapturer + didBeginAudioRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session; + +- (void)managedCapturer:(id)managedCapturer + willFinishRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + recordedVideoFuture:(SCFuture> *)recordedVideoFuture + videoSize:(CGSize)videoSize + placeholderImage:(UIImage *)placeholderImage; + +- (void)managedCapturer:(id)managedCapturer + didFinishRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + recordedVideo:(SCManagedRecordedVideo *)recordedVideo; + +- (void)managedCapturer:(id)managedCapturer + didFailRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + error:(NSError *)error; + +- (void)managedCapturer:(id)managedCapturer + didCancelRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session; + +- (void)managedCapturer:(id)managedCapturer + didGetError:(NSError *)error + forType:(SCManagedVideoCapturerInfoType)type + session:(SCVideoCaptureSessionInfo)session; + +- (void)managedCapturerDidCallLenseResume:(id)managedCapturer session:(SCVideoCaptureSessionInfo)session; + +- (void)managedCapturer:(id)managedCapturer + didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer + sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata; + +// Photo methods +- (void)managedCapturer:(id)managedCapturer + willCapturePhoto:(SCManagedCapturerState *)state + sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata; + +- (void)managedCapturer:(id)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state; + +- (BOOL)managedCapturer:(id)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state; + +- (BOOL)managedCapturer:(id)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state; + +// Face detection +- (void)managedCapturer:(id)managedCapturer + didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID; +- (void)managedCapturer:(id)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint; +- (void)managedCapturer:(id)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint; +@end diff --git a/ManagedCapturer/SCManagedCapturerListenerAnnouncer.h b/ManagedCapturer/SCManagedCapturerListenerAnnouncer.h new file mode 100644 index 0000000..2dce0b4 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerListenerAnnouncer.h @@ -0,0 +1,12 @@ +// Generated by the announcer.rb DO NOT EDIT!! + +#import "SCManagedCapturerListener.h" + +#import + +@interface SCManagedCapturerListenerAnnouncer : NSObject + +- (BOOL)addListener:(id)listener; +- (void)removeListener:(id)listener; + +@end diff --git a/ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm b/ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm new file mode 100644 index 0000000..d4eea38 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerListenerAnnouncer.mm @@ -0,0 +1,505 @@ +// Generated by the announcer.rb DO NOT EDIT!! 
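+//
+// Threading model of the generated code below: addListener:/removeListener: take _mutex and build a
+// brand-new listener vector (copy-on-write), while the announcement methods only atomic_load the
+// current vector, so announcing never blocks on, or mutates, the listener list.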
+ +#import "SCManagedCapturerListenerAnnouncer.h" + +#include +using std::lock_guard; +using std::mutex; +#include +using std::find; +using std::make_shared; +using std::shared_ptr; +using std::vector; + +@implementation SCManagedCapturerListenerAnnouncer { + mutex _mutex; + shared_ptr>> _listeners; +} + +- (NSString *)description +{ + auto listeners = atomic_load(&self->_listeners); + NSMutableString *desc = [NSMutableString string]; + [desc appendFormat:@": [", self]; + for (int i = 0; i < listeners->size(); ++i) { + [desc appendFormat:@"%@", (*listeners)[i]]; + if (i != listeners->size() - 1) { + [desc appendString:@", "]; + } + } + [desc appendString:@"]"]; + return desc; +} + +- (BOOL)addListener:(id)listener +{ + lock_guard lock(_mutex); + auto listeners = make_shared>>(); + if (_listeners != nil) { + // The listener we want to add already exists + if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) { + return NO; + } + for (auto &one : *_listeners) { + if (one != nil) { + listeners->push_back(one); + } + } + listeners->push_back(listener); + atomic_store(&self->_listeners, listeners); + } else { + listeners->push_back(listener); + atomic_store(&self->_listeners, listeners); + } + return YES; +} + +- (void)removeListener:(id)listener +{ + lock_guard lock(_mutex); + if (_listeners == nil) { + return; + } + // If the only item in the listener list is the one we want to remove, store it back to nil again + if (_listeners->size() == 1 && (*_listeners)[0] == listener) { + atomic_store(&self->_listeners, shared_ptr>>()); + return; + } + auto listeners = make_shared>>(); + for (auto &one : *_listeners) { + if (one != nil && one != listener) { + listeners->push_back(one); + } + } + atomic_store(&self->_listeners, listeners); +} + +- (void)managedCapturer:(id)managedCapturer didStartRunning:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didStartRunning:)]) { + [listener managedCapturer:managedCapturer didStartRunning:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didStopRunning:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didStopRunning:)]) { + [listener managedCapturer:managedCapturer didStopRunning:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didResetFromRuntimeError:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didResetFromRuntimeError:)]) { + [listener managedCapturer:managedCapturer didResetFromRuntimeError:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeState:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) { + [listener managedCapturer:managedCapturer didChangeState:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeNightModeActive:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener 
respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) { + [listener managedCapturer:managedCapturer didChangeNightModeActive:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangePortraitModeActive:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangePortraitModeActive:)]) { + [listener managedCapturer:managedCapturer didChangePortraitModeActive:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeFlashActive:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) { + [listener managedCapturer:managedCapturer didChangeFlashActive:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeLensesActive:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) { + [listener managedCapturer:managedCapturer didChangeLensesActive:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeARSessionActive:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeARSessionActive:)]) { + [listener managedCapturer:managedCapturer didChangeARSessionActive:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didChangeFlashSupportedAndTorchSupported:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) { + [listener managedCapturer:managedCapturer didChangeFlashSupportedAndTorchSupported:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeZoomFactor:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) { + [listener managedCapturer:managedCapturer didChangeZoomFactor:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeLowLightCondition:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) { + [listener managedCapturer:managedCapturer didChangeLowLightCondition:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) { + [listener managedCapturer:managedCapturer didChangeAdjustingExposure:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeCaptureDevicePosition:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { 
+ for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) { + [listener managedCapturer:managedCapturer didChangeCaptureDevicePosition:state]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didChangeVideoPreviewLayer:(AVCaptureVideoPreviewLayer *)videoPreviewLayer +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) { + [listener managedCapturer:managedCapturer didChangeVideoPreviewLayer:videoPreviewLayer]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeVideoPreviewGLView:(LSAGLView *)videoPreviewGLView +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) { + [listener managedCapturer:managedCapturer didChangeVideoPreviewGLView:videoPreviewGLView]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didBeginVideoRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didBeginVideoRecording:session:)]) { + [listener managedCapturer:managedCapturer didBeginVideoRecording:state session:session]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didBeginAudioRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didBeginAudioRecording:session:)]) { + [listener managedCapturer:managedCapturer didBeginAudioRecording:state session:session]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + willFinishRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + recordedVideoFuture:(SCFuture> *)recordedVideoFuture + videoSize:(CGSize)videoSize + placeholderImage:(UIImage *)placeholderImage +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer: + willFinishRecording: + session: + recordedVideoFuture: + videoSize: + placeholderImage:)]) { + [listener managedCapturer:managedCapturer + willFinishRecording:state + session:session + recordedVideoFuture:recordedVideoFuture + videoSize:videoSize + placeholderImage:placeholderImage]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didFinishRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + recordedVideo:(SCManagedRecordedVideo *)recordedVideo +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didFinishRecording:session:recordedVideo:)]) { + [listener managedCapturer:managedCapturer + didFinishRecording:state + session:session + recordedVideo:recordedVideo]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didFailRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session + error:(NSError *)error +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for 
(id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didFailRecording:session:error:)]) { + [listener managedCapturer:managedCapturer didFailRecording:state session:session error:error]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didCancelRecording:(SCManagedCapturerState *)state + session:(SCVideoCaptureSessionInfo)session +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didCancelRecording:session:)]) { + [listener managedCapturer:managedCapturer didCancelRecording:state session:session]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didGetError:(NSError *)error + forType:(SCManagedVideoCapturerInfoType)type + session:(SCVideoCaptureSessionInfo)session +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didGetError:forType:session:)]) { + [listener managedCapturer:managedCapturer didGetError:error forType:type session:session]; + } + } + } +} + +- (void)managedCapturerDidCallLenseResume:(id)managedCapturer session:(SCVideoCaptureSessionInfo)session +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturerDidCallLenseResume:session:)]) { + [listener managedCapturerDidCallLenseResume:managedCapturer session:session]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer + sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didAppendVideoSampleBuffer:sampleMetadata:)]) { + [listener managedCapturer:managedCapturer + didAppendVideoSampleBuffer:sampleBuffer + sampleMetadata:sampleMetadata]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer + willCapturePhoto:(SCManagedCapturerState *)state + sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:willCapturePhoto:sampleMetadata:)]) { + [listener managedCapturer:managedCapturer willCapturePhoto:state sampleMetadata:sampleMetadata]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didCapturePhoto:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didCapturePhoto:)]) { + [listener managedCapturer:managedCapturer didCapturePhoto:state]; + } + } + } +} + +- (BOOL)managedCapturer:(id)managedCapturer isUnderDeviceMotion:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + // Check the selector that is actually forwarded below, so listeners that only implement other callbacks are skipped. + if ([listener respondsToSelector:@selector(managedCapturer:isUnderDeviceMotion:)]) { + return [listener managedCapturer:managedCapturer isUnderDeviceMotion:state]; + } + } + } + return NO; +} + +- (BOOL)managedCapturer:(id)managedCapturer shouldProcessFileInput:(SCManagedCapturerState *)state +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : 
*listeners) { + // Check and forward the shouldProcessFileInput: selector itself, rather than an unrelated callback. + if ([listener respondsToSelector:@selector(managedCapturer:shouldProcessFileInput:)]) { + return [listener managedCapturer:managedCapturer shouldProcessFileInput:state]; + } + } + } + return NO; +} + +- (void)managedCapturer:(id)managedCapturer + didDetectFaceBounds:(NSDictionary *)faceBoundsByFaceID +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didDetectFaceBounds:)]) { + [listener managedCapturer:managedCapturer didDetectFaceBounds:faceBoundsByFaceID]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeExposurePoint:(CGPoint)exposurePoint +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeExposurePoint:)]) { + [listener managedCapturer:managedCapturer didChangeExposurePoint:exposurePoint]; + } + } + } +} + +- (void)managedCapturer:(id)managedCapturer didChangeFocusPoint:(CGPoint)focusPoint +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeFocusPoint:)]) { + [listener managedCapturer:managedCapturer didChangeFocusPoint:focusPoint]; + } + } + } +} + +@end diff --git a/ManagedCapturer/SCManagedCapturerSampleMetadata.h b/ManagedCapturer/SCManagedCapturerSampleMetadata.h new file mode 100644 index 0000000..50e9c6d --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerSampleMetadata.h @@ -0,0 +1,26 @@ +// +// SCRecordingMetadata.h +// Snapchat +// + +#import + +#import +#import + +NS_ASSUME_NONNULL_BEGIN + +@interface SCManagedCapturerSampleMetadata : NSObject + +SC_INIT_AND_NEW_UNAVAILABLE + +- (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp + fieldOfView:(float)fieldOfView NS_DESIGNATED_INITIALIZER; + +@property (nonatomic, readonly) CMTime presentationTimestamp; + +@property (nonatomic, readonly) float fieldOfView; + +@end + +NS_ASSUME_NONNULL_END diff --git a/ManagedCapturer/SCManagedCapturerSampleMetadata.m b/ManagedCapturer/SCManagedCapturerSampleMetadata.m new file mode 100644 index 0000000..8b08fc4 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerSampleMetadata.m @@ -0,0 +1,24 @@ +// +// SCRecordingMetadata.m +// Snapchat +// + +#import "SCManagedCapturerSampleMetadata.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation SCManagedCapturerSampleMetadata + +- (instancetype)initWithPresentationTimestamp:(CMTime)presentationTimestamp fieldOfView:(float)fieldOfView +{ + self = [super init]; + if (self) { + _presentationTimestamp = presentationTimestamp; + _fieldOfView = fieldOfView; + } + return self; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/ManagedCapturer/SCManagedCapturerState.h b/ManagedCapturer/SCManagedCapturerState.h new file mode 100644 index 0000000..439c0a1 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerState.h @@ -0,0 +1,93 @@ +// 49126048c3d19dd5b676b8d39844cf133833b67a +// Generated by the value-object.rb DO NOT EDIT!!
+ +#import "SCManagedCaptureDevice.h" + +#import + +#import +#import + +@protocol SCManagedCapturerState + +@property (nonatomic, assign, readonly) BOOL isRunning; + +@property (nonatomic, assign, readonly) BOOL isNightModeActive; + +@property (nonatomic, assign, readonly) BOOL isPortraitModeActive; + +@property (nonatomic, assign, readonly) BOOL lowLightCondition; + +@property (nonatomic, assign, readonly) BOOL adjustingExposure; + +@property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition; + +@property (nonatomic, assign, readonly) CGFloat zoomFactor; + +@property (nonatomic, assign, readonly) BOOL flashSupported; + +@property (nonatomic, assign, readonly) BOOL torchSupported; + +@property (nonatomic, assign, readonly) BOOL flashActive; + +@property (nonatomic, assign, readonly) BOOL torchActive; + +@property (nonatomic, assign, readonly) BOOL lensesActive; + +@property (nonatomic, assign, readonly) BOOL arSessionActive; + +@property (nonatomic, assign, readonly) BOOL liveVideoStreaming; + +@property (nonatomic, assign, readonly) BOOL lensProcessorReady; + +@end + +@interface SCManagedCapturerState : NSObject + +@property (nonatomic, assign, readonly) BOOL isRunning; + +@property (nonatomic, assign, readonly) BOOL isNightModeActive; + +@property (nonatomic, assign, readonly) BOOL isPortraitModeActive; + +@property (nonatomic, assign, readonly) BOOL lowLightCondition; + +@property (nonatomic, assign, readonly) BOOL adjustingExposure; + +@property (nonatomic, assign, readonly) SCManagedCaptureDevicePosition devicePosition; + +@property (nonatomic, assign, readonly) CGFloat zoomFactor; + +@property (nonatomic, assign, readonly) BOOL flashSupported; + +@property (nonatomic, assign, readonly) BOOL torchSupported; + +@property (nonatomic, assign, readonly) BOOL flashActive; + +@property (nonatomic, assign, readonly) BOOL torchActive; + +@property (nonatomic, assign, readonly) BOOL lensesActive; + +@property (nonatomic, assign, readonly) BOOL arSessionActive; + +@property (nonatomic, assign, readonly) BOOL liveVideoStreaming; + +@property (nonatomic, assign, readonly) BOOL lensProcessorReady; + +- (instancetype)initWithIsRunning:(BOOL)isRunning + isNightModeActive:(BOOL)isNightModeActive + isPortraitModeActive:(BOOL)isPortraitModeActive + lowLightCondition:(BOOL)lowLightCondition + adjustingExposure:(BOOL)adjustingExposure + devicePosition:(SCManagedCaptureDevicePosition)devicePosition + zoomFactor:(CGFloat)zoomFactor + flashSupported:(BOOL)flashSupported + torchSupported:(BOOL)torchSupported + flashActive:(BOOL)flashActive + torchActive:(BOOL)torchActive + lensesActive:(BOOL)lensesActive + arSessionActive:(BOOL)arSessionActive + liveVideoStreaming:(BOOL)liveVideoStreaming + lensProcessorReady:(BOOL)lensProcessorReady; + +@end diff --git a/ManagedCapturer/SCManagedCapturerState.m b/ManagedCapturer/SCManagedCapturerState.m new file mode 100644 index 0000000..d9b9454 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerState.m @@ -0,0 +1,359 @@ +// 49126048c3d19dd5b676b8d39844cf133833b67a +// Generated by the value-object.rb DO NOT EDIT!! 
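+
+// Behavioural sketch (illustrative; the local variable names are ours): SCManagedCapturerState is an
+// immutable value object, so -copy simply returns the receiver (see -copyWithZone: below) and -isEqual:
+// compares states field by field:
+//
+//   SCManagedCapturerState *snapshot = [state copy]; // same instance, no actual copy is made
+//   BOOL unchanged = [state isEqual:snapshot];       // YES while no new state has been built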
+ +#import "SCManagedCapturerState.h" + +#import + +#import + +@implementation SCManagedCapturerState + +static ptrdiff_t sSCManagedCapturerStateOffsets[0]; +static BOOL sSCManagedCapturerStateHasOffsets; + +- (instancetype)initWithIsRunning:(BOOL)isRunning + isNightModeActive:(BOOL)isNightModeActive + isPortraitModeActive:(BOOL)isPortraitModeActive + lowLightCondition:(BOOL)lowLightCondition + adjustingExposure:(BOOL)adjustingExposure + devicePosition:(SCManagedCaptureDevicePosition)devicePosition + zoomFactor:(CGFloat)zoomFactor + flashSupported:(BOOL)flashSupported + torchSupported:(BOOL)torchSupported + flashActive:(BOOL)flashActive + torchActive:(BOOL)torchActive + lensesActive:(BOOL)lensesActive + arSessionActive:(BOOL)arSessionActive + liveVideoStreaming:(BOOL)liveVideoStreaming + lensProcessorReady:(BOOL)lensProcessorReady +{ + self = [super init]; + if (self) { + _isRunning = isRunning; + _isNightModeActive = isNightModeActive; + _isPortraitModeActive = isPortraitModeActive; + _lowLightCondition = lowLightCondition; + _adjustingExposure = adjustingExposure; + _devicePosition = devicePosition; + _zoomFactor = zoomFactor; + _flashSupported = flashSupported; + _torchSupported = torchSupported; + _flashActive = flashActive; + _torchActive = torchActive; + _lensesActive = lensesActive; + _arSessionActive = arSessionActive; + _liveVideoStreaming = liveVideoStreaming; + _lensProcessorReady = lensProcessorReady; + } + return self; +} + +#pragma mark - NSCopying + +- (instancetype)copyWithZone:(NSZone *)zone +{ + // Immutable object, bypass copy + return self; +} + +#pragma mark - NSCoding + +- (instancetype)initWithCoder:(NSCoder *)aDecoder +{ + self = [super init]; + if (self) { + _isRunning = [aDecoder decodeBoolForKey:@"isRunning"]; + _isNightModeActive = [aDecoder decodeBoolForKey:@"isNightModeActive"]; + _isPortraitModeActive = [aDecoder decodeBoolForKey:@"isPortraitModeActive"]; + _lowLightCondition = [aDecoder decodeBoolForKey:@"lowLightCondition"]; + _adjustingExposure = [aDecoder decodeBoolForKey:@"adjustingExposure"]; + _devicePosition = (SCManagedCaptureDevicePosition)[aDecoder decodeIntegerForKey:@"devicePosition"]; + _zoomFactor = [aDecoder decodeFloatForKey:@"zoomFactor"]; + _flashSupported = [aDecoder decodeBoolForKey:@"flashSupported"]; + _torchSupported = [aDecoder decodeBoolForKey:@"torchSupported"]; + _flashActive = [aDecoder decodeBoolForKey:@"flashActive"]; + _torchActive = [aDecoder decodeBoolForKey:@"torchActive"]; + _lensesActive = [aDecoder decodeBoolForKey:@"lensesActive"]; + _arSessionActive = [aDecoder decodeBoolForKey:@"arSessionActive"]; + _liveVideoStreaming = [aDecoder decodeBoolForKey:@"liveVideoStreaming"]; + _lensProcessorReady = [aDecoder decodeBoolForKey:@"lensProcessorReady"]; + } + return self; +} + +- (void)encodeWithCoder:(NSCoder *)aCoder +{ + [aCoder encodeBool:_isRunning forKey:@"isRunning"]; + [aCoder encodeBool:_isNightModeActive forKey:@"isNightModeActive"]; + [aCoder encodeBool:_isPortraitModeActive forKey:@"isPortraitModeActive"]; + [aCoder encodeBool:_lowLightCondition forKey:@"lowLightCondition"]; + [aCoder encodeBool:_adjustingExposure forKey:@"adjustingExposure"]; + [aCoder encodeInteger:(NSInteger)_devicePosition forKey:@"devicePosition"]; + [aCoder encodeFloat:_zoomFactor forKey:@"zoomFactor"]; + [aCoder encodeBool:_flashSupported forKey:@"flashSupported"]; + [aCoder encodeBool:_torchSupported forKey:@"torchSupported"]; + [aCoder encodeBool:_flashActive forKey:@"flashActive"]; + [aCoder encodeBool:_torchActive forKey:@"torchActive"]; 
+ [aCoder encodeBool:_lensesActive forKey:@"lensesActive"]; + [aCoder encodeBool:_arSessionActive forKey:@"arSessionActive"]; + [aCoder encodeBool:_liveVideoStreaming forKey:@"liveVideoStreaming"]; + [aCoder encodeBool:_lensProcessorReady forKey:@"lensProcessorReady"]; +} + +#pragma mark - FasterCoding + +- (BOOL)preferFasterCoding +{ + return YES; +} + +- (void)encodeWithFasterCoder:(id)fasterCoder +{ + [fasterCoder encodeBool:_adjustingExposure]; + [fasterCoder encodeBool:_arSessionActive]; + [fasterCoder encodeSInt32:_devicePosition]; + [fasterCoder encodeBool:_flashActive]; + [fasterCoder encodeBool:_flashSupported]; + [fasterCoder encodeBool:_isNightModeActive]; + [fasterCoder encodeBool:_isPortraitModeActive]; + [fasterCoder encodeBool:_isRunning]; + [fasterCoder encodeBool:_lensProcessorReady]; + [fasterCoder encodeBool:_lensesActive]; + [fasterCoder encodeBool:_liveVideoStreaming]; + [fasterCoder encodeBool:_lowLightCondition]; + [fasterCoder encodeBool:_torchActive]; + [fasterCoder encodeBool:_torchSupported]; + [fasterCoder encodeFloat64:_zoomFactor]; +} + +- (void)decodeWithFasterDecoder:(id)fasterDecoder +{ + _adjustingExposure = (BOOL)[fasterDecoder decodeBool]; + _arSessionActive = (BOOL)[fasterDecoder decodeBool]; + _devicePosition = (SCManagedCaptureDevicePosition)[fasterDecoder decodeSInt32]; + _flashActive = (BOOL)[fasterDecoder decodeBool]; + _flashSupported = (BOOL)[fasterDecoder decodeBool]; + _isNightModeActive = (BOOL)[fasterDecoder decodeBool]; + _isPortraitModeActive = (BOOL)[fasterDecoder decodeBool]; + _isRunning = (BOOL)[fasterDecoder decodeBool]; + _lensProcessorReady = (BOOL)[fasterDecoder decodeBool]; + _lensesActive = (BOOL)[fasterDecoder decodeBool]; + _liveVideoStreaming = (BOOL)[fasterDecoder decodeBool]; + _lowLightCondition = (BOOL)[fasterDecoder decodeBool]; + _torchActive = (BOOL)[fasterDecoder decodeBool]; + _torchSupported = (BOOL)[fasterDecoder decodeBool]; + _zoomFactor = (CGFloat)[fasterDecoder decodeFloat64]; +} + +- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 15633755733674300ULL: + _adjustingExposure = (BOOL)val; + break; + case 11461798188076803ULL: + _arSessionActive = (BOOL)val; + break; + case 12833337784991002ULL: + _flashActive = (BOOL)val; + break; + case 51252237764061994ULL: + _flashSupported = (BOOL)val; + break; + case 1498048848502287ULL: + _isNightModeActive = (BOOL)val; + break; + case 56151582267629469ULL: + _isPortraitModeActive = (BOOL)val; + break; + case 12346172623874083ULL: + _isRunning = (BOOL)val; + break; + case 67168377441917657ULL: + _lensProcessorReady = (BOOL)val; + break; + case 5791542045168142ULL: + _lensesActive = (BOOL)val; + break; + case 28486888710545224ULL: + _liveVideoStreaming = (BOOL)val; + break; + case 24071673583499455ULL: + _lowLightCondition = (BOOL)val; + break; + case 40774429934225315ULL: + _torchActive = (BOOL)val; + break; + case 41333098301057670ULL: + _torchSupported = (BOOL)val; + break; + } +} + +- (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 66264093189780655ULL: + _devicePosition = (SCManagedCaptureDevicePosition)val; + break; + } +} + +- (void)setFloat64:(double)val forUInt64Key:(uint64_t)key +{ + switch (key) { + case 61340640993537628ULL: + _zoomFactor = (CGFloat)val; + break; + } +} + ++ (uint64_t)fasterCodingVersion +{ + return 10319810232046341562ULL; +} + ++ (uint64_t *)fasterCodingKeys +{ + static uint64_t keys[] = { + 15 /* Total */, + FC_ENCODE_KEY_TYPE(15633755733674300, FCEncodeTypeBool), + 
FC_ENCODE_KEY_TYPE(11461798188076803, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(66264093189780655, FCEncodeTypeSInt32), + FC_ENCODE_KEY_TYPE(12833337784991002, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(51252237764061994, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(1498048848502287, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(56151582267629469, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(12346172623874083, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(67168377441917657, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(5791542045168142, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(28486888710545224, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(24071673583499455, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(40774429934225315, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(41333098301057670, FCEncodeTypeBool), + FC_ENCODE_KEY_TYPE(61340640993537628, FCEncodeTypeFloat64), + }; + return keys; +} + +#pragma mark - isEqual + +- (BOOL)isEqual:(id)object +{ + if (!SCObjectsIsEqual(self, object, &sSCManagedCapturerStateHasOffsets, sSCManagedCapturerStateOffsets, 15, 0)) { + return NO; + } + SCManagedCapturerState *other = (SCManagedCapturerState *)object; + if (other->_isRunning != _isRunning) { + return NO; + } + + if (other->_isNightModeActive != _isNightModeActive) { + return NO; + } + + if (other->_isPortraitModeActive != _isPortraitModeActive) { + return NO; + } + + if (other->_lowLightCondition != _lowLightCondition) { + return NO; + } + + if (other->_adjustingExposure != _adjustingExposure) { + return NO; + } + + if (other->_devicePosition != _devicePosition) { + return NO; + } + + if (other->_zoomFactor != _zoomFactor) { + return NO; + } + + if (other->_flashSupported != _flashSupported) { + return NO; + } + + if (other->_torchSupported != _torchSupported) { + return NO; + } + + if (other->_flashActive != _flashActive) { + return NO; + } + + if (other->_torchActive != _torchActive) { + return NO; + } + + if (other->_lensesActive != _lensesActive) { + return NO; + } + + if (other->_arSessionActive != _arSessionActive) { + return NO; + } + + if (other->_liveVideoStreaming != _liveVideoStreaming) { + return NO; + } + + if (other->_lensProcessorReady != _lensProcessorReady) { + return NO; + } + + return YES; +} + +- (NSUInteger)hash +{ + NSUInteger subhashes[] = { + (NSUInteger)_isRunning, (NSUInteger)_isNightModeActive, (NSUInteger)_isPortraitModeActive, + (NSUInteger)_lowLightCondition, (NSUInteger)_adjustingExposure, (NSUInteger)_devicePosition, + (NSUInteger)_zoomFactor, (NSUInteger)_flashSupported, (NSUInteger)_torchSupported, + (NSUInteger)_flashActive, (NSUInteger)_torchActive, (NSUInteger)_lensesActive, + (NSUInteger)_arSessionActive, (NSUInteger)_liveVideoStreaming, (NSUInteger)_lensProcessorReady}; + NSUInteger result = subhashes[0]; + for (int i = 1; i < 15; i++) { + unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]); + base = (~base) + (base << 18); + base ^= (base >> 31); + base *= 21; + base ^= (base >> 11); + base += (base << 6); + base ^= (base >> 22); + result = (NSUInteger)base; + } + return result; +} + +#pragma mark - Print description in console: lldb> po #{variable name} + +- (NSString *)description +{ + NSMutableString *desc = [NSMutableString string]; + [desc appendString:@"{\n"]; + [desc appendFormat:@"\tisRunning:%@\n", [@(_isRunning) description]]; + [desc appendFormat:@"\tisNightModeActive:%@\n", [@(_isNightModeActive) description]]; + [desc appendFormat:@"\tisPortraitModeActive:%@\n", [@(_isPortraitModeActive) description]]; + [desc appendFormat:@"\tlowLightCondition:%@\n", 
[@(_lowLightCondition) description]]; + [desc appendFormat:@"\tadjustingExposure:%@\n", [@(_adjustingExposure) description]]; + [desc appendFormat:@"\tdevicePosition:%@\n", [@(_devicePosition) description]]; + [desc appendFormat:@"\tzoomFactor:%@\n", [@(_zoomFactor) description]]; + [desc appendFormat:@"\tflashSupported:%@\n", [@(_flashSupported) description]]; + [desc appendFormat:@"\ttorchSupported:%@\n", [@(_torchSupported) description]]; + [desc appendFormat:@"\tflashActive:%@\n", [@(_flashActive) description]]; + [desc appendFormat:@"\ttorchActive:%@\n", [@(_torchActive) description]]; + [desc appendFormat:@"\tlensesActive:%@\n", [@(_lensesActive) description]]; + [desc appendFormat:@"\tarSessionActive:%@\n", [@(_arSessionActive) description]]; + [desc appendFormat:@"\tliveVideoStreaming:%@\n", [@(_liveVideoStreaming) description]]; + [desc appendFormat:@"\tlensProcessorReady:%@\n", [@(_lensProcessorReady) description]]; + [desc appendString:@"}\n"]; + + return [desc copy]; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturerState.value b/ManagedCapturer/SCManagedCapturerState.value new file mode 100644 index 0000000..1d49d3d --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerState.value @@ -0,0 +1,20 @@ +#import +#import "SCManagedCaptureDevice.h" + +interface SCManagedCapturerState + BOOL isRunning + BOOL isNightModeActive + BOOL isPortraitModeActive + BOOL lowLightCondition + BOOL adjustingExposure + enum SCManagedCaptureDevicePosition devicePosition + CGFloat zoomFactor + BOOL flashSupported + BOOL torchSupported + BOOL flashActive + BOOL torchActive + BOOL lensesActive + BOOL arSessionActive + BOOL liveVideoStreaming + BOOL lensProcessorReady +end diff --git a/ManagedCapturer/SCManagedCapturerStateBuilder.h b/ManagedCapturer/SCManagedCapturerStateBuilder.h new file mode 100644 index 0000000..7a9adb8 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerStateBuilder.h @@ -0,0 +1,46 @@ +// 49126048c3d19dd5b676b8d39844cf133833b67a +// Generated by the value-object.rb DO NOT EDIT!! + +#import "SCManagedCapturerState.h" + +#import + +#import + +@interface SCManagedCapturerStateBuilder : NSObject + ++ (instancetype)withManagedCapturerState:(id)managedCapturerState; + +- (SCManagedCapturerState *)build; + +- (instancetype)setIsRunning:(BOOL)isRunning; + +- (instancetype)setIsNightModeActive:(BOOL)isNightModeActive; + +- (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive; + +- (instancetype)setLowLightCondition:(BOOL)lowLightCondition; + +- (instancetype)setAdjustingExposure:(BOOL)adjustingExposure; + +- (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition; + +- (instancetype)setZoomFactor:(CGFloat)zoomFactor; + +- (instancetype)setFlashSupported:(BOOL)flashSupported; + +- (instancetype)setTorchSupported:(BOOL)torchSupported; + +- (instancetype)setFlashActive:(BOOL)flashActive; + +- (instancetype)setTorchActive:(BOOL)torchActive; + +- (instancetype)setLensesActive:(BOOL)lensesActive; + +- (instancetype)setArSessionActive:(BOOL)arSessionActive; + +- (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming; + +- (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady; + +@end diff --git a/ManagedCapturer/SCManagedCapturerStateBuilder.m b/ManagedCapturer/SCManagedCapturerStateBuilder.m new file mode 100644 index 0000000..c468335 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerStateBuilder.m @@ -0,0 +1,158 @@ +// 49126048c3d19dd5b676b8d39844cf133833b67a +// Generated by the value-object.rb DO NOT EDIT!! 
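+
+// Usage sketch (illustrative; the local variable names are ours): the builder copies every field of an
+// existing immutable SCManagedCapturerState, lets callers overwrite individual fields through the setters
+// declared in the header above, and -build produces a new immutable value object:
+//
+//   SCManagedCapturerState *updated =
+//       [[[SCManagedCapturerStateBuilder withManagedCapturerState:currentState] setFlashActive:YES] build];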
+ +#import "SCManagedCapturerStateBuilder.h" + +#import + +#import + +@implementation SCManagedCapturerStateBuilder { + BOOL _isRunning; + BOOL _isNightModeActive; + BOOL _isPortraitModeActive; + BOOL _lowLightCondition; + BOOL _adjustingExposure; + SCManagedCaptureDevicePosition _devicePosition; + CGFloat _zoomFactor; + BOOL _flashSupported; + BOOL _torchSupported; + BOOL _flashActive; + BOOL _torchActive; + BOOL _lensesActive; + BOOL _arSessionActive; + BOOL _liveVideoStreaming; + BOOL _lensProcessorReady; +} + ++ (instancetype)withManagedCapturerState:(id)managedCapturerState +{ + SCManagedCapturerStateBuilder *builder = [[SCManagedCapturerStateBuilder alloc] init]; + builder->_isRunning = managedCapturerState.isRunning; + builder->_isNightModeActive = managedCapturerState.isNightModeActive; + builder->_isPortraitModeActive = managedCapturerState.isPortraitModeActive; + builder->_lowLightCondition = managedCapturerState.lowLightCondition; + builder->_adjustingExposure = managedCapturerState.adjustingExposure; + builder->_devicePosition = managedCapturerState.devicePosition; + builder->_zoomFactor = managedCapturerState.zoomFactor; + builder->_flashSupported = managedCapturerState.flashSupported; + builder->_torchSupported = managedCapturerState.torchSupported; + builder->_flashActive = managedCapturerState.flashActive; + builder->_torchActive = managedCapturerState.torchActive; + builder->_lensesActive = managedCapturerState.lensesActive; + builder->_arSessionActive = managedCapturerState.arSessionActive; + builder->_liveVideoStreaming = managedCapturerState.liveVideoStreaming; + builder->_lensProcessorReady = managedCapturerState.lensProcessorReady; + return builder; +} + +- (SCManagedCapturerState *)build +{ + return [[SCManagedCapturerState alloc] initWithIsRunning:_isRunning + isNightModeActive:_isNightModeActive + isPortraitModeActive:_isPortraitModeActive + lowLightCondition:_lowLightCondition + adjustingExposure:_adjustingExposure + devicePosition:_devicePosition + zoomFactor:_zoomFactor + flashSupported:_flashSupported + torchSupported:_torchSupported + flashActive:_flashActive + torchActive:_torchActive + lensesActive:_lensesActive + arSessionActive:_arSessionActive + liveVideoStreaming:_liveVideoStreaming + lensProcessorReady:_lensProcessorReady]; +} + +- (instancetype)setIsRunning:(BOOL)isRunning +{ + _isRunning = isRunning; + return self; +} + +- (instancetype)setIsNightModeActive:(BOOL)isNightModeActive +{ + _isNightModeActive = isNightModeActive; + return self; +} + +- (instancetype)setIsPortraitModeActive:(BOOL)isPortraitModeActive +{ + _isPortraitModeActive = isPortraitModeActive; + return self; +} + +- (instancetype)setLowLightCondition:(BOOL)lowLightCondition +{ + _lowLightCondition = lowLightCondition; + return self; +} + +- (instancetype)setAdjustingExposure:(BOOL)adjustingExposure +{ + _adjustingExposure = adjustingExposure; + return self; +} + +- (instancetype)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + _devicePosition = devicePosition; + return self; +} + +- (instancetype)setZoomFactor:(CGFloat)zoomFactor +{ + _zoomFactor = zoomFactor; + return self; +} + +- (instancetype)setFlashSupported:(BOOL)flashSupported +{ + _flashSupported = flashSupported; + return self; +} + +- (instancetype)setTorchSupported:(BOOL)torchSupported +{ + _torchSupported = torchSupported; + return self; +} + +- (instancetype)setFlashActive:(BOOL)flashActive +{ + _flashActive = flashActive; + return self; +} + +- (instancetype)setTorchActive:(BOOL)torchActive +{ + 
_torchActive = torchActive; + return self; +} + +- (instancetype)setLensesActive:(BOOL)lensesActive +{ + _lensesActive = lensesActive; + return self; +} + +- (instancetype)setArSessionActive:(BOOL)arSessionActive +{ + _arSessionActive = arSessionActive; + return self; +} + +- (instancetype)setLiveVideoStreaming:(BOOL)liveVideoStreaming +{ + _liveVideoStreaming = liveVideoStreaming; + return self; +} + +- (instancetype)setLensProcessorReady:(BOOL)lensProcessorReady +{ + _lensProcessorReady = lensProcessorReady; + return self; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturerUtils.h b/ManagedCapturer/SCManagedCapturerUtils.h new file mode 100644 index 0000000..2a0f0f8 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerUtils.h @@ -0,0 +1,36 @@ +// +// SCManagedCapturerUtils.h +// Snapchat +// +// Created by Chao Pang on 10/4/17. +// + +#import + +#import +#import + +SC_EXTERN_C_BEGIN + +extern const CGFloat kSCIPhoneXCapturedImageVideoCropRatio; + +extern CGFloat SCManagedCapturedImageAndVideoAspectRatio(void); + +extern CGSize SCManagedCapturerAllScreenSize(void); + +extern CGSize SCAsyncImageCapturePlaceholderViewSize(void); + +extern CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio); + +extern UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio); + +extern void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation, + CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight); + +extern BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio); + +extern CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight); + +extern CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, + CGFloat aspectRatio); +SC_EXTERN_C_END diff --git a/ManagedCapturer/SCManagedCapturerUtils.m b/ManagedCapturer/SCManagedCapturerUtils.m new file mode 100644 index 0000000..1e7662c --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerUtils.m @@ -0,0 +1,153 @@ +// +// SCManagedCapturerUtils.m +// Snapchat +// +// Created by Chao Pang on 10/4/17. +// + +#import "SCManagedCapturerUtils.h" + +#import "SCCaptureCommon.h" + +#import +#import +#import +#import + +// This is to calculate the crop ratio for generating the image shown in Preview page +// Check https://snapchat.quip.com/lU3kAoDxaAFG for our design. +const CGFloat kSCIPhoneXCapturedImageVideoCropRatio = (397.0 * 739.0) / (375.0 * 812.0); + +CGFloat SCManagedCapturedImageAndVideoAspectRatio(void) +{ + static dispatch_once_t onceToken; + static CGFloat aspectRatio; + dispatch_once(&onceToken, ^{ + CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; + UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets]; + aspectRatio = SCSizeGetAspectRatio( + CGSizeMake(screenSize.width, screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)); + }); + return aspectRatio; +} + +CGSize SCManagedCapturerAllScreenSize(void) +{ + static CGSize size; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; + // This logic is complicated because we need to handle iPhone X properly. + // See https://snapchat.quip.com/lU3kAoDxaAFG for our design. 
+ UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets]; + UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets]; + // This really is just some coordinate computations: + // We know in preview, our size is (screenWidth, screenHeight - topInset - bottomInset) + // We know that when the preview image is in the camera screen, the height is screenHeight - visualTopInset, + // thus, we need to figure out in camera screen, what's the bleed-over width should be + // (screenWidth * (screenHeight - visualTopInset) / (screenHeight - topInset - bottomInset) + size = CGSizeMake(roundf(screenSize.width * (screenSize.height - visualSafeInsets.top) / + (screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)), + screenSize.height); + }); + return size; +} + +CGSize SCAsyncImageCapturePlaceholderViewSize(void) +{ + static CGSize size; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + CGSize screenSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; + UIEdgeInsets safeAreaInsets = [UIScreen sc_safeAreaInsets]; + UIEdgeInsets visualSafeInsets = [UIScreen sc_visualSafeInsets]; + size = CGSizeMake(roundf((screenSize.height - visualSafeInsets.top) * screenSize.width / + (screenSize.height - safeAreaInsets.top - safeAreaInsets.bottom)), + screenSize.height - visualSafeInsets.top); + }); + return size; +} + +CGFloat SCAdjustedAspectRatio(UIImageOrientation orientation, CGFloat aspectRatio) +{ + SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @""); + switch (orientation) { + case UIImageOrientationLeft: + case UIImageOrientationRight: + case UIImageOrientationLeftMirrored: + case UIImageOrientationRightMirrored: + return 1.0 / aspectRatio; + default: + return aspectRatio; + } +} + +UIImage *SCCropImageToTargetAspectRatio(UIImage *image, CGFloat targetAspectRatio) +{ + if (SCNeedsCropImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio)) { + CGImageRef croppedImageRef = + SCCreateCroppedImageToAspectRatio(image.CGImage, image.imageOrientation, targetAspectRatio); + UIImage *croppedImage = + [UIImage imageWithCGImage:croppedImageRef scale:image.scale orientation:image.imageOrientation]; + CGImageRelease(croppedImageRef); + return croppedImage; + } else { + return image; + } +} + +void SCCropImageSizeToAspectRatio(size_t inputWidth, size_t inputHeight, UIImageOrientation orientation, + CGFloat aspectRatio, size_t *outputWidth, size_t *outputHeight) +{ + SCCAssert(outputWidth != NULL && outputHeight != NULL, @""); + aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio); + if (inputWidth > roundf(inputHeight * aspectRatio)) { + *outputHeight = inputHeight; + *outputWidth = roundf(*outputHeight * aspectRatio); + } else { + *outputWidth = inputWidth; + *outputHeight = roundf(*outputWidth / aspectRatio); + } +} + +BOOL SCNeedsCropImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio) +{ + if (aspectRatio == kSCManagedCapturerAspectRatioUnspecified) { + return NO; + } + aspectRatio = SCAdjustedAspectRatio(orientation, aspectRatio); + size_t width = CGImageGetWidth(image); + size_t height = CGImageGetHeight(image); + return (width != roundf(height * aspectRatio)); +} + +CGRect SCCalculateRectToCrop(size_t imageWidth, size_t imageHeight, size_t croppedWidth, size_t croppedHeight) +{ + if ([SCDeviceName isIphoneX]) { + // X is pushed all the way over to crop out top section but none of bottom + CGFloat x = (imageWidth - croppedWidth); + // Crop y symmetrically. 
+ CGFloat y = roundf((imageHeight - croppedHeight) / 2.0); + + return CGRectMake(x, y, croppedWidth, croppedHeight); + } + return CGRectMake((imageWidth - croppedWidth) / 2, (imageHeight - croppedHeight) / 2, croppedWidth, croppedHeight); +} + +CGImageRef SCCreateCroppedImageToAspectRatio(CGImageRef image, UIImageOrientation orientation, CGFloat aspectRatio) +{ + SCCAssert(aspectRatio != kSCManagedCapturerAspectRatioUnspecified, @""); + size_t width = CGImageGetWidth(image); + size_t height = CGImageGetHeight(image); + size_t croppedWidth, croppedHeight; + if ([SCDeviceName isIphoneX]) { + size_t adjustedWidth = (size_t)(width * kSCIPhoneXCapturedImageVideoCropRatio); + size_t adjustedHeight = (size_t)(height * kSCIPhoneXCapturedImageVideoCropRatio); + SCCropImageSizeToAspectRatio(adjustedWidth, adjustedHeight, orientation, aspectRatio, &croppedWidth, + &croppedHeight); + } else { + SCCropImageSizeToAspectRatio(width, height, orientation, aspectRatio, &croppedWidth, &croppedHeight); + } + CGRect cropRect = SCCalculateRectToCrop(width, height, croppedWidth, croppedHeight); + return CGImageCreateWithImageInRect(image, cropRect); +} diff --git a/ManagedCapturer/SCManagedCapturerV1.h b/ManagedCapturer/SCManagedCapturerV1.h new file mode 100644 index 0000000..be8fe65 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerV1.h @@ -0,0 +1,57 @@ +// +// SCManagedCapturer.h +// Snapchat +// +// Created by Liu Liu on 4/20/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCCaptureCommon.h" +#import "SCCapturer.h" + +#import + +#import +#import + +/** + * Manage AVCaptureSession with SCManagedCapturerV1 + * + * In phantom, there are a lot of places where we use AVCaptureSession. However, since for each app, only one session + * can run at the same time, we need some kind of management for the capture session. + * + * SCManagedCapturerV1 manages the state of the capture session in the following ways: + * + * All operations in SCManagedCapturerV1 are handled on a serial queue to ensure their ordering. All callbacks (either + * on the listener or the completion handler) are on the main thread. The state of SCManagedCapturerV1 is conveniently + * maintained in an SCManagedCapturerState object, which is immutable and can be passed across threads; it maintains a + * consistent, if possibly delayed, view of the capture session (thus, the state delivered on the main thread may still + * report the back camera as the active device while, on the serial queue, the active device has already switched to the + * front camera. However, this is OK because state.devicePosition will be the back camera, with all of its setup, at that + * time. Note that it is impossible to have an up-to-the-moment view of the state across threads without them blocking + * each other). + * + * For the main use cases, you set up the capturer, add the preview layer, and then capture a still image + * or record a video, and SCManagedCapturerV1 will do the rest (make sure it actually captures the image / video, + * recovers from errors, and sets up our more advanced image / video post-processing). + * + * The key classes that drive the recording flow are SCManagedVideoStreamer and SCManagedVideoFileStreamer, which + * conform to SCManagedVideoDataSource. They will stream images to consumers conforming to + * SCManagedVideoDataSourceListener, + * such as SCManagedLensesProcessor, SCManagedDeviceCapacityAnalyzer, SCManagedVideoScanner and ultimately + * SCManagedVideoCapturer and SCManagedStillImageCapturer, which record the final output.
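+ *
+ * A minimal usage sketch (the context string, the nil completion handlers and the device-position constant
+ * below are illustrative, not prescribed by this header; the methods shown are implemented in
+ * SCManagedCapturerV1.m):
+ *
+ *   SCManagedCapturerV1 *capturer = [SCManagedCapturerV1 sharedInstance];
+ *   [capturer setupWithDevicePositionAsynchronously:SCManagedCaptureDevicePositionBack
+ *                                 completionHandler:nil
+ *                                           context:@"MyCameraContext"];
+ *   SCCapturerToken *token = [capturer startRunningAsynchronouslyWithCompletionHandler:nil
+ *                                                                              context:@"MyCameraContext"];
+ *   // ... later, stop with the token returned above:
+ *   [capturer stopRunningAsynchronously:token completionHandler:nil context:@"MyCameraContext"];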
+ * + */ +@class SCCaptureResource; + +extern NSString *const kSCLensesTweaksDidChangeFileInput; + +@interface SCManagedCapturerV1 : NSObject + ++ (SCManagedCapturerV1 *)sharedInstance; + +/* + The following APIs are reserved to be only used for SCCaptureCore aka managedCapturerV2. + */ +- (instancetype)initWithResource:(SCCaptureResource *)resource; + +@end diff --git a/ManagedCapturer/SCManagedCapturerV1.m b/ManagedCapturer/SCManagedCapturerV1.m new file mode 100644 index 0000000..ba3e579 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerV1.m @@ -0,0 +1,2165 @@ +// +// SCManagedCapturer.m +// Snapchat +// +// Created by Liu Liu on 4/20/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCManagedCapturerV1.h" +#import "SCManagedCapturerV1_Private.h" + +#import "ARConfiguration+SCConfiguration.h" +#import "NSURL+Asset.h" +#import "SCBlackCameraDetector.h" +#import "SCBlackCameraNoOutputDetector.h" +#import "SCCameraTweaks.h" +#import "SCCaptureResource.h" +#import "SCCaptureSessionFixer.h" +#import "SCCaptureUninitializedState.h" +#import "SCCaptureWorker.h" +#import "SCCapturerToken.h" +#import "SCManagedAudioStreamer.h" +#import "SCManagedCaptureDevice+SCManagedCapturer.h" +#import "SCManagedCaptureDeviceDefaultZoomHandler.h" +#import "SCManagedCaptureDeviceHandler.h" +#import "SCManagedCaptureDeviceSubjectAreaHandler.h" +#import "SCManagedCapturePreviewLayerController.h" +#import "SCManagedCaptureSession.h" +#import "SCManagedCapturerARImageCaptureProvider.h" +#import "SCManagedCapturerGLViewManagerAPI.h" +#import "SCManagedCapturerLSAComponentTrackerAPI.h" +#import "SCManagedCapturerLensAPI.h" +#import "SCManagedCapturerListenerAnnouncer.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerSampleMetadata.h" +#import "SCManagedCapturerState.h" +#import "SCManagedCapturerStateBuilder.h" +#import "SCManagedDeviceCapacityAnalyzer.h" +#import "SCManagedDroppedFramesReporter.h" +#import "SCManagedFrameHealthChecker.h" +#import "SCManagedFrontFlashController.h" +#import "SCManagedStillImageCapturer.h" +#import "SCManagedStillImageCapturerHandler.h" +#import "SCManagedVideoARDataSource.h" +#import "SCManagedVideoCapturer.h" +#import "SCManagedVideoFileStreamer.h" +#import "SCManagedVideoFrameSampler.h" +#import "SCManagedVideoScanner.h" +#import "SCManagedVideoStreamReporter.h" +#import "SCManagedVideoStreamer.h" +#import "SCMetalUtils.h" +#import "SCProcessingPipeline.h" +#import "SCProcessingPipelineBuilder.h" +#import "SCScanConfiguration.h" +#import "SCSingleFrameStreamCapturer.h" +#import "SCSnapCreationTriggers.h" +#import "SCTimedTask.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import + +#import + +@import ARKit; + +static NSUInteger const kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession = 22; +static CGFloat const kSCManagedCapturerFixInconsistencyARSessionDelayThreshold = 2; +static CGFloat const kSCManagedCapturerFixInconsistencyARSessionHungInitThreshold = 5; + +static NSTimeInterval const kMinFixAVSessionRunningInterval = 1; // Interval to run _fixAVSessionIfNecessary +static NSTimeInterval const kMinFixSessionRuntimeErrorInterval = + 1; // Min interval that RuntimeError calls _startNewSession + +static NSString *const kSCManagedCapturerErrorDomain = @"kSCManagedCapturerErrorDomain"; + +NSString *const kSCLensesTweaksDidChangeFileInput = @"kSCLensesTweaksDidChangeFileInput"; + +@implementation 
SCManagedCapturerV1 { + // No ivars for CapturerV1 please, they should be in resource. + SCCaptureResource *_captureResource; +} + ++ (SCManagedCapturerV1 *)sharedInstance +{ + static dispatch_once_t onceToken; + static SCManagedCapturerV1 *managedCapturerV1; + dispatch_once(&onceToken, ^{ + managedCapturerV1 = [[SCManagedCapturerV1 alloc] init]; + }); + return managedCapturerV1; +} + +- (instancetype)init +{ + SCTraceStart(); + SCAssertMainThread(); + SCCaptureResource *resource = [SCCaptureWorker generateCaptureResource]; + return [self initWithResource:resource]; +} + +- (instancetype)initWithResource:(SCCaptureResource *)resource +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + self = [super init]; + if (self) { + // Assuming I am not in background. I can be more defensive here and fetch the app state. + // But to avoid potential problems, won't do that until later. + SCLogCapturerInfo(@"======================= cool startup ======================="); + // Initialization of capture resource should be done in worker to be shared between V1 and V2. + _captureResource = resource; + _captureResource.handleAVSessionStatusChange = @selector(_handleAVSessionStatusChange:); + _captureResource.sessionRuntimeError = @selector(_sessionRuntimeError:); + _captureResource.livenessConsistency = @selector(_livenessConsistency:); + _captureResource.deviceSubjectAreaHandler = + [[SCManagedCaptureDeviceSubjectAreaHandler alloc] initWithCaptureResource:_captureResource]; + _captureResource.snapCreationTriggers = [SCSnapCreationTriggers new]; + if (SCIsMasterBuild()) { + // We call _sessionRuntimeError to reset _captureResource.videoDataSource if input changes + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(_sessionRuntimeError:) + name:kSCLensesTweaksDidChangeFileInput + object:nil]; + } + } + return self; +} + +- (SCBlackCameraDetector *)blackCameraDetector +{ + return _captureResource.blackCameraDetector; +} + +- (void)recreateAVCaptureSession +{ + SCTraceODPCompatibleStart(2); + [self _startRunningWithNewCaptureSessionIfNecessary]; +} + +- (void)_handleAVSessionStatusChange:(NSDictionary *)change +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive); + SC_GUARD_ELSE_RETURN(!_captureResource.appInBackground); + BOOL wasRunning = [change[NSKeyValueChangeOldKey] boolValue]; + BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue]; + SCLogCapturerInfo(@"avSession running status changed: %@ -> %@", wasRunning ? @"running" : @"stopped", + isRunning ? 
@"running" : @"stopped"); + + [_captureResource.blackCameraDetector sessionDidChangeIsRunning:isRunning]; + + if (_captureResource.isRecreateSessionFixScheduled) { + SCLogCapturerInfo(@"Scheduled AVCaptureSession recreation, return"); + return; + } + + if (wasRunning != isRunning) { + runOnMainThreadAsynchronously(^{ + if (isRunning) { + [_captureResource.announcer managedCapturer:self didStartRunning:_captureResource.state]; + } else { + [_captureResource.announcer managedCapturer:self didStopRunning:_captureResource.state]; + } + }); + } + + if (!isRunning) { + [_captureResource.queuePerformer perform:^{ + [self _fixAVSessionIfNecessary]; + }]; + } else { + if (!SCDeviceSupportsMetal()) { + [self _fixNonMetalSessionPreviewInconsistency]; + } + } +} + +- (void)_fixAVSessionIfNecessary +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + SC_GUARD_ELSE_RETURN(!_captureResource.appInBackground); + SC_GUARD_ELSE_RETURN(_captureResource.status == SCManagedCapturerStatusRunning); + [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession + uniqueId:@"" + stepName:@"startConsistencyCheckAndFix"]; + + NSTimeInterval timeNow = [NSDate timeIntervalSinceReferenceDate]; + if (timeNow - _captureResource.lastFixSessionTimestamp < kMinFixAVSessionRunningInterval) { + SCLogCoreCameraInfo(@"Fixing session in less than %f, skip", kMinFixAVSessionRunningInterval); + return; + } + _captureResource.lastFixSessionTimestamp = timeNow; + + if (!_captureResource.managedSession.isRunning) { + SCTraceStartSection("Fix AVSession") + { + _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession++; + SCGhostToSnappableSignalCameraFixInconsistency(); + if (_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession <= + kSCManagedCapturerFixInconsistencyARSessionDelayThreshold) { + SCLogCapturerInfo(@"Fixing AVSession"); + [_captureResource.managedSession startRunning]; + SCLogCapturerInfo(@"Fixed AVSession, success : %@", @(_captureResource.managedSession.isRunning)); + [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession + uniqueId:@"" + stepName:@"finishCaptureSessionFix"]; + } else { + // start running with new capture session if the inconsistency fixing not succeeds + SCLogCapturerInfo(@"*** Recreate and run new capture session to fix the inconsistency ***"); + [self _startRunningWithNewCaptureSessionIfNecessary]; + [[SCLogger sharedInstance] logStepToEvent:kSCCameraFixAVCaptureSession + uniqueId:@"" + stepName:@"finishNewCaptureSessionCreation"]; + } + } + SCTraceEndSection(); + [[SCLogger sharedInstance] + logTimedEventEnd:kSCCameraFixAVCaptureSession + uniqueId:@"" + parameters:@{ + @"success" : @(_captureResource.managedSession.isRunning), + @"count" : @(_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession) + }]; + } else { + _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; + [[SCLogger sharedInstance] cancelLogTimedEvent:kSCCameraFixAVCaptureSession uniqueId:@""]; + } + if (_captureResource.managedSession.isRunning) { + // If it is fixed, we signal received the first frame. 
+ SCGhostToSnappableSignalDidReceiveFirstPreviewFrame(); + + // For non-metal preview render, we need to make sure preview is not hidden + if (!SCDeviceSupportsMetal()) { + [self _fixNonMetalSessionPreviewInconsistency]; + } + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:self didStartRunning:_captureResource.state]; + // To approximate this did render timer, it is not accurate. + SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime()); + }); + } else { + [_captureResource.queuePerformer perform:^{ + [self _fixAVSessionIfNecessary]; + } + after:1]; + } + + [_captureResource.blackCameraDetector sessionDidChangeIsRunning:_captureResource.managedSession.isRunning]; +} + +- (void)_fixNonMetalSessionPreviewInconsistency +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN(_captureResource.status == SCManagedCapturerStatusRunning); + if ((!_captureResource.videoPreviewLayer.hidden) != _captureResource.managedSession.isRunning) { + SCTraceStartSection("Fix non-Metal VideoPreviewLayer"); + { + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; + [CATransaction commit]; + } + SCTraceEndSection(); + } +} + +- (SCCaptureResource *)captureResource +{ + SCTraceODPCompatibleStart(2); + return _captureResource; +} + +- (id)lensProcessingCore +{ + SCTraceODPCompatibleStart(2); + @weakify(self); + return (id)[[SCLazyLoadingProxy alloc] initWithInitializationBlock:^id { + @strongify(self); + SCReportErrorIf(self.captureResource.state.lensProcessorReady, @"[Lenses] Lens processing core is not ready"); + return self.captureResource.lensProcessingCore; + }]; +} + +- (SCVideoCaptureSessionInfo)activeSession +{ + SCTraceODPCompatibleStart(2); + return [SCCaptureWorker activeSession:_captureResource]; +} + +- (BOOL)isLensApplied +{ + SCTraceODPCompatibleStart(2); + return [SCCaptureWorker isLensApplied:_captureResource]; +} + +- (BOOL)isVideoMirrored +{ + SCTraceODPCompatibleStart(2); + return [SCCaptureWorker isVideoMirrored:_captureResource]; +} + +#pragma mark - Setup, Start & Stop + +- (void)_updateHRSIEnabled +{ + SCTraceODPCompatibleStart(2); + // Since night mode is low-res, we set high resolution still image output when night mode is enabled + // SoftwareZoom requires higher resolution image to get better zooming result too. + // We also want a higher resolution on newer devices + BOOL is1080pSupported = [SCManagedCaptureDevice is1080pSupported]; + BOOL shouldHRSIEnabled = + (_captureResource.device.isNightModeActive || _captureResource.device.softwareZoom || is1080pSupported); + SCLogCapturerInfo(@"Setting HRSIEnabled to: %d. 
isNightModeActive:%d softwareZoom:%d is1080pSupported:%d", + shouldHRSIEnabled, _captureResource.device.isNightModeActive, + _captureResource.device.softwareZoom, is1080pSupported); + [_captureResource.stillImageCapturer setHighResolutionStillImageOutputEnabled:shouldHRSIEnabled]; +} + +- (void)_updateStillImageStabilizationEnabled +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Enabling still image stabilization"); + [_captureResource.stillImageCapturer enableStillImageStabilization]; +} + +- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Setting up with devicePosition:%lu", (unsigned long)devicePosition); + SCTraceResumeToken token = SCTraceCapture(); + [[SCManagedCapturePreviewLayerController sharedInstance] setupPreviewLayer]; + [_captureResource.queuePerformer perform:^{ + SCTraceResume(token); + [self setupWithDevicePosition:devicePosition completionHandler:completionHandler]; + }]; +} + +- (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler +{ + SCTraceODPCompatibleStart(2); + SCAssertPerformer(_captureResource.queuePerformer); + [SCCaptureWorker setupWithCaptureResource:_captureResource devicePosition:devicePosition]; + + [self addListener:_captureResource.stillImageCapturer]; + [self addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + [self addListener:_captureResource.lensProcessingCore]; + + [self _updateHRSIEnabled]; + [self _updateStillImageStabilizationEnabled]; + + [SCCaptureWorker updateLensesFieldOfViewTracking:_captureResource]; + + if (!SCDeviceSupportsMetal()) { + [SCCaptureWorker makeVideoPreviewLayer:_captureResource]; + } + + // I need to do this setup now. Thus, it is off the main thread. This also means my preview layer controller is + // entangled with the capturer. 
+ [[SCManagedCapturePreviewLayerController sharedInstance] setupRenderPipeline]; + [[SCManagedCapturePreviewLayerController sharedInstance] setManagedCapturer:self]; + _captureResource.status = SCManagedCapturerStatusReady; + + SCManagedCapturerState *state = [_captureResource.state copy]; + AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer; + runOnMainThreadAsynchronously(^{ + SCLogCapturerInfo(@"Did setup with devicePosition:%lu", (unsigned long)devicePosition); + [_captureResource.announcer managedCapturer:self didChangeState:state]; + [_captureResource.announcer managedCapturer:self didChangeCaptureDevicePosition:state]; + if (!SCDeviceSupportsMetal()) { + [_captureResource.announcer managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer]; + } + if (completionHandler) { + completionHandler(); + } + }); +} + +- (void)addSampleBufferDisplayController:(id)sampleBufferDisplayController + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + _captureResource.sampleBufferDisplayController = sampleBufferDisplayController; + [_captureResource.videoDataSource addSampleBufferDisplayController:sampleBufferDisplayController]; + }]; +} + +- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCTraceResumeToken resumeToken = SCTraceCapture(); + [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@""]; + SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context]; + SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token); + [_captureResource.queuePerformer perform:^{ + SCTraceResume(resumeToken); + [SCCaptureWorker startRunningWithCaptureResource:_captureResource + token:token + completionHandler:completionHandler]; + // After startRunning, we need to make sure _fixAVSessionIfNecessary start running. + // The problem: with the new KVO fix strategy, it may happen that AVCaptureSession is in stopped state, thus no + // KVO callback is triggered. + // And calling startRunningAsynchronouslyWithCompletionHandler has no effect because SCManagedCapturerStatus is + // in SCManagedCapturerStatusRunning state + [self _fixAVSessionIfNecessary]; + }]; + return token; +} + +- (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertPerformer(_captureResource.queuePerformer); + SCLogCapturerInfo(@"Stop running. token:%@ context:%@", token, context); + return [SCCaptureWorker stopRunningWithCaptureResource:_captureResource + token:token + completionHandler:completionHandler]; +} + +- (void)stopRunningAsynchronously:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Stop running asynchronously. 
token:%@ context:%@", token, context); + SCTraceResumeToken resumeToken = SCTraceCapture(); + [_captureResource.queuePerformer perform:^{ + SCTraceResume(resumeToken); + [SCCaptureWorker stopRunningWithCaptureResource:_captureResource + token:token + completionHandler:completionHandler]; + }]; +} + +- (void)stopRunningAsynchronously:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + after:(NSTimeInterval)delay + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Stop running asynchronously. token:%@ delay:%f", token, delay); + NSTimeInterval startTime = CACurrentMediaTime(); + [_captureResource.queuePerformer perform:^{ + NSTimeInterval elapsedTime = CACurrentMediaTime() - startTime; + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + // If we haven't started a new running sequence yet, stop running now + [SCCaptureWorker stopRunningWithCaptureResource:_captureResource + token:token + completionHandler:completionHandler]; + } + after:MAX(delay - elapsedTime, 0)]; + }]; +} + +- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Start streaming asynchronously"); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + [SCCaptureWorker startStreaming:_captureResource]; + if (completionHandler) { + runOnMainThreadAsynchronously(completionHandler); + } + }]; +} + +#pragma mark - Recording / Capture + +- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio + captureSessionID:(NSString *)captureSessionID + completionHandler: + (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + [SCCaptureWorker captureStillImageWithCaptureResource:_captureResource + aspectRatio:aspectRatio + captureSessionID:captureSessionID + shouldCaptureFromVideo:[self _shouldCaptureImageFromVideo] + completionHandler:completionHandler + context:context]; + }]; +} + +- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler: + (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + SCLogCapturerInfo(@"Start capturing single video frame"); + _captureResource.frameCap = [[SCSingleFrameStreamCapturer alloc] initWithCompletion:^void(UIImage *image) { + [_captureResource.queuePerformer perform:^{ + [_captureResource.videoDataSource removeListener:_captureResource.frameCap]; + _captureResource.frameCap = nil; + }]; + runOnMainThreadAsynchronously(^{ + [_captureResource.device setTorchActive:NO]; + SCLogCapturerInfo(@"End capturing single video frame"); + completionHandler(image); + }); + }]; + + BOOL waitForTorch = NO; + if (!_captureResource.state.torchActive) { + if (_captureResource.state.flashActive) { + waitForTorch = YES; + [_captureResource.device setTorchActive:YES]; + } + } + [_captureResource.queuePerformer perform:^{ + [_captureResource.videoDataSource addListener:_captureResource.frameCap]; + [SCCaptureWorker startStreaming:_captureResource]; + } + after:(waitForTorch ? 
0.5 : 0)]; + + }]; +} + +- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context + audioConfiguration:(SCAudioConfiguration *)configuration +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCLogCapturerInfo(@"prepare for recording"); + [_captureResource.videoCapturer prepareForRecordingWithAudioConfiguration:configuration]; + }]; +} + +- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings + audioConfiguration:(SCAudioConfiguration *)configuration + maxDuration:(NSTimeInterval)maxDuration + fileURL:(NSURL *)fileURL + captureSessionID:(NSString *)captureSessionID + completionHandler: + (sc_managed_capturer_start_recording_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + [SCCaptureWorker startRecordingWithCaptureResource:_captureResource + outputSettings:outputSettings + audioConfiguration:configuration + maxDuration:maxDuration + fileURL:fileURL + captureSessionID:captureSessionID + completionHandler:completionHandler]; + }]; +} + +- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + [SCCaptureWorker stopRecordingWithCaptureResource:_captureResource]; + }]; +} + +- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + [SCCaptureWorker cancelRecordingWithCaptureResource:_captureResource]; + }]; +} + +- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + [SCCaptureWorker startScanWithScanConfiguration:configuration resource:_captureResource]; + }]; +} + +- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:_captureResource]; + }]; +} + +- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + // Previously _captureResource.videoFrameSampler was conditionally created when setting up, but if this method is + // called it is a + // safe assumption the client wants it to run instead of failing silently, so always create + // _captureResource.videoFrameSampler + if (!_captureResource.videoFrameSampler) { + _captureResource.videoFrameSampler = [SCManagedVideoFrameSampler new]; + [_captureResource.announcer addListener:_captureResource.videoFrameSampler]; + } + SCLogCapturerInfo(@"Sampling next frame"); + [_captureResource.videoFrameSampler sampleNextFrame:completionHandler]; +} + +- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Adding timed task:%@", task); + [_captureResource.queuePerformer perform:^{ + [_captureResource.videoCapturer addTimedTask:task]; + }]; +} + +- (void)clearTimedTasksWithContext:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + [_captureResource.videoCapturer clearTimedTasks]; + }]; +} + +#pragma mark - Utilities + +- 
(void)convertViewCoordinates:(CGPoint)viewCoordinates + completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssert(completionHandler, @"completionHandler shouldn't be nil"); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (SCDeviceSupportsMetal()) { + CGSize viewSize = [UIScreen mainScreen].fixedCoordinateSpace.bounds.size; + CGPoint pointOfInterest = + [_captureResource.device convertViewCoordinates:viewCoordinates + viewSize:viewSize + videoGravity:AVLayerVideoGravityResizeAspectFill]; + runOnMainThreadAsynchronously(^{ + completionHandler(pointOfInterest); + }); + } else { + CGSize viewSize = _captureResource.videoPreviewLayer.bounds.size; + CGPoint pointOfInterest = + [_captureResource.device convertViewCoordinates:viewCoordinates + viewSize:viewSize + videoGravity:_captureResource.videoPreviewLayer.videoGravity]; + runOnMainThreadAsynchronously(^{ + completionHandler(pointOfInterest); + }); + } + }]; +} + +- (void)detectLensCategoryOnNextFrame:(CGPoint)point + lenses:(NSArray *)lenses + completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion + context:(NSString *)context + +{ + SCTraceODPCompatibleStart(2); + SCAssert(completion, @"completionHandler shouldn't be nil"); + SCAssertMainThread(); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + SCLogCapturerInfo(@"Detecting lens category on next frame. point:%@, lenses:%@", NSStringFromCGPoint(point), + [lenses valueForKey:NSStringFromSelector(@selector(lensId))]); + [_captureResource.lensProcessingCore + detectLensCategoryOnNextFrame:point + videoOrientation:_captureResource.videoDataSource.videoOrientation + lenses:lenses + completion:^(SCLensCategory *_Nullable category, NSInteger categoriesCount) { + runOnMainThreadAsynchronously(^{ + if (completion) { + completion(category, categoriesCount); + } + }); + }]; + }]; +} + +#pragma mark - Configurations + +- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Setting device position asynchronously to: %lu", (unsigned long)devicePosition); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + BOOL devicePositionChanged = NO; + BOOL nightModeChanged = NO; + BOOL portraitModeChanged = NO; + BOOL zoomFactorChanged = NO; + BOOL flashSupportedOrTorchSupportedChanged = NO; + SCManagedCapturerState *state = [_captureResource.state copy]; + if (_captureResource.state.devicePosition != devicePosition) { + SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:devicePosition]; + if (device) { + if (!device.delegate) { + device.delegate = _captureResource.captureDeviceHandler; + } + + SCManagedCaptureDevice *prevDevice = _captureResource.device; + [SCCaptureWorker turnARSessionOff:_captureResource]; + BOOL isStreaming = _captureResource.videoDataSource.isStreaming; + if (!SCDeviceSupportsMetal()) { + if (isStreaming) { + [_captureResource.videoDataSource stopStreaming]; + } + } + SCLogCapturerInfo(@"Set device position beginConfiguration"); + [_captureResource.videoDataSource beginConfiguration]; + [_captureResource.managedSession beginConfiguration]; + // Turn off flash for the current device in case it is active + [_captureResource.device setTorchActive:NO]; + if (_captureResource.state.devicePosition == 
SCManagedCaptureDevicePositionFront) { + _captureResource.frontFlashController.torchActive = NO; + } + [_captureResource.deviceCapacityAnalyzer removeFocusListener]; + [_captureResource.device removeDeviceAsInput:_captureResource.managedSession.avSession]; + _captureResource.device = device; + BOOL deviceSet = [_captureResource.device setDeviceAsInput:_captureResource.managedSession.avSession]; + // If we are toggling while recording, set the night mode back to not + // active + if (_captureResource.videoRecording) { + [self _setNightModeActive:NO]; + } + // Sync night mode, torch and flash state with the current device + devicePositionChanged = (_captureResource.state.devicePosition != devicePosition); + nightModeChanged = + (_captureResource.state.isNightModeActive != _captureResource.device.isNightModeActive); + portraitModeChanged = + devicePositionChanged && + (devicePosition == SCManagedCaptureDevicePositionBackDualCamera || + _captureResource.state.devicePosition == SCManagedCaptureDevicePositionBackDualCamera); + zoomFactorChanged = (_captureResource.state.zoomFactor != _captureResource.device.zoomFactor); + if (zoomFactorChanged && _captureResource.device.softwareZoom) { + [SCCaptureWorker softwareZoomWithDevice:_captureResource.device resource:_captureResource]; + } + if (_captureResource.state.flashActive != _captureResource.device.flashActive) { + // preserve flashActive across devices + _captureResource.device.flashActive = _captureResource.state.flashActive; + } + if (_captureResource.state.liveVideoStreaming != device.liveVideoStreamingActive) { + // preserve liveVideoStreaming state across devices + [_captureResource.device setLiveVideoStreaming:_captureResource.state.liveVideoStreaming + session:_captureResource.managedSession.avSession]; + } + if (devicePosition == SCManagedCaptureDevicePositionBackDualCamera && + _captureResource.state.isNightModeActive != _captureResource.device.isNightModeActive) { + // preserve nightMode when switching from back camera to back dual camera + [self _setNightModeActive:_captureResource.state.isNightModeActive]; + } + + flashSupportedOrTorchSupportedChanged = + (_captureResource.state.flashSupported != _captureResource.device.isFlashSupported || + _captureResource.state.torchSupported != _captureResource.device.isTorchSupported); + SCLogCapturerInfo(@"Set device position: %lu -> %lu, night mode: %d -> %d, zoom " + @"factor: %f -> %f, flash supported: %d -> %d, torch supported: %d -> %d", + (unsigned long)_captureResource.state.devicePosition, (unsigned long)devicePosition, + _captureResource.state.isNightModeActive, _captureResource.device.isNightModeActive, + _captureResource.state.zoomFactor, _captureResource.device.zoomFactor, + _captureResource.state.flashSupported, _captureResource.device.isFlashSupported, + _captureResource.state.torchSupported, _captureResource.device.isTorchSupported); + _captureResource.state = [[[[[[[[SCManagedCapturerStateBuilder + withManagedCapturerState:_captureResource.state] setDevicePosition:devicePosition] + setIsNightModeActive:_captureResource.device.isNightModeActive] + setZoomFactor:_captureResource.device.zoomFactor] + setFlashSupported:_captureResource.device.isFlashSupported] + setTorchSupported:_captureResource.device.isTorchSupported] + setIsPortraitModeActive:devicePosition == SCManagedCaptureDevicePositionBackDualCamera] build]; + [self _updateHRSIEnabled]; + [self _updateStillImageStabilizationEnabled]; + // This needs to be done after we have finished configure everything + // for session 
otherwise we + // may set it up without hooking up the video input yet, and will set + // wrong parameter for the + // output. + [_captureResource.videoDataSource setDevicePosition:devicePosition]; + if (@available(ios 11.0, *)) { + if (portraitModeChanged) { + [_captureResource.videoDataSource + setDepthCaptureEnabled:_captureResource.state.isPortraitModeActive]; + [_captureResource.device setCaptureDepthData:_captureResource.state.isPortraitModeActive + session:_captureResource.managedSession.avSession]; + [_captureResource.stillImageCapturer + setPortraitModeCaptureEnabled:_captureResource.state.isPortraitModeActive]; + if (_captureResource.state.isPortraitModeActive) { + SCProcessingPipelineBuilder *processingPipelineBuilder = + [[SCProcessingPipelineBuilder alloc] init]; + processingPipelineBuilder.portraitModeEnabled = YES; + SCProcessingPipeline *pipeline = [processingPipelineBuilder build]; + SCLogCapturerInfo(@"Adding processing pipeline:%@", pipeline); + [_captureResource.videoDataSource addProcessingPipeline:pipeline]; + } else { + [_captureResource.videoDataSource removeProcessingPipeline]; + } + } + } + [_captureResource.deviceCapacityAnalyzer setAsFocusListenerForDevice:_captureResource.device]; + + [SCCaptureWorker updateLensesFieldOfViewTracking:_captureResource]; + [_captureResource.managedSession commitConfiguration]; + [_captureResource.videoDataSource commitConfiguration]; + + // Checks if the flash is activated and if so switches the flash along + // with the camera view. Setting device's torch mode has to be called after -[AVCaptureSession + // commitConfiguration], otherwise flash may be not working, especially for iPhone 8/8 Plus. + if (_captureResource.state.torchActive || + (_captureResource.state.flashActive && _captureResource.videoRecording)) { + [_captureResource.device setTorchActive:YES]; + if (devicePosition == SCManagedCaptureDevicePositionFront) { + _captureResource.frontFlashController.torchActive = YES; + } + } + + SCLogCapturerInfo(@"Set device position commitConfiguration"); + [_captureResource.droppedFramesReporter didChangeCaptureDevicePosition]; + if (!SCDeviceSupportsMetal()) { + if (isStreaming) { + [SCCaptureWorker startStreaming:_captureResource]; + } + } + NSArray *inputs = _captureResource.managedSession.avSession.inputs; + if (!deviceSet) { + [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:@"setDeviceForInput failed"]; + } else if (inputs.count == 0) { + [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:@"no input"]; + } else if (inputs.count > 1) { + [self + _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:[NSString sc_stringWithFormat:@"multiple inputs: %@", inputs]]; + } else { + AVCaptureDeviceInput *input = [inputs firstObject]; + AVCaptureDevice *resultDevice = input.device; + if (resultDevice == prevDevice.device) { + [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:@"stayed on previous device"]; + } else if (resultDevice != _captureResource.device.device) { + [self + _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:[NSString sc_stringWithFormat:@"unknown input device: %@", + resultDevice]]; + } + } + } else { + [self _logFailureSetDevicePositionFrom:_captureResource.state.devicePosition + to:devicePosition + reason:@"no device"]; + } + } else { + 
SCLogCapturerInfo(@"Device position did not change"); + if (_captureResource.device.position != _captureResource.state.devicePosition) { + [self _logFailureSetDevicePositionFrom:state.devicePosition + to:devicePosition + reason:@"state position set incorrectly"]; + } + } + BOOL stateChanged = ![_captureResource.state isEqual:state]; + state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + if (stateChanged) { + [_captureResource.announcer managedCapturer:self didChangeState:state]; + } + if (devicePositionChanged) { + [_captureResource.announcer managedCapturer:self didChangeCaptureDevicePosition:state]; + } + if (nightModeChanged) { + [_captureResource.announcer managedCapturer:self didChangeNightModeActive:state]; + } + if (portraitModeChanged) { + [_captureResource.announcer managedCapturer:self didChangePortraitModeActive:state]; + } + if (zoomFactorChanged) { + [_captureResource.announcer managedCapturer:self didChangeZoomFactor:state]; + } + if (flashSupportedOrTorchSupportedChanged) { + [_captureResource.announcer managedCapturer:self didChangeFlashSupportedAndTorchSupported:state]; + } + if (completionHandler) { + completionHandler(); + } + }); + }]; +} + +- (void)_logFailureSetDevicePositionFrom:(SCManagedCaptureDevicePosition)start + to:(SCManagedCaptureDevicePosition)end + reason:(NSString *)reason +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Device position change failed: %@", reason); + [[SCLogger sharedInstance] logEvent:kSCCameraMetricsCameraFlipFailure + parameters:@{ + @"start" : @(start), + @"end" : @(end), + @"reason" : reason, + }]; +} + +- (void)setFlashActive:(BOOL)flashActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + BOOL flashActiveOrFrontFlashEnabledChanged = NO; + if (_captureResource.state.flashActive != flashActive) { + [_captureResource.device setFlashActive:flashActive]; + SCLogCapturerInfo(@"Set flash active: %d -> %d", _captureResource.state.flashActive, flashActive); + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setFlashActive:flashActive] build]; + flashActiveOrFrontFlashEnabledChanged = YES; + } + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + if (flashActiveOrFrontFlashEnabledChanged) { + [_captureResource.announcer managedCapturer:self didChangeState:state]; + [_captureResource.announcer managedCapturer:self didChangeFlashActive:state]; + } + if (completionHandler) { + completionHandler(); + } + }); + }]; +} + +- (void)setLensesActive:(BOOL)lensesActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [self _setLensesActive:lensesActive + liveVideoStreaming:NO + filterFactory:nil + completionHandler:completionHandler + context:context]; +} + +- (void)setLensesActive:(BOOL)lensesActive + filterFactory:(SCLookseryFilterFactory *)filterFactory + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + [self _setLensesActive:lensesActive + liveVideoStreaming:NO + filterFactory:filterFactory + completionHandler:completionHandler + context:context]; +} + +- (void)setLensesInTalkActive:(BOOL)lensesActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + // Talk requires liveVideoStreaming to be turned on + BOOL 
liveVideoStreaming = lensesActive; + + dispatch_block_t activationBlock = ^{ + [self _setLensesActive:lensesActive + liveVideoStreaming:liveVideoStreaming + filterFactory:nil + completionHandler:completionHandler + context:context]; + }; + + @weakify(self); + [_captureResource.queuePerformer perform:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + // If lenses are enabled in TV3 and it was enabled not from TV3. We have to turn off lenses off at first. + BOOL shouldTurnOffBeforeActivation = liveVideoStreaming && !self->_captureResource.state.liveVideoStreaming && + self->_captureResource.state.lensesActive; + if (shouldTurnOffBeforeActivation) { + [self _setLensesActive:NO + liveVideoStreaming:NO + filterFactory:nil + completionHandler:activationBlock + context:context]; + } else { + activationBlock(); + } + }]; +} + +- (void)_setLensesActive:(BOOL)lensesActive + liveVideoStreaming:(BOOL)liveVideoStreaming + filterFactory:(SCLookseryFilterFactory *)filterFactory + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Setting lenses active to: %d", lensesActive); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + BOOL lensesActiveChanged = NO; + if (_captureResource.state.lensesActive != lensesActive) { + SCLogCapturerInfo(@"Set lenses active: %d -> %d", _captureResource.state.lensesActive, lensesActive); + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setLensesActive:lensesActive] build]; + + // Update capturer settings(orientation and resolution) after changing state, because + // _setLiveVideoStreaming logic is depends on it + [self _setLiveVideoStreaming:liveVideoStreaming]; + + [SCCaptureWorker turnARSessionOff:_captureResource]; + + // Only enable sample buffer display when lenses is not active. + [_captureResource.videoDataSource setSampleBufferDisplayEnabled:!lensesActive]; + [_captureResource.debugInfoDict setObject:!lensesActive ? 
@"True" : @"False" + forKey:@"sampleBufferDisplayEnabled"]; + + lensesActiveChanged = YES; + [_captureResource.lensProcessingCore setAspectRatio:_captureResource.state.liveVideoStreaming]; + [_captureResource.lensProcessingCore setLensesActive:_captureResource.state.lensesActive + videoOrientation:_captureResource.videoDataSource.videoOrientation + filterFactory:filterFactory]; + BOOL modifySource = _captureResource.state.liveVideoStreaming || _captureResource.videoRecording; + [_captureResource.lensProcessingCore setModifySource:modifySource]; + [_captureResource.lensProcessingCore setShouldMuteAllSounds:_captureResource.state.liveVideoStreaming]; + if (_captureResource.fileInputDecider.shouldProcessFileInput) { + [_captureResource.lensProcessingCore setLensesActive:YES + videoOrientation:_captureResource.videoDataSource.videoOrientation + filterFactory:filterFactory]; + } + [_captureResource.videoDataSource + setVideoStabilizationEnabledIfSupported:!_captureResource.state.lensesActive]; + + if (SCIsMasterBuild()) { + // Check that connection configuration is correct + if (_captureResource.state.lensesActive && + _captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { + for (AVCaptureOutput *output in _captureResource.managedSession.avSession.outputs) { + if ([output isKindOfClass:[AVCaptureVideoDataOutput class]]) { + AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo]; + SCAssert(connection.videoMirrored && + connection.videoOrientation == !_captureResource.state.liveVideoStreaming + ? AVCaptureVideoOrientationLandscapeRight + : AVCaptureVideoOrientationPortrait, + @"Connection configuration is not correct"); + } + } + } + } + } + dispatch_block_t viewChangeHandler = ^{ + SCManagedCapturerState *state = [_captureResource.state copy]; // update to latest state always + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:self didChangeState:state]; + [_captureResource.announcer managedCapturer:self didChangeLensesActive:state]; + [_captureResource.videoPreviewGLViewManager setLensesActive:state.lensesActive]; + if (completionHandler) { + completionHandler(); + } + }); + }; + if (lensesActiveChanged && !lensesActive && SCDeviceSupportsMetal()) { + // If we are turning off lenses and have sample buffer display on. + // We need to wait until new frame presented in sample buffer before + // dismiss the Lenses' OpenGL view. 
+ [_captureResource.videoDataSource waitUntilSampleBufferDisplayed:_captureResource.queuePerformer.queue + completionHandler:viewChangeHandler]; + } else { + viewChangeHandler(); + } + }]; +} + +- (void)_setLiveVideoStreaming:(BOOL)liveVideoStreaming +{ + SCAssertPerformer(_captureResource.queuePerformer); + BOOL enableLiveVideoStreaming = liveVideoStreaming; + if (!_captureResource.state.lensesActive && liveVideoStreaming) { + SCLogLensesError(@"LiveVideoStreaming is not allowed when lenses are turned off"); + enableLiveVideoStreaming = NO; + } + SC_GUARD_ELSE_RETURN(enableLiveVideoStreaming != _captureResource.state.liveVideoStreaming); + + // We will disable blackCameraNoOutputDetector if in live video streaming + // In case there is some black camera when doing video call, will consider re-enable it + [self _setBlackCameraNoOutputDetectorEnabled:!liveVideoStreaming]; + + if (!_captureResource.device.isConnected) { + SCLogCapturerError(@"Can't perform configuration for live video streaming"); + } + SCLogCapturerInfo(@"Set live video streaming: %d -> %d", _captureResource.state.liveVideoStreaming, + enableLiveVideoStreaming); + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setLiveVideoStreaming:enableLiveVideoStreaming] build]; + + BOOL isStreaming = _captureResource.videoDataSource.isStreaming; + if (isStreaming) { + [_captureResource.videoDataSource stopStreaming]; + } + + SCLogCapturerInfo(@"Set live video streaming beginConfiguration"); + [_captureResource.managedSession performConfiguration:^{ + [_captureResource.videoDataSource beginConfiguration]; + + // If video chat is active we should use portrait orientation, otherwise landscape right + [_captureResource.videoDataSource setVideoOrientation:_captureResource.state.liveVideoStreaming + ? 
AVCaptureVideoOrientationPortrait + : AVCaptureVideoOrientationLandscapeRight]; + + [_captureResource.device setLiveVideoStreaming:_captureResource.state.liveVideoStreaming + session:_captureResource.managedSession.avSession]; + + [_captureResource.videoDataSource commitConfiguration]; + }]; + + SCLogCapturerInfo(@"Set live video streaming commitConfiguration"); + + if (isStreaming) { + [_captureResource.videoDataSource startStreaming]; + } +} + +- (void)_setBlackCameraNoOutputDetectorEnabled:(BOOL)enabled +{ + if (enabled) { + [self addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + [_captureResource.videoDataSource addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + } else { + [self removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + [_captureResource.videoDataSource + removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + } +} + +- (void)setTorchActiveAsynchronously:(BOOL)torchActive + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Setting torch active asynchronously to: %d", torchActive); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + BOOL torchActiveChanged = NO; + if (_captureResource.state.torchActive != torchActive) { + [_captureResource.device setTorchActive:torchActive]; + if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) { + _captureResource.frontFlashController.torchActive = torchActive; + } + SCLogCapturerInfo(@"Set torch active: %d -> %d", _captureResource.state.torchActive, torchActive); + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setTorchActive:torchActive] build]; + torchActiveChanged = YES; + } + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + if (torchActiveChanged) { + [_captureResource.announcer managedCapturer:self didChangeState:state]; + } + if (completionHandler) { + completionHandler(); + } + }); + }]; +} + +- (void)setNightModeActiveAsynchronously:(BOOL)active + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + // Only do the configuration if current device is connected + if (_captureResource.device.isConnected) { + SCLogCapturerInfo(@"Set night mode beginConfiguration"); + [_captureResource.managedSession performConfiguration:^{ + [self _setNightModeActive:active]; + [self _updateHRSIEnabled]; + [self _updateStillImageStabilizationEnabled]; + }]; + SCLogCapturerInfo(@"Set night mode commitConfiguration"); + } + BOOL nightModeChanged = (_captureResource.state.isNightModeActive != active); + if (nightModeChanged) { + SCLogCapturerInfo(@"Set night mode active: %d -> %d", _captureResource.state.isNightModeActive, active); + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setIsNightModeActive:active] build]; + } + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + if (nightModeChanged) { + [_captureResource.announcer managedCapturer:self didChangeState:state]; + [_captureResource.announcer managedCapturer:self didChangeNightModeActive:state]; + } + if (completionHandler) { + completionHandler(); + } + }); + }]; +} + +- 
(void)_setNightModeActive:(BOOL)active +{ + SCTraceODPCompatibleStart(2); + [_captureResource.device setNightModeActive:active session:_captureResource.managedSession.avSession]; + if ([SCManagedCaptureDevice isEnhancedNightModeSupported]) { + [self _toggleSoftwareNightmode:active]; + } +} + +- (void)_toggleSoftwareNightmode:(BOOL)active +{ + SCTraceODPCompatibleStart(2); + if (active) { + SCLogCapturerInfo(@"Set enhanced night mode active"); + SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init]; + processingPipelineBuilder.enhancedNightMode = YES; + SCProcessingPipeline *pipeline = [processingPipelineBuilder build]; + SCLogCapturerInfo(@"Adding processing pipeline:%@", pipeline); + [_captureResource.videoDataSource addProcessingPipeline:pipeline]; + } else { + SCLogCapturerInfo(@"Removing processing pipeline"); + [_captureResource.videoDataSource removeProcessingPipeline]; + } +} + +- (BOOL)_shouldCaptureImageFromVideo +{ + SCTraceODPCompatibleStart(2); + BOOL isIphone5Series = [SCDeviceName isSimilarToIphone5orNewer] && ![SCDeviceName isSimilarToIphone6orNewer]; + return isIphone5Series && !_captureResource.state.flashActive && ![self isLensApplied]; +} + +- (void)lockZoomWithContext:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + SCLogCapturerInfo(@"Lock zoom"); + _captureResource.allowsZoom = NO; +} + +- (void)unlockZoomWithContext:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + SCLogCapturerInfo(@"Unlock zoom"); + // Don't let anyone unlock the zoom while ARKit is active. When ARKit shuts down, it'll unlock it. + SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive); + _captureResource.allowsZoom = YES; +} + +- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + SC_GUARD_ELSE_RETURN(_captureResource.allowsZoom); + SCLogCapturerInfo(@"Setting zoom factor to: %f", zoomFactor); + [_captureResource.deviceZoomHandler setZoomFactor:zoomFactor forDevice:_captureResource.device immediately:NO]; +} + +- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor + devicePosition:(SCManagedCaptureDevicePosition)devicePosition + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + SC_GUARD_ELSE_RETURN(_captureResource.allowsZoom); + SCLogCapturerInfo(@"Setting zoom factor to: %f devicePosition:%lu", zoomFactor, (unsigned long)devicePosition); + SCManagedCaptureDevice *device = [SCManagedCaptureDevice deviceWithPosition:devicePosition]; + [_captureResource.deviceZoomHandler setZoomFactor:zoomFactor forDevice:device immediately:YES]; +} + +- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest + fromUser:(BOOL)fromUser + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.device.isConnected) { + CGPoint exposurePoint; + if ([self isVideoMirrored]) { + exposurePoint = CGPointMake(pointOfInterest.x, 1 - pointOfInterest.y); + } else { + exposurePoint = pointOfInterest; + } + if (_captureResource.device.softwareZoom) { + // Fix for the zooming factor + [_captureResource.device + setExposurePointOfInterest:CGPointMake( + (exposurePoint.x - 0.5) / _captureResource.device.softwareZoom + 0.5, + (exposurePoint.y - 0.5) / _captureResource.device.softwareZoom + 0.5) + fromUser:fromUser]; + } else { 
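+ // No software zoom in play here, so the normalized point of interest can be passed through unchanged.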
+ [_captureResource.device setExposurePointOfInterest:exposurePoint fromUser:fromUser]; + } + } + if (completionHandler) { + runOnMainThreadAsynchronously(completionHandler); + } + }]; +} + +- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.device.isConnected) { + CGPoint focusPoint; + if ([self isVideoMirrored]) { + focusPoint = CGPointMake(pointOfInterest.x, 1 - pointOfInterest.y); + } else { + focusPoint = pointOfInterest; + } + if (_captureResource.device.softwareZoom) { + // Fix for the zooming factor + [_captureResource.device + setAutofocusPointOfInterest:CGPointMake( + (focusPoint.x - 0.5) / _captureResource.device.softwareZoom + 0.5, + (focusPoint.y - 0.5) / _captureResource.device.softwareZoom + 0.5)]; + } else { + [_captureResource.device setAutofocusPointOfInterest:focusPoint]; + } + } + if (completionHandler) { + runOnMainThreadAsynchronously(completionHandler); + } + }]; +} + +- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest + completionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [SCCaptureWorker setPortraitModePointOfInterestAsynchronously:pointOfInterest + completionHandler:completionHandler + resource:_captureResource]; +} + +- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler + context:(NSString *)context +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + if (_captureResource.device.isConnected) { + [_captureResource.device continuousAutofocus]; + [_captureResource.device setExposurePointOfInterest:CGPointMake(0.5, 0.5) fromUser:NO]; + if (SCCameraTweaksEnablePortraitModeAutofocus()) { + [self setPortraitModePointOfInterestAsynchronously:CGPointMake(0.5, 0.5) + completionHandler:nil + context:context]; + } + } + if (completionHandler) { + runOnMainThreadAsynchronously(completionHandler); + } + }]; +} + +#pragma mark - Add / Remove Listener + +- (void)addListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + // Only do the make sure thing if I added it to announcer fresh. 
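+ // The announcer's addListener: is expected to return NO when the listener is already registered, so repeat registrations skip the initial-state replay below.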
+ SC_GUARD_ELSE_RETURN([_captureResource.announcer addListener:listener]); + // After added the listener, make sure we called all these methods with its + // initial values + [_captureResource.queuePerformer perform:^{ + SCManagedCapturerState *state = [_captureResource.state copy]; + AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer; + LSAGLView *videoPreviewGLView = _captureResource.videoPreviewGLViewManager.view; + runOnMainThreadAsynchronously(^{ + SCTraceStart(); + if ([listener respondsToSelector:@selector(managedCapturer:didChangeState:)]) { + [listener managedCapturer:self didChangeState:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeCaptureDevicePosition:)]) { + [listener managedCapturer:self didChangeCaptureDevicePosition:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeNightModeActive:)]) { + [listener managedCapturer:self didChangeNightModeActive:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashActive:)]) { + [listener managedCapturer:self didChangeFlashActive:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeFlashSupportedAndTorchSupported:)]) { + [listener managedCapturer:self didChangeFlashSupportedAndTorchSupported:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeZoomFactor:)]) { + [listener managedCapturer:self didChangeZoomFactor:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeLowLightCondition:)]) { + [listener managedCapturer:self didChangeLowLightCondition:state]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeAdjustingExposure:)]) { + [listener managedCapturer:self didChangeAdjustingExposure:state]; + } + if (!SCDeviceSupportsMetal()) { + if ([listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewLayer:)]) { + [listener managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer]; + } + } + if (videoPreviewGLView && + [listener respondsToSelector:@selector(managedCapturer:didChangeVideoPreviewGLView:)]) { + [listener managedCapturer:self didChangeVideoPreviewGLView:videoPreviewGLView]; + } + if ([listener respondsToSelector:@selector(managedCapturer:didChangeLensesActive:)]) { + [listener managedCapturer:self didChangeLensesActive:state]; + } + }); + }]; +} + +- (void)removeListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + [_captureResource.announcer removeListener:listener]; +} + +- (void)addVideoDataSourceListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + [_captureResource.videoDataSource addListener:listener]; +} + +- (void)removeVideoDataSourceListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + [_captureResource.videoDataSource removeListener:listener]; +} + +- (void)addDeviceCapacityAnalyzerListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + [_captureResource.deviceCapacityAnalyzer addListener:listener]; +} + +- (void)removeDeviceCapacityAnalyzerListener:(id)listener +{ + SCTraceODPCompatibleStart(2); + [_captureResource.deviceCapacityAnalyzer removeListener:listener]; +} + +#pragma mark - Debug + +- (NSString *)debugInfo +{ + SCTraceODPCompatibleStart(2); + NSMutableString *info = [NSMutableString new]; + [info appendString:@"==== SCManagedCapturer tokens ====\n"]; + [_captureResource.tokenSet enumerateObjectsUsingBlock:^(SCCapturerToken *_Nonnull token, BOOL *_Nonnull stop) { + [info appendFormat:@"%@\n", token.debugDescription]; + }]; + return 
info.copy; +} + +- (NSString *)description +{ + return [self debugDescription]; +} + +- (NSString *)debugDescription +{ + return [NSString sc_stringWithFormat:@"SCManagedCapturer state:\n%@\nVideo streamer info:\n%@", + _captureResource.state.debugDescription, + _captureResource.videoDataSource.description]; +} + +- (CMTime)firstWrittenAudioBufferDelay +{ + SCTraceODPCompatibleStart(2); + return [SCCaptureWorker firstWrittenAudioBufferDelay:_captureResource]; +} + +- (BOOL)audioQueueStarted +{ + SCTraceODPCompatibleStart(2); + return [SCCaptureWorker audioQueueStarted:_captureResource]; +} + +#pragma mark - SCTimeProfilable + ++ (SCTimeProfilerContext)context +{ + return SCTimeProfilerContextCamera; +} + +// We disable and re-enable liveness timer when enter background and foreground + +- (void)applicationDidEnterBackground +{ + SCTraceODPCompatibleStart(2); + [SCCaptureWorker destroyLivenessConsistencyTimer:_captureResource]; + // Hide the view when in background. + if (!SCDeviceSupportsMetal()) { + [_captureResource.queuePerformer perform:^{ + _captureResource.appInBackground = YES; + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + _captureResource.videoPreviewLayer.hidden = YES; + [CATransaction commit]; + }]; + } else { + [_captureResource.queuePerformer perform:^{ + _captureResource.appInBackground = YES; + // If it is running, stop the streaming. + if (_captureResource.status == SCManagedCapturerStatusRunning) { + [_captureResource.videoDataSource stopStreaming]; + } + }]; + } + [[SCManagedCapturePreviewLayerController sharedInstance] applicationDidEnterBackground]; +} + +- (void)applicationWillEnterForeground +{ + SCTraceODPCompatibleStart(2); + if (!SCDeviceSupportsMetal()) { + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + _captureResource.appInBackground = NO; + + if (!SCDeviceSupportsMetal()) { + [self _fixNonMetalSessionPreviewInconsistency]; + } + + // Doing this right now on iOS 10. It will probably work on iOS 9 as well, but need to verify. + if (SC_AT_LEAST_IOS_10) { + [self _runningConsistencyCheckAndFix]; + // For OS version >= iOS 10, try to fix AVCaptureSession when app is entering foreground. + _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; + [self _fixAVSessionIfNecessary]; + } + }]; + } else { + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + _captureResource.appInBackground = NO; + if (_captureResource.status == SCManagedCapturerStatusRunning) { + [_captureResource.videoDataSource startStreaming]; + } + // Doing this right now on iOS 10. It will probably work on iOS 9 as well, but need to verify. + if (SC_AT_LEAST_IOS_10) { + [self _runningConsistencyCheckAndFix]; + // For OS version >= iOS 10, try to fix AVCaptureSession when app is entering foreground. 
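+ // Resetting the retry counter lets _fixAVSessionIfNecessary start a fresh round of session fixes for this foreground transition.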
+ _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; + [self _fixAVSessionIfNecessary]; + } + }]; + } + [[SCManagedCapturePreviewLayerController sharedInstance] applicationWillEnterForeground]; +} + +- (void)applicationWillResignActive +{ + SCTraceODPCompatibleStart(2); + [[SCManagedCapturePreviewLayerController sharedInstance] applicationWillResignActive]; + [_captureResource.queuePerformer perform:^{ + [self _pauseCaptureSessionKVOCheck]; + }]; +} + +- (void)applicationDidBecomeActive +{ + SCTraceODPCompatibleStart(2); + [[SCManagedCapturePreviewLayerController sharedInstance] applicationDidBecomeActive]; + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + // Since we foreground it, do the running consistency check immediately. + // Reset number of retries for fixing status inconsistency + _captureResource.numRetriesFixInconsistencyWithCurrentSession = 0; + [self _runningConsistencyCheckAndFix]; + if (!SC_AT_LEAST_IOS_10) { + // For OS version < iOS 10, try to fix AVCaptureSession after app becomes active. + _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; + [self _fixAVSessionIfNecessary]; + } + [self _resumeCaptureSessionKVOCheck]; + if (_captureResource.status == SCManagedCapturerStatusRunning) { + // Reschedule the timer if we don't have it already + runOnMainThreadAsynchronously(^{ + SCTraceStart(); + [SCCaptureWorker setupLivenessConsistencyTimerIfForeground:_captureResource]; + }); + } + }]; +} + +- (void)_runningConsistencyCheckAndFix +{ + SCTraceODPCompatibleStart(2); + // Don't enforce consistency on simulator, as it'll constantly false-positive and restart the session. + SC_GUARD_ELSE_RETURN(![SCDeviceName isSimulator]); + if (_captureResource.state.arSessionActive) { + [self _runningARSessionConsistencyCheckAndFix]; + } else { + [self _runningAVCaptureSessionConsistencyCheckAndFix]; + } +} + +- (void)_runningARSessionConsistencyCheckAndFix +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + SCAssert(_captureResource.state.arSessionActive, @""); + if (@available(iOS 11.0, *)) { + // Occasionally the capture session will get into a weird "stuck" state. + // If this happens, we'll see that the timestamp for the most recent frame is behind the current time. + // Pausing the session for a moment and restarting it to attempt to jog it loose. 
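+ // The frame-age check below uses separate thresholds for the still-initializing and steady-state tracking cases before deciding to restart the ARSession.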
+ NSTimeInterval timeSinceLastFrame = CACurrentMediaTime() - _captureResource.arSession.currentFrame.timestamp; + BOOL reset = NO; + if (_captureResource.arSession.currentFrame.camera.trackingStateReason == ARTrackingStateReasonInitializing) { + if (timeSinceLastFrame > kSCManagedCapturerFixInconsistencyARSessionHungInitThreshold) { + SCLogCapturerInfo(@"*** Found inconsistency for ARSession timestamp (possible hung init), fix now ***"); + reset = YES; + } + } else if (timeSinceLastFrame > kSCManagedCapturerFixInconsistencyARSessionDelayThreshold) { + SCLogCapturerInfo(@"*** Found inconsistency for ARSession timestamp (init complete), fix now ***"); + reset = YES; + } + if (reset) { + [SCCaptureWorker turnARSessionOff:_captureResource]; + [SCCaptureWorker turnARSessionOn:_captureResource]; + } + } +} + +- (void)_runningAVCaptureSessionConsistencyCheckAndFix +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + SCAssert(!_captureResource.state.arSessionActive, @""); + [[SCLogger sharedInstance] logStepToEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" + uniqueId:@"" + stepName:@"startConsistencyCheckAndFix"]; + // If the video preview layer's hidden status is out of sync with the + // session's running status, + // fix that now. Also, we don't care that much if the status is not running. + if (!SCDeviceSupportsMetal()) { + [self _fixNonMetalSessionPreviewInconsistency]; + } + // Skip the liveness consistency check if we are in background + if (_captureResource.appInBackground) { + SCLogCapturerInfo(@"*** Skipped liveness consistency check, as we are in the background ***"); + return; + } + if (_captureResource.status == SCManagedCapturerStatusRunning && !_captureResource.managedSession.isRunning) { + SCGhostToSnappableSignalCameraFixInconsistency(); + SCLogCapturerInfo(@"*** Found status inconsistency for running, fix now ***"); + _captureResource.numRetriesFixInconsistencyWithCurrentSession++; + if (_captureResource.numRetriesFixInconsistencyWithCurrentSession <= + kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession) { + SCTraceStartSection("Fix non-running session") + { + if (!SCDeviceSupportsMetal()) { + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + [_captureResource.managedSession startRunning]; + [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; + [CATransaction commit]; + } else { + [_captureResource.managedSession startRunning]; + } + } + SCTraceEndSection(); + } else { + SCTraceStartSection("Create new capturer session") + { + // start running with new capture session if the inconsistency fixing not succeeds + // after kSCManagedCapturerFixInconsistencyMaxRetriesWithCurrentSession retries + SCLogCapturerInfo(@"*** Recreate and run new capture session to fix the inconsistency ***"); + [self _startRunningWithNewCaptureSession]; + } + SCTraceEndSection(); + } + BOOL sessionIsRunning = _captureResource.managedSession.isRunning; + if (sessionIsRunning && !SCDeviceSupportsMetal()) { + // If it is fixed, we signal received the first frame. + SCGhostToSnappableSignalDidReceiveFirstPreviewFrame(); + runOnMainThreadAsynchronously(^{ + // To approximate this did render timer, it is not accurate. + SCGhostToSnappableSignalDidRenderFirstPreviewFrame(CACurrentMediaTime()); + }); + } + SCLogCapturerInfo(@"*** Applied inconsistency fix, running state : %@ ***", sessionIsRunning ? 
@"YES" : @"NO"); + if (_captureResource.managedSession.isRunning) { + [[SCLogger sharedInstance] logStepToEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" + uniqueId:@"" + stepName:@"finishConsistencyCheckAndFix"]; + [[SCLogger sharedInstance] + logTimedEventEnd:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" + uniqueId:@"" + parameters:@{ + @"count" : @(_captureResource.numRetriesFixInconsistencyWithCurrentSession) + }]; + } + } else { + [[SCLogger sharedInstance] cancelLogTimedEvent:@"CAMERA_OPEN_WITH_FIX_INCONSISTENCY" uniqueId:@""]; + // Reset number of retries for fixing status inconsistency + _captureResource.numRetriesFixInconsistencyWithCurrentSession = 0; + } + + [_captureResource.blackCameraDetector sessionDidChangeIsRunning:_captureResource.managedSession.isRunning]; +} + +- (void)mediaServicesWereReset +{ + SCTraceODPCompatibleStart(2); + [self mediaServicesWereLost]; + [_captureResource.queuePerformer perform:^{ + /* If the current state requires the ARSession, restart it. + Explicitly flip the arSessionActive flag so that `turnSessionOn` thinks it can reset itself. + */ + if (_captureResource.state.arSessionActive) { + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setArSessionActive:NO] build]; + [SCCaptureWorker turnARSessionOn:_captureResource]; + } + }]; +} + +- (void)mediaServicesWereLost +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + if (!_captureResource.state.arSessionActive && !_captureResource.managedSession.isRunning) { + /* + If the session is running we will trigger + _sessionRuntimeError: so nothing else is + needed here. + */ + [_captureResource.videoCapturer.outputURL reloadAssetKeys]; + } + }]; +} + +- (void)_livenessConsistency:(NSTimer *)timer +{ + SCTraceODPCompatibleStart(2); + SCAssertMainThread(); + // We can directly check the application state because this timer is scheduled + // on the main thread. 
+ if ([UIApplication sharedApplication].applicationState == UIApplicationStateActive) { + [_captureResource.queuePerformer perform:^{ + [self _runningConsistencyCheckAndFix]; + }]; + } +} + +- (void)_sessionRuntimeError:(NSNotification *)notification +{ + SCTraceODPCompatibleStart(2); + NSError *sessionError = notification.userInfo[AVCaptureSessionErrorKey]; + SCLogCapturerError(@"Encountered runtime error for capture session %@", sessionError); + + NSString *errorString = + [sessionError.description stringByReplacingOccurrencesOfString:@" " withString:@"_"].uppercaseString + ?: @"UNKNOWN_ERROR"; + [[SCUserTraceLogger shared] + logUserTraceEvent:[NSString sc_stringWithFormat:@"AVCAPTURESESSION_RUNTIME_ERROR_%@", errorString]]; + + if (sessionError.code == AVErrorMediaServicesWereReset) { + // If it is an AVErrorMediaServicesWereReset error, we can just call startRunning; it is much more lightweight + [_captureResource.queuePerformer perform:^{ + if (!SCDeviceSupportsMetal()) { + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + [_captureResource.managedSession startRunning]; + [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; + [CATransaction commit]; + } else { + [_captureResource.managedSession startRunning]; + } + }]; + } else { + if (_captureResource.isRecreateSessionFixScheduled) { + SCLogCoreCameraInfo(@"Fixing session runtime error is scheduled, skip"); + return; + } + + _captureResource.isRecreateSessionFixScheduled = YES; + NSTimeInterval delay = 0; + NSTimeInterval timeNow = [NSDate timeIntervalSinceReferenceDate]; + if (timeNow - _captureResource.lastSessionRuntimeErrorTime < kMinFixSessionRuntimeErrorInterval) { + SCLogCoreCameraInfo(@"Fixing runtime error session in less than %f, delay", + kMinFixSessionRuntimeErrorInterval); + delay = kMinFixSessionRuntimeErrorInterval; + } + _captureResource.lastSessionRuntimeErrorTime = timeNow; + [_captureResource.queuePerformer perform:^{ + SCTraceStart(); + // Occasionally _captureResource.avSession will throw an error when shutting down. If this happens while + // ARKit is starting up, + // _startRunningWithNewCaptureSession will throw a wrench in ARSession startup and freeze the image. + SC_GUARD_ELSE_RETURN(!_captureResource.state.arSessionActive); + // Need to reset the flag before _startRunningWithNewCaptureSession + _captureResource.isRecreateSessionFixScheduled = NO; + [self _startRunningWithNewCaptureSession]; + [self _fixAVSessionIfNecessary]; + } + after:delay]; + } + + [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsRuntimeError + parameters:@{ + @"error" : sessionError == nil ? @"Unknown error" : sessionError.description, + } + secretParameters:nil + metrics:nil]; +} + +- (void)_startRunningWithNewCaptureSessionIfNecessary +{ + SCTraceODPCompatibleStart(2); + if (_captureResource.isRecreateSessionFixScheduled) { + SCLogCapturerInfo(@"Session recreation is scheduled, return"); + return; + } + _captureResource.isRecreateSessionFixScheduled = YES; + [_captureResource.queuePerformer perform:^{ + // Need to reset the flag before _startRunningWithNewCaptureSession + _captureResource.isRecreateSessionFixScheduled = NO; + [self _startRunningWithNewCaptureSession]; + }]; +} + +- (void)_startRunningWithNewCaptureSession +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + SCLogCapturerInfo(@"Start running with new capture session. 
isRecording:%d isStreaming:%d status:%lu", + _captureResource.videoRecording, _captureResource.videoDataSource.isStreaming, + (unsigned long)_captureResource.status); + + // Mark the start of recreating session + [_captureResource.blackCameraDetector sessionWillRecreate]; + + // Light weight fix gating + BOOL lightWeightFix = SCCameraTweaksSessionLightWeightFixEnabled() || SCCameraTweaksBlackCameraRecoveryEnabled(); + + if (!lightWeightFix) { + [_captureResource.deviceCapacityAnalyzer removeListener:_captureResource.stillImageCapturer]; + [self removeListener:_captureResource.stillImageCapturer]; + [_captureResource.videoDataSource removeListener:_captureResource.lensProcessingCore.capturerListener]; + + [_captureResource.videoDataSource removeListener:_captureResource.deviceCapacityAnalyzer]; + [_captureResource.videoDataSource removeListener:_captureResource.stillImageCapturer]; + + if (SCIsMasterBuild()) { + [_captureResource.videoDataSource removeListener:_captureResource.videoStreamReporter]; + } + [_captureResource.videoDataSource removeListener:_captureResource.videoScanner]; + [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer]; + [_captureResource.videoDataSource + removeListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + } + + [_captureResource.videoCapturer.outputURL reloadAssetKeys]; + + BOOL isStreaming = _captureResource.videoDataSource.isStreaming; + if (_captureResource.videoRecording) { + // Stop video recording prematurely + [self stopRecordingAsynchronouslyWithContext:SCCapturerContext]; + NSError *error = [NSError + errorWithDomain:kSCManagedCapturerErrorDomain + description: + [NSString + sc_stringWithFormat:@"Interrupt video recording to start new session. %@", + @{ + @"isAVSessionRunning" : @(_captureResource.managedSession.isRunning), + @"numRetriesFixInconsistency" : + @(_captureResource.numRetriesFixInconsistencyWithCurrentSession), + @"numRetriesFixAVCaptureSession" : + @(_captureResource.numRetriesFixAVCaptureSessionWithCurrentSession), + @"lastSessionRuntimeErrorTime" : + @(_captureResource.lastSessionRuntimeErrorTime), + }] + code:-1]; + [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoRecordingInterrupted + parameters:@{ + @"error" : error.description + } + secretParameters:nil + metrics:nil]; + } + @try { + if (@available(iOS 11.0, *)) { + [_captureResource.arSession pause]; + if (!lightWeightFix) { + [_captureResource.videoDataSource removeListener:_captureResource.arImageCapturer]; + } + } + [_captureResource.managedSession stopRunning]; + [_captureResource.device removeDeviceAsInput:_captureResource.managedSession.avSession]; + } @catch (NSException *exception) { + SCLogCapturerError(@"Encountered Exception %@", exception); + } @finally { + // Nil out device inputs from both devices + [[SCManagedCaptureDevice front] resetDeviceAsInput]; + [[SCManagedCaptureDevice back] resetDeviceAsInput]; + } + + if (!SCDeviceSupportsMetal()) { + // Redo the video preview to mitigate https://ph.sc-corp.net/T42584 + [SCCaptureWorker redoVideoPreviewLayer:_captureResource]; + } + +#if !TARGET_IPHONE_SIMULATOR + if (@available(iOS 11.0, *)) { + _captureResource.arSession = [[ARSession alloc] init]; + _captureResource.arImageCapturer = + [_captureResource.arImageCaptureProvider arImageCapturerWith:_captureResource.queuePerformer + lensProcessingCore:_captureResource.lensProcessingCore]; + } + [self _resetAVCaptureSession]; +#endif + [_captureResource.managedSession.avSession 
setAutomaticallyConfiguresApplicationAudioSession:NO]; + [_captureResource.device setDeviceAsInput:_captureResource.managedSession.avSession]; + + if (_captureResource.fileInputDecider.shouldProcessFileInput) { + // Keep the same logic, always create new VideoDataSource + [self _setupNewVideoFileDataSource]; + } else { + if (!lightWeightFix) { + [self _setupNewVideoDataSource]; + } else { + [self _setupVideoDataSourceWithNewSession]; + } + } + + if (_captureResource.status == SCManagedCapturerStatusRunning) { + if (!SCDeviceSupportsMetal()) { + [CATransaction begin]; + [CATransaction setDisableActions:YES]; + // Set the session to be the new session before start running. + _captureResource.videoPreviewLayer.session = _captureResource.managedSession.avSession; + if (!_captureResource.appInBackground) { + [_captureResource.managedSession startRunning]; + } + [SCCaptureWorker setupVideoPreviewLayer:_captureResource]; + [CATransaction commit]; + } else { + if (!_captureResource.appInBackground) { + [_captureResource.managedSession startRunning]; + } + } + } + // Since this start and stop happens in one block, we don't have to worry + // about streamingSequence issues + if (isStreaming) { + [_captureResource.videoDataSource startStreaming]; + } + SCManagedCapturerState *state = [_captureResource.state copy]; + AVCaptureVideoPreviewLayer *videoPreviewLayer = _captureResource.videoPreviewLayer; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:self didResetFromRuntimeError:state]; + if (!SCDeviceSupportsMetal()) { + [_captureResource.announcer managedCapturer:self didChangeVideoPreviewLayer:videoPreviewLayer]; + } + }); + + // Mark the end of recreating session + [_captureResource.blackCameraDetector sessionDidRecreate]; +} + +/** + * Heavy-weight session fixing approach: recreating everything + */ +- (void)_setupNewVideoDataSource +{ + if (@available(iOS 11.0, *)) { + _captureResource.videoDataSource = + [[SCManagedVideoStreamer alloc] initWithSession:_captureResource.managedSession.avSession + arSession:_captureResource.arSession + devicePosition:_captureResource.state.devicePosition]; + [_captureResource.videoDataSource addListener:_captureResource.arImageCapturer]; + if (_captureResource.state.isPortraitModeActive) { + [_captureResource.videoDataSource setDepthCaptureEnabled:YES]; + + SCProcessingPipelineBuilder *processingPipelineBuilder = [[SCProcessingPipelineBuilder alloc] init]; + processingPipelineBuilder.portraitModeEnabled = YES; + SCProcessingPipeline *pipeline = [processingPipelineBuilder build]; + [_captureResource.videoDataSource addProcessingPipeline:pipeline]; + } + } else { + _captureResource.videoDataSource = + [[SCManagedVideoStreamer alloc] initWithSession:_captureResource.managedSession.avSession + devicePosition:_captureResource.state.devicePosition]; + } + + [self _setupVideoDataSourceListeners]; +} + +- (void)_setupNewVideoFileDataSource +{ + _captureResource.videoDataSource = + [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:_captureResource.fileInputDecider.fileURL]; + [_captureResource.lensProcessingCore setLensesActive:YES + videoOrientation:_captureResource.videoDataSource.videoOrientation + filterFactory:nil]; + runOnMainThreadAsynchronously(^{ + [_captureResource.videoPreviewGLViewManager prepareViewIfNecessary]; + }); + [self _setupVideoDataSourceListeners]; +} + +/** + * Light-weight session fixing approach: recreating AVCaptureSession / AVCaptureOutput, and bind it to the new session + */ +- 
(void)_setupVideoDataSourceWithNewSession +{ + if (@available(iOS 11.0, *)) { + SCManagedVideoStreamer *streamer = (SCManagedVideoStreamer *)_captureResource.videoDataSource; + [streamer setupWithSession:_captureResource.managedSession.avSession + devicePosition:_captureResource.state.devicePosition]; + [streamer setupWithARSession:_captureResource.arSession]; + } else { + SCManagedVideoStreamer *streamer = (SCManagedVideoStreamer *)_captureResource.videoDataSource; + [streamer setupWithSession:_captureResource.managedSession.avSession + devicePosition:_captureResource.state.devicePosition]; + } + [_captureResource.stillImageCapturer setupWithSession:_captureResource.managedSession.avSession]; +} + +- (void)_setupVideoDataSourceListeners +{ + if (_captureResource.videoFrameSampler) { + [_captureResource.announcer addListener:_captureResource.videoFrameSampler]; + } + + [_captureResource.videoDataSource addSampleBufferDisplayController:_captureResource.sampleBufferDisplayController]; + [_captureResource.videoDataSource addListener:_captureResource.lensProcessingCore.capturerListener]; + [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer]; + if (SCIsMasterBuild()) { + [_captureResource.videoDataSource addListener:_captureResource.videoStreamReporter]; + } + [_captureResource.videoDataSource addListener:_captureResource.videoScanner]; + [_captureResource.videoDataSource addListener:_captureResource.blackCameraDetector.blackCameraNoOutputDetector]; + _captureResource.stillImageCapturer = [SCManagedStillImageCapturer capturerWithCaptureResource:_captureResource]; + [_captureResource.deviceCapacityAnalyzer addListener:_captureResource.stillImageCapturer]; + [_captureResource.videoDataSource addListener:_captureResource.stillImageCapturer]; + + [self addListener:_captureResource.stillImageCapturer]; +} + +- (void)_resetAVCaptureSession +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + _captureResource.numRetriesFixAVCaptureSessionWithCurrentSession = 0; + // lazily initialize _captureResource.kvoController on background thread + if (!_captureResource.kvoController) { + _captureResource.kvoController = [[FBKVOController alloc] initWithObserver:self]; + } + [_captureResource.kvoController unobserve:_captureResource.managedSession.avSession]; + _captureResource.managedSession = + [[SCManagedCaptureSession alloc] initWithBlackCameraDetector:_captureResource.blackCameraDetector]; + [_captureResource.kvoController observe:_captureResource.managedSession.avSession + keyPath:@keypath(_captureResource.managedSession.avSession, running) + options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld + action:_captureResource.handleAVSessionStatusChange]; +} + +- (void)_pauseCaptureSessionKVOCheck +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + [_captureResource.kvoController unobserve:_captureResource.managedSession.avSession]; +} + +- (void)_resumeCaptureSessionKVOCheck +{ + SCTraceODPCompatibleStart(2); + SCAssert([_captureResource.queuePerformer isCurrentPerformer], @""); + [_captureResource.kvoController observe:_captureResource.managedSession.avSession + keyPath:@keypath(_captureResource.managedSession.avSession, running) + options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld + action:_captureResource.handleAVSessionStatusChange]; +} + +- (id)currentVideoDataSource +{ + SCTraceODPCompatibleStart(2); + return 
_captureResource.videoDataSource; +} + +- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback +{ + SCTraceODPCompatibleStart(2); + [_captureResource.queuePerformer perform:^{ + // Front and back should be available if user has no restriction on camera. + BOOL front = [[SCManagedCaptureDevice front] isAvailable]; + BOOL back = [[SCManagedCaptureDevice back] isAvailable]; + AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]; + runOnMainThreadAsynchronously(^{ + callback(front, back, status); + }); + }]; +} + +- (SCSnapCreationTriggers *)snapCreationTriggers +{ + return _captureResource.snapCreationTriggers; +} + +- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector + deviceMotionProvider:(id)deviceMotionProvider + fileInputDecider:(id)fileInputDecider + arImageCaptureProvider:(id)arImageCaptureProvider + glviewManager:(id)glViewManager + lensAPIProvider:(id)lensAPIProvider + lsaComponentTracker:(id)lsaComponentTracker + managedCapturerPreviewLayerControllerDelegate: + (id)previewLayerControllerDelegate +{ + _captureResource.blackCameraDetector = blackCameraDetector; + _captureResource.deviceMotionProvider = deviceMotionProvider; + _captureResource.fileInputDecider = fileInputDecider; + _captureResource.arImageCaptureProvider = arImageCaptureProvider; + _captureResource.videoPreviewGLViewManager = glViewManager; + [_captureResource.videoPreviewGLViewManager configureWithCaptureResource:_captureResource]; + _captureResource.lensAPIProvider = lensAPIProvider; + _captureResource.lsaTrackingComponentHandler = lsaComponentTracker; + [_captureResource.lsaTrackingComponentHandler configureWithCaptureResource:_captureResource]; + _captureResource.previewLayerControllerDelegate = previewLayerControllerDelegate; + [SCManagedCapturePreviewLayerController sharedInstance].delegate = previewLayerControllerDelegate; +} + +@end diff --git a/ManagedCapturer/SCManagedCapturerV1_Private.h b/ManagedCapturer/SCManagedCapturerV1_Private.h new file mode 100644 index 0000000..9826155 --- /dev/null +++ b/ManagedCapturer/SCManagedCapturerV1_Private.h @@ -0,0 +1,20 @@ +// +// SCManagedCapturerV1_Private.h +// Snapchat +// +// Created by Jingtian Yang on 20/12/2017. +// + +#import "SCManagedCapturerV1.h" + +@interface SCManagedCapturerV1 () + +- (SCCaptureResource *)captureResource; + +- (void)setupWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition + completionHandler:(dispatch_block_t)completionHandler; + +- (BOOL)stopRunningWithCaptureToken:(SCCapturerToken *)token + completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler + context:(NSString *)context; +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.h b/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.h new file mode 100644 index 0000000..07aca7f --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.h @@ -0,0 +1,32 @@ +// +// SCManagedDeviceCapacityAnalyzer.h +// Snapchat +// +// Created by Liu Liu on 5/1/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. 
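+//
+//  Observes sample buffer metadata (exposure time, ISO and brightness) and announces changes in
+//  exposure adjustment, focus adjustment, low light condition and overall lighting condition to
+//  its listeners.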
+//
+
+#import "SCManagedDeviceCapacityAnalyzerListener.h"
+
+#import
+
+#import
+
+@class SCManagedCaptureDevice;
+@protocol SCPerforming;
+
+extern NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHigh;
+
+@interface SCManagedDeviceCapacityAnalyzer : NSObject
+
+@property (nonatomic, assign) BOOL lowLightConditionEnabled;
+
+- (instancetype)initWithPerformer:(id)performer;
+
+- (void)addListener:(id)listener;
+- (void)removeListener:(id)listener;
+
+- (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice;
+- (void)removeFocusListener;
+
+@end
diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.m b/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.m
new file mode 100644
index 0000000..94f2804
--- /dev/null
+++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzer.m
@@ -0,0 +1,294 @@
+//
+//  SCManagedDeviceCapacityAnalyzer.m
+//  Snapchat
+//
+//  Created by Liu Liu on 5/1/15.
+//  Copyright (c) 2015 Liu Liu. All rights reserved.
+//
+
+#import "SCManagedDeviceCapacityAnalyzer.h"
+
+#import "SCCameraSettingUtils.h"
+#import "SCCameraTweaks.h"
+#import "SCManagedCaptureDevice+SCManagedDeviceCapacityAnalyzer.h"
+#import "SCManagedCaptureDevice.h"
+#import "SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h"
+
+#import
+#import
+#import
+#import
+#import
+
+#import
+
+@import ImageIO;
+@import QuartzCore;
+
+NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI = 500;
+
+NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S = 800;
+
+NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7 = 640;
+
+NSInteger const kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8 = 800;
+
+// After this many consecutive frames without a change in exposure time or ISO, we assume that the exposure adjustment
+// has ended.
+static NSInteger const kExposureUnchangedHighWatermark = 5;
+// If the deadline is reached and we still haven't hit the high watermark, we consult the low watermark and at least
+// give the system a chance to take not-so-great pictures.
+static NSInteger const kExposureUnchangedLowWatermark = 1;
+static NSTimeInterval const kExposureUnchangedDeadline = 0.2;
+
+// It seems that between ISO 500 and 640, the brightness value is always somewhere around -0.4 to -0.5.
+// Therefore, this threshold probably will work fine.
+static float const kBrightnessValueThreshold = -2.25;
+// Give some margin between being recognized as bright enough and not having enough light.
+// If the brightness is lower than kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval,
+// we count the frame as a low light frame. Only if the brightness is higher than
+// kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval do we consider that we
+// have enough light and reset the low light frame count to 0. 0.5 was chosen because in a dark
+// environment the brightness value changes by +-0.3 with minor orientation changes.
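+// For example, with kBrightnessValueThreshold = -2.25 and a confidence interval of 0.5, only frames
+// with brightness below -2.75 are counted as low light, and only frames with brightness above -1.75
+// reset that count; values in between leave the low light state unchanged.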
+static float const kBrightnessValueThresholdConfidenceInterval = 0.5;
+// If we are in a good light condition for this many consecutive frames, we are ready to switch back
+static NSInteger const kLowLightBoostUnchangedLowWatermark = 7;
+// Requires that we are in a low light condition for ~2 seconds (assuming 20~30fps)
+static NSInteger const kLowLightBoostUnchangedHighWatermark = 25;
+
+static NSInteger const kSCLightingConditionDecisionWatermark = 15; // For 30 fps, it is 0.5 second
+static float const kSCLightingConditionNormalThreshold = 0;
+static float const kSCLightingConditionDarkThreshold = -3;
+
+@implementation SCManagedDeviceCapacityAnalyzer {
+    float _lastExposureTime;
+    int _lastISOSpeedRating;
+    NSTimeInterval _lastAdjustingExposureStartTime;
+
+    NSInteger _lowLightBoostLowLightCount;
+    NSInteger _lowLightBoostEnoughLightCount;
+    NSInteger _exposureUnchangedCount;
+    NSInteger _maxISOPresetHigh;
+
+    NSInteger _normalLightingConditionCount;
+    NSInteger _darkLightingConditionCount;
+    NSInteger _extremeDarkLightingConditionCount;
+    SCCapturerLightingConditionType _lightingCondition;
+
+    BOOL _lowLightCondition;
+    BOOL _adjustingExposure;
+
+    SCManagedDeviceCapacityAnalyzerListenerAnnouncer *_announcer;
+    FBKVOController *_observeController;
+    id _performer;
+
+    float
+        _lastBrightnessToLog; // Remember the last logged brightness; only log again if it changes by more than a threshold
+}
+
+- (instancetype)initWithPerformer:(id)performer
+{
+    SCTraceStart();
+    self = [super init];
+    if (self) {
+        _performer = performer;
+        _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6WithHRSI;
+        if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone8orNewer]) {
+            _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor8;
+        } else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone7orNewer]) {
+            _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor7;
+        } else if ([SCDeviceName isIphone] && [SCDeviceName isSimilarToIphone6SorNewer]) {
+            // iPhone 6S supports a higher ISO rate for video recording; accommodate that.
+ _maxISOPresetHigh = kSCManagedDeviceCapacityAnalyzerMaxISOPresetHighFor6S; + } + _announcer = [[SCManagedDeviceCapacityAnalyzerListenerAnnouncer alloc] init]; + _observeController = [[FBKVOController alloc] initWithObserver:self]; + } + return self; +} + +- (void)addListener:(id)listener +{ + SCTraceStart(); + [_announcer addListener:listener]; +} + +- (void)removeListener:(id)listener +{ + SCTraceStart(); + [_announcer removeListener:listener]; +} + +- (void)setLowLightConditionEnabled:(BOOL)lowLightConditionEnabled +{ + SCTraceStart(); + if (_lowLightConditionEnabled != lowLightConditionEnabled) { + _lowLightConditionEnabled = lowLightConditionEnabled; + if (!lowLightConditionEnabled) { + _lowLightBoostLowLightCount = 0; + _lowLightBoostEnoughLightCount = 0; + _lowLightCondition = NO; + [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition]; + } + } +} + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + SCTraceStart(); + SampleBufferMetadata metadata = { + .isoSpeedRating = _lastISOSpeedRating, .brightness = 0, .exposureTime = _lastExposureTime, + }; + retrieveSampleBufferMetadata(sampleBuffer, &metadata); + if ((SCIsDebugBuild() || SCIsMasterBuild()) + // Enable this on internal build only (excluding alpha) + && fabs(metadata.brightness - _lastBrightnessToLog) > 0.5f) { + // Log only when brightness change is greater than 0.5 + _lastBrightnessToLog = metadata.brightness; + SCLogCoreCameraInfo(@"ExposureTime: %f, ISO: %ld, Brightness: %f", metadata.exposureTime, + (long)metadata.isoSpeedRating, metadata.brightness); + } + [self _automaticallyDetectAdjustingExposure:metadata.exposureTime ISOSpeedRating:metadata.isoSpeedRating]; + _lastExposureTime = metadata.exposureTime; + _lastISOSpeedRating = metadata.isoSpeedRating; + if (!_adjustingExposure && _lastISOSpeedRating <= _maxISOPresetHigh && + _lowLightConditionEnabled) { // If we are not recording, we are not at ISO higher than we needed + [self _automaticallyDetectLowLightCondition:metadata.brightness]; + } + [self _automaticallyDetectLightingConditionWithBrightness:metadata.brightness]; + [_announcer managedDeviceCapacityAnalyzer:self didChangeBrightness:metadata.brightness]; +} + +- (void)setAsFocusListenerForDevice:(SCManagedCaptureDevice *)captureDevice +{ + SCTraceStart(); + [_observeController observe:captureDevice.device + keyPath:@keypath(captureDevice.device, adjustingFocus) + options:NSKeyValueObservingOptionNew + action:@selector(_adjustingFocusingChanged:)]; +} + +- (void)removeFocusListener +{ + SCTraceStart(); + [_observeController unobserveAll]; +} + +#pragma mark - Private methods + +- (void)_automaticallyDetectAdjustingExposure:(float)currentExposureTime ISOSpeedRating:(NSInteger)currentISOSpeedRating +{ + SCTraceStart(); + if (currentISOSpeedRating != _lastISOSpeedRating || fabsf(currentExposureTime - _lastExposureTime) > FLT_MIN) { + _exposureUnchangedCount = 0; + } else { + ++_exposureUnchangedCount; + } + NSTimeInterval currentTime = CACurrentMediaTime(); + if (_exposureUnchangedCount >= kExposureUnchangedHighWatermark || + (currentTime - _lastAdjustingExposureStartTime > kExposureUnchangedDeadline && + _exposureUnchangedCount >= kExposureUnchangedLowWatermark)) { + // The exposure values haven't changed for kExposureUnchangedHighWatermark times, considering the adjustment + // as done. 
Otherwise, if we waited long enough, and the exposure unchange count at least reached low + // watermark, we will call it done and give it a shot. + if (_adjustingExposure) { + _adjustingExposure = NO; + SCLogGeneralInfo(@"Adjusting exposure is done, unchanged count: %zd", _exposureUnchangedCount); + [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure]; + } + } else { + // Otherwise signal that we have adjustments on exposure + if (!_adjustingExposure) { + _adjustingExposure = YES; + _lastAdjustingExposureStartTime = currentTime; + [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingExposure:_adjustingExposure]; + } + } +} + +- (void)_automaticallyDetectLowLightCondition:(float)brightness +{ + SCTraceStart(); + if (!_lowLightCondition && _lastISOSpeedRating == _maxISOPresetHigh) { + // If we are at the stage that we need to use higher ISO (because current ISO is maxed out) + // and the brightness is lower than the threshold + if (brightness < kBrightnessValueThreshold - kBrightnessValueThresholdConfidenceInterval) { + // Either count how many frames like this continuously we encountered + // Or if reached the watermark, change the low light boost mode + if (_lowLightBoostLowLightCount >= kLowLightBoostUnchangedHighWatermark) { + _lowLightCondition = YES; + [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition]; + } else { + ++_lowLightBoostLowLightCount; + } + } else if (brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) { + // If the brightness is consistently better, reset the low light boost unchanged count to 0 + _lowLightBoostLowLightCount = 0; + } + } else if (_lowLightCondition) { + // Check the current ISO to see if we can disable low light boost + if (_lastISOSpeedRating <= _maxISOPresetHigh && + brightness >= kBrightnessValueThreshold + kBrightnessValueThresholdConfidenceInterval) { + if (_lowLightBoostEnoughLightCount >= kLowLightBoostUnchangedLowWatermark) { + _lowLightCondition = NO; + [_announcer managedDeviceCapacityAnalyzer:self didChangeLowLightCondition:_lowLightCondition]; + _lowLightBoostEnoughLightCount = 0; + } else { + ++_lowLightBoostEnoughLightCount; + } + } + } +} + +- (void)_adjustingFocusingChanged:(NSDictionary *)change +{ + SCTraceStart(); + BOOL adjustingFocus = [change[NSKeyValueChangeNewKey] boolValue]; + [_performer perform:^{ + [_announcer managedDeviceCapacityAnalyzer:self didChangeAdjustingFocus:adjustingFocus]; + }]; +} + +- (void)_automaticallyDetectLightingConditionWithBrightness:(float)brightness +{ + if (brightness >= kSCLightingConditionNormalThreshold) { + if (_normalLightingConditionCount > kSCLightingConditionDecisionWatermark) { + if (_lightingCondition != SCCapturerLightingConditionTypeNormal) { + _lightingCondition = SCCapturerLightingConditionTypeNormal; + [_announcer managedDeviceCapacityAnalyzer:self + didChangeLightingCondition:SCCapturerLightingConditionTypeNormal]; + } + } else { + _normalLightingConditionCount++; + } + _darkLightingConditionCount = 0; + _extremeDarkLightingConditionCount = 0; + } else if (brightness >= kSCLightingConditionDarkThreshold) { + if (_darkLightingConditionCount > kSCLightingConditionDecisionWatermark) { + if (_lightingCondition != SCCapturerLightingConditionTypeDark) { + _lightingCondition = SCCapturerLightingConditionTypeDark; + [_announcer managedDeviceCapacityAnalyzer:self + didChangeLightingCondition:SCCapturerLightingConditionTypeDark]; + } + } else { + 
_darkLightingConditionCount++; + } + _normalLightingConditionCount = 0; + _extremeDarkLightingConditionCount = 0; + } else { + if (_extremeDarkLightingConditionCount > kSCLightingConditionDecisionWatermark) { + if (_lightingCondition != SCCapturerLightingConditionTypeExtremeDark) { + _lightingCondition = SCCapturerLightingConditionTypeExtremeDark; + [_announcer managedDeviceCapacityAnalyzer:self + didChangeLightingCondition:SCCapturerLightingConditionTypeExtremeDark]; + } + } else { + _extremeDarkLightingConditionCount++; + } + _normalLightingConditionCount = 0; + _darkLightingConditionCount = 0; + } +} + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.h b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.h new file mode 100644 index 0000000..537f517 --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.h @@ -0,0 +1,20 @@ +// +// SCManagedDeviceCapacityAnalyzerHandler.h +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. +// + +#import "SCManagedDeviceCapacityAnalyzerListener.h" + +#import + +@class SCCaptureResource; + +@interface SCManagedDeviceCapacityAnalyzerHandler : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource; + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.m b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.m new file mode 100644 index 0000000..d0f660c --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerHandler.m @@ -0,0 +1,72 @@ +// +// SCManagedDeviceCapacityAnalyzerHandler.m +// Snapchat +// +// Created by Jingtian Yang on 11/12/2017. +// + +#import "SCManagedDeviceCapacityAnalyzerHandler.h" + +#import "SCCaptureResource.h" +#import "SCManagedCapturer.h" +#import "SCManagedCapturerLogging.h" +#import "SCManagedCapturerState.h" +#import "SCManagedCapturerStateBuilder.h" + +#import +#import +#import +#import + +@interface SCManagedDeviceCapacityAnalyzerHandler () { + __weak SCCaptureResource *_captureResource; +} +@end + +@implementation SCManagedDeviceCapacityAnalyzerHandler + +- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource +{ + self = [super init]; + if (self) { + SCAssert(captureResource, @""); + _captureResource = captureResource; + } + return self; +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLowLightCondition:(BOOL)lowLightCondition +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Change Low Light Condition %d", lowLightCondition); + [_captureResource.queuePerformer perform:^{ + _captureResource.state = [[[SCManagedCapturerStateBuilder withManagedCapturerState:_captureResource.state] + setLowLightCondition:lowLightCondition] build]; + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state]; + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didChangeLowLightCondition:state]; + }); + }]; +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure +{ + SCTraceODPCompatibleStart(2); + SCLogCapturerInfo(@"Capacity Analyzer Changes adjustExposure %d", adjustingExposure); + [_captureResource.queuePerformer perform:^{ + _captureResource.state = [[[SCManagedCapturerStateBuilder 
withManagedCapturerState:_captureResource.state] + setAdjustingExposure:adjustingExposure] build]; + SCManagedCapturerState *state = [_captureResource.state copy]; + runOnMainThreadAsynchronously(^{ + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didChangeState:state]; + [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] + didChangeAdjustingExposure:state]; + }); + }]; +} + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListener.h b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListener.h new file mode 100644 index 0000000..21f9beb --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListener.h @@ -0,0 +1,35 @@ +//#!announcer.rb +// SCManagedDeviceCapacityAnalyzerListener.h +// Snapchat +// +// Created by Liu Liu on 5/4/15. +// Copyright (c) 2015 Liu Liu. All rights reserved. +// + +#import "SCCapturerDefines.h" + +#import + +@class SCManagedDeviceCapacityAnalyzer; + +@protocol SCManagedDeviceCapacityAnalyzerListener + +@optional + +// These callbacks happen on a internal queue +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLowLightCondition:(BOOL)lowLightCondition; + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure; + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingFocus:(BOOL)adjustingFocus; + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeBrightness:(float)adjustingBrightness; + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition; + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h new file mode 100644 index 0000000..933e0fa --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h @@ -0,0 +1,12 @@ +// Generated by the announcer.rb DO NOT EDIT!! + +#import "SCManagedDeviceCapacityAnalyzerListener.h" + +#import + +@interface SCManagedDeviceCapacityAnalyzerListenerAnnouncer : NSObject + +- (void)addListener:(id)listener; +- (void)removeListener:(id)listener; + +@end diff --git a/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm new file mode 100644 index 0000000..85581ff --- /dev/null +++ b/ManagedCapturer/SCManagedDeviceCapacityAnalyzerListenerAnnouncer.mm @@ -0,0 +1,146 @@ +// Generated by the announcer.rb DO NOT EDIT!! 
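+//
+// The listener list below is copy-on-write: addListener:/removeListener: build a fresh vector
+// under a mutex and atomically swap it in, while the announce methods only atomic_load the
+// current vector, so announcing never blocks on listener registration.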
+ +#import "SCManagedDeviceCapacityAnalyzerListenerAnnouncer.h" + +#include +using std::lock_guard; +using std::mutex; +#include +using std::find; +using std::make_shared; +using std::shared_ptr; +using std::vector; + +@implementation SCManagedDeviceCapacityAnalyzerListenerAnnouncer { + mutex _mutex; + shared_ptr>> _listeners; +} + +- (NSString *)description +{ + auto listeners = atomic_load(&self->_listeners); + NSMutableString *desc = [NSMutableString string]; + [desc appendFormat:@": [", self]; + for (int i = 0; i < listeners->size(); ++i) { + [desc appendFormat:@"%@", (*listeners)[i]]; + if (i != listeners->size() - 1) { + [desc appendString:@", "]; + } + } + [desc appendString:@"]"]; + return desc; +} + +- (void)addListener:(id)listener +{ + lock_guard lock(_mutex); + auto listeners = make_shared>>(); + if (_listeners != nil) { + // The listener we want to add already exists + if (find(_listeners->begin(), _listeners->end(), listener) != _listeners->end()) { + return; + } + for (auto &one : *_listeners) { + if (one != nil) { + listeners->push_back(one); + } + } + listeners->push_back(listener); + atomic_store(&self->_listeners, listeners); + } else { + listeners->push_back(listener); + atomic_store(&self->_listeners, listeners); + } +} + +- (void)removeListener:(id)listener +{ + lock_guard lock(_mutex); + if (_listeners == nil) { + return; + } + // If the only item in the listener list is the one we want to remove, store it back to nil again + if (_listeners->size() == 1 && (*_listeners)[0] == listener) { + atomic_store(&self->_listeners, shared_ptr>>()); + return; + } + auto listeners = make_shared>>(); + for (auto &one : *_listeners) { + if (one != nil && one != listener) { + listeners->push_back(one); + } + } + atomic_store(&self->_listeners, listeners); +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLowLightCondition:(BOOL)lowLightCondition +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLowLightCondition:)]) { + [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer + didChangeLowLightCondition:lowLightCondition]; + } + } + } +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingExposure:)]) { + [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:adjustingExposure]; + } + } + } +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingFocus:(BOOL)adjustingFocus +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeAdjustingFocus:)]) { + [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer + didChangeAdjustingFocus:adjustingFocus]; + } + } + } +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeBrightness:(float)adjustingBrightness +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id 
listener : *listeners) { + if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeBrightness:)]) { + [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer + didChangeBrightness:adjustingBrightness]; + } + } + } +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition +{ + auto listeners = atomic_load(&self->_listeners); + if (listeners) { + for (id listener : *listeners) { + if ([listener respondsToSelector:@selector(managedDeviceCapacityAnalyzer:didChangeLightingCondition:)]) { + [listener managedDeviceCapacityAnalyzer:managedDeviceCapacityAnalyzer + didChangeLightingCondition:lightingCondition]; + } + } + } +} + +@end diff --git a/ManagedCapturer/SCManagedDroppedFramesReporter.h b/ManagedCapturer/SCManagedDroppedFramesReporter.h new file mode 100644 index 0000000..e7404fb --- /dev/null +++ b/ManagedCapturer/SCManagedDroppedFramesReporter.h @@ -0,0 +1,25 @@ +// +// SCManagedDroppedFramesReporter.h +// Snapchat +// +// Created by Michel Loenngren on 3/21/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedCapturerListener.h" + +#import + +#import + +/* + Conforms to SCManagedVideoDataSourceListener and records frame rate statistics + during recording. + */ +@interface SCManagedDroppedFramesReporter : NSObject + +- (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied; + +- (void)didChangeCaptureDevicePosition; + +@end diff --git a/ManagedCapturer/SCManagedDroppedFramesReporter.m b/ManagedCapturer/SCManagedDroppedFramesReporter.m new file mode 100644 index 0000000..b64a679 --- /dev/null +++ b/ManagedCapturer/SCManagedDroppedFramesReporter.m @@ -0,0 +1,86 @@ +// +// SCManagedDroppedFramesReporter.m +// Snapchat +// +// Created by Michel Loenngren on 3/21/17. +// Copyright © 2017 Snapchat, Inc. All rights reserved. 
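+//
+//  Counts frames dropped by the video data source and, together with SCVideoFrameDropCounter,
+//  logs a frame drop summary when reportWithKeepLateFrames:lensesApplied: is called; the counter
+//  and the dropped frame count are reset after each report.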
+// + +#import "SCManagedDroppedFramesReporter.h" + +#import "SCCameraTweaks.h" +#import "SCManagedCapturerState.h" + +#import +#import +#import +#import +#import +#import + +CGFloat const kSCCaptureTargetFramerate = 30; + +@interface SCManagedDroppedFramesReporter () + +@property (nonatomic) SCVideoFrameDropCounter *frameDropCounter; + +@end + +@implementation SCManagedDroppedFramesReporter { + SCVideoFrameDropCounter *_frameDropCounter; + NSUInteger _droppedFrames; +} + +- (SCVideoFrameDropCounter *)frameDropCounter +{ + if (_frameDropCounter == nil) { + _frameDropCounter = [[SCVideoFrameDropCounter alloc] initWithTargetFramerate:kSCCaptureTargetFramerate]; + _droppedFrames = 0; + } + return _frameDropCounter; +} + +- (void)reportWithKeepLateFrames:(BOOL)keepLateFrames lensesApplied:(BOOL)lensesApplied +{ + if (_frameDropCounter == nil) { + return; + } + + NSMutableDictionary *eventDict = [_frameDropCounter.toDict mutableCopy]; + eventDict[@"total_frame_drop_measured"] = @(_droppedFrames); + eventDict[@"keep_late_frames"] = @(keepLateFrames); + // if user select none of the lenses when activing the lenses scroll view, we still enable keepLateFrames + eventDict[@"lenses_applied"] = @(lensesApplied); + + [[SCLogger sharedInstance] logEvent:kSCCameraMetricsFramesDroppedDuringRecording parameters:eventDict]; + + // Reset + _frameDropCounter = nil; + _droppedFrames = 0; +} + +- (void)didChangeCaptureDevicePosition +{ + [_frameDropCounter didChangeCaptureDevicePosition]; +} + +#pragma mark - SCManagedVideoDataSourceListener + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + [self.frameDropCounter processFrameTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)]; +} + +- (void)managedVideoDataSource:(id)managedVideoDataSource + didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer + devicePosition:(SCManagedCaptureDevicePosition)devicePosition +{ + _droppedFrames += 1; + NSDictionary *backgroundTaskScreenshot = SCBackgrounTaskScreenshotReport(); + SCLogCoreCameraInfo(@"[SCManagedDroppedFramesReporter] frame dropped, background tasks: %@", + backgroundTaskScreenshot); +} + +@end diff --git a/ManagedCapturer/SCManagedFrameHealthChecker.h b/ManagedCapturer/SCManagedFrameHealthChecker.h new file mode 100644 index 0000000..6b783b1 --- /dev/null +++ b/ManagedCapturer/SCManagedFrameHealthChecker.h @@ -0,0 +1,57 @@ +// +// SCManagedFrameHealthChecker.h +// Snapchat +// +// Created by Pinlin Chen on 30/08/2017. +// + +#import +#import + +#import +#import + +@interface SCManagedFrameHealthChecker : NSObject + ++ (SCManagedFrameHealthChecker *)sharedInstance; +/*! @abstract Use sharedInstance instead. 
*/ +SC_INIT_AND_NEW_UNAVAILABLE; + +/* Utility method */ +- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo; +- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer + photoCapturerEnabled:(BOOL)photoCapturerEnabled + lensEnabled:(BOOL)lensesEnabled + lensID:(NSString *)lensID; +- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata + photoCapturerEnabled:(BOOL)photoCapturerEnabled + lensEnabled:(BOOL)lensesEnabled + lensID:(NSString *)lensID; +- (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset; + +/* Image snap */ +- (void)checkImageHealthForCaptureFrameImage:(UIImage *)image + captureSettings:(NSDictionary *)captureSettings + captureSessionID:(NSString *)captureSessionID; +- (void)checkImageHealthForPreTranscoding:(UIImage *)image + metadata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID; +- (void)checkImageHealthForPostTranscoding:(NSData *)imageData + metadata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID; + +/* Video snap */ +- (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image + metedata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID; +- (void)checkVideoHealthForOverlayImage:(UIImage *)image + metedata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID; +- (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image + metedata:(NSDictionary *)metadata + properties:(NSDictionary *)properties + captureSessionID:(NSString *)captureSessionID; + +- (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID; + +@end diff --git a/ManagedCapturer/SCManagedFrameHealthChecker.m b/ManagedCapturer/SCManagedFrameHealthChecker.m new file mode 100644 index 0000000..b261a0d --- /dev/null +++ b/ManagedCapturer/SCManagedFrameHealthChecker.m @@ -0,0 +1,709 @@ +// +// SCManagedFrameHealthChecker.m +// Snapchat +// +// Created by Pinlin Chen on 30/08/2017. 
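+//
+//  Typical usage (an illustrative sketch only; sampleBuffer, image and captureSessionID are
+//  placeholders, not taken from a real call site): build metadata for a frame, queue one or more
+//  checks for a capture session, then report once the snap is finalized:
+//
+//      SCManagedFrameHealthChecker *checker = [SCManagedFrameHealthChecker sharedInstance];
+//      NSDictionary *captureSettings = [checker metadataForSampleBuffer:sampleBuffer extraInfo:nil];
+//      [checker checkImageHealthForCaptureFrameImage:image
+//                                    captureSettings:captureSettings
+//                                   captureSessionID:captureSessionID];
+//      [checker reportFrameHealthCheckForCaptureSessionID:captureSessionID];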
+// + +#import "SCManagedFrameHealthChecker.h" + +#import "SCCameraSettingUtils.h" +#import "SCCameraTweaks.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import + +#import +@import Accelerate; + +static const char *kSCManagedFrameHealthCheckerQueueLabel = "com.snapchat.frame_health_checker"; +static const int kSCManagedFrameHealthCheckerMaxSamples = 2304; +static const float kSCManagedFrameHealthCheckerPossibleBlackThreshold = 20.0; +static const float kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength = 300.0; +static const float kSCManagedFrameHealthCheckerScaledImageScale = 1.0; +// assume we could process at most of 2 RGBA images which are 2304*4096 RGBA image +static const double kSCManagedFrameHealthCheckerMinFreeMemMB = 72.0; + +typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckType) { + SCManagedFrameHealthCheck_ImageCapture = 0, + SCManagedFrameHealthCheck_ImagePreTranscoding, + SCManagedFrameHealthCheck_ImagePostTranscoding, + SCManagedFrameHealthCheck_VideoCapture, + SCManagedFrameHealthCheck_VideoOverlayImage, + SCManagedFrameHealthCheck_VideoPostTranscoding, +}; + +typedef NS_ENUM(NSUInteger, SCManagedFrameHealthCheckErrorType) { + SCManagedFrameHealthCheckError_None = 0, + SCManagedFrameHealthCheckError_Invalid_Bitmap, + SCManagedFrameHealthCheckError_Frame_Possibly_Black, + SCManagedFrameHealthCheckError_Frame_Totally_Black, + SCManagedFrameHealthCheckError_Execution_Error, +}; + +typedef struct { + float R; + float G; + float B; + float A; +} FloatRGBA; + +@class SCManagedFrameHealthCheckerTask; +typedef NSMutableDictionary * (^sc_managed_frame_checker_block)(SCManagedFrameHealthCheckerTask *task); + +float vDspColorElementSum(const Byte *data, NSInteger stripLength, NSInteger bufferLength) +{ + float sum = 0; + float colorArray[bufferLength]; + // Convert to float for DSP registerator + vDSP_vfltu8(data, stripLength, colorArray, 1, bufferLength); + // Calculate sum of color element + vDSP_sve(colorArray, 1, &sum, bufferLength); + return sum; +} + +@interface SCManagedFrameHealthCheckerTask : NSObject + +@property (nonatomic, assign) SCManagedFrameHealthCheckType type; +@property (nonatomic, strong) id targetObject; +@property (nonatomic, assign) CGSize sourceImageSize; +@property (nonatomic, strong) UIImage *unifiedImage; +@property (nonatomic, strong) NSDictionary *metadata; +@property (nonatomic, strong) NSDictionary *videoProperties; +@property (nonatomic, assign) SCManagedFrameHealthCheckErrorType errorType; + ++ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type + targetObject:(id)targetObject + metadata:(NSDictionary *)metadata + videoProperties:(NSDictionary *)videoProperties; + ++ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type + targetObject:(id)targetObject + metadata:(NSDictionary *)metadata; + +@end + +@implementation SCManagedFrameHealthCheckerTask + ++ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type + targetObject:(id)targetObject + metadata:(NSDictionary *)metadata +{ + return [self taskWithType:type targetObject:targetObject metadata:metadata videoProperties:nil]; +} + ++ (SCManagedFrameHealthCheckerTask *)taskWithType:(SCManagedFrameHealthCheckType)type + targetObject:(id)targetObject + metadata:(NSDictionary *)metadata + videoProperties:(NSDictionary *)videoProperties +{ + SCManagedFrameHealthCheckerTask *task = [[SCManagedFrameHealthCheckerTask alloc] init]; + task.type = type; + task.targetObject = targetObject; 
+ task.metadata = metadata; + task.videoProperties = videoProperties; + return task; +} + +- (NSString *)textForSnapType +{ + switch (self.type) { + case SCManagedFrameHealthCheck_ImageCapture: + case SCManagedFrameHealthCheck_ImagePreTranscoding: + case SCManagedFrameHealthCheck_ImagePostTranscoding: + return @"IMAGE"; + case SCManagedFrameHealthCheck_VideoCapture: + case SCManagedFrameHealthCheck_VideoOverlayImage: + case SCManagedFrameHealthCheck_VideoPostTranscoding: + return @"VIDEO"; + } +} + +- (NSString *)textForSource +{ + switch (self.type) { + case SCManagedFrameHealthCheck_ImageCapture: + return @"CAPTURE"; + case SCManagedFrameHealthCheck_ImagePreTranscoding: + return @"PRE_TRANSCODING"; + case SCManagedFrameHealthCheck_ImagePostTranscoding: + return @"POST_TRANSCODING"; + case SCManagedFrameHealthCheck_VideoCapture: + return @"CAPTURE"; + case SCManagedFrameHealthCheck_VideoOverlayImage: + return @"OVERLAY_IMAGE"; + case SCManagedFrameHealthCheck_VideoPostTranscoding: + return @"POST_TRANSCODING"; + } +} + +- (NSString *)textForErrorType +{ + switch (self.errorType) { + case SCManagedFrameHealthCheckError_None: + return nil; + case SCManagedFrameHealthCheckError_Invalid_Bitmap: + return @"Invalid_Bitmap"; + case SCManagedFrameHealthCheckError_Frame_Possibly_Black: + return @"Frame_Possibly_Black"; + case SCManagedFrameHealthCheckError_Frame_Totally_Black: + return @"Frame_Totally_Black"; + case SCManagedFrameHealthCheckError_Execution_Error: + return @"Execution_Error"; + } +} + +@end + +@interface SCManagedFrameHealthChecker () { + id _performer; + // Dictionary structure + // Key - NSString, captureSessionID + // Value - NSMutableArray + NSMutableDictionary *_frameCheckTasks; +} + +@end + +@implementation SCManagedFrameHealthChecker + ++ (SCManagedFrameHealthChecker *)sharedInstance +{ + SCTraceODPCompatibleStart(2); + static SCManagedFrameHealthChecker *checker; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + checker = [[SCManagedFrameHealthChecker alloc] _init]; + }); + return checker; +} + +- (instancetype)_init +{ + SCTraceODPCompatibleStart(2); + if (self = [super init]) { + // Use the lowest QoS level + _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedFrameHealthCheckerQueueLabel + qualityOfService:QOS_CLASS_UTILITY + queueType:DISPATCH_QUEUE_SERIAL + context:SCQueuePerformerContextCamera]; + _frameCheckTasks = [NSMutableDictionary dictionary]; + } + return self; +} + +- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer +{ + SCTraceODPCompatibleStart(2); + // add exposure, ISO, brightness + NSMutableDictionary *metadata = [NSMutableDictionary dictionary]; + if (!sampleBuffer || !CMSampleBufferDataIsReady(sampleBuffer)) { + return metadata; + } + CFDictionaryRef exifAttachments = + (CFDictionaryRef)CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL); + NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments); + if (exposureTimeNum) { + metadata[@"exposure"] = exposureTimeNum; + } + NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments); + if (isoSpeedRatingNum) { + metadata[@"iso"] = isoSpeedRatingNum; + } + NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments); + if (brightnessNum) { + float brightness = [brightnessNum floatValue]; + metadata[@"brightness"] = isfinite(brightness) ? 
@(brightness) : @(0); + } + + return metadata; +} + +- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata +{ + SCTraceODPCompatibleStart(2); + // add exposure, ISO, brightness + NSMutableDictionary *newMetadata = [NSMutableDictionary dictionary]; + CFDictionaryRef exifAttachments = (__bridge CFDictionaryRef)metadata; + NSNumber *exposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments); + if (exposureTimeNum) { + newMetadata[@"exposure"] = exposureTimeNum; + } + NSNumber *isoSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments); + if (isoSpeedRatingNum) { + newMetadata[@"iso"] = isoSpeedRatingNum; + } + NSNumber *brightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments); + if (brightnessNum) { + float brightness = [brightnessNum floatValue]; + newMetadata[@"brightness"] = isfinite(brightness) ? @(brightness) : @(0); + } + + return newMetadata; +} + +- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer extraInfo:(NSDictionary *)extraInfo +{ + SCTraceODPCompatibleStart(2); + NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer]; + [metadata addEntriesFromDictionary:extraInfo]; + return metadata; +} + +- (NSMutableDictionary *)metadataForSampleBuffer:(CMSampleBufferRef)sampleBuffer + photoCapturerEnabled:(BOOL)photoCapturerEnabled + lensEnabled:(BOOL)lensesEnabled + lensID:(NSString *)lensID +{ + SCTraceODPCompatibleStart(2); + NSMutableDictionary *metadata = [self metadataForSampleBuffer:sampleBuffer]; + metadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled); + + metadata[@"lens_enabled"] = @(lensesEnabled); + if (lensesEnabled) { + metadata[@"lens_id"] = lensID ?: @""; + } + + return metadata; +} + +- (NSMutableDictionary *)metadataForMetadata:(NSDictionary *)metadata + photoCapturerEnabled:(BOOL)photoCapturerEnabled + lensEnabled:(BOOL)lensesEnabled + lensID:(NSString *)lensID +{ + SCTraceODPCompatibleStart(2); + NSMutableDictionary *newMetadata = [self metadataForMetadata:metadata]; + newMetadata[@"photo_capturer_enabled"] = @(photoCapturerEnabled); + + newMetadata[@"lens_enabled"] = @(lensesEnabled); + if (lensesEnabled) { + newMetadata[@"lens_id"] = lensID ?: @""; + } + + return newMetadata; +} + +- (NSMutableDictionary *)getPropertiesFromAsset:(AVAsset *)asset +{ + SCTraceODPCompatibleStart(2); + SC_GUARD_ELSE_RETURN_VALUE(asset != nil, nil); + NSMutableDictionary *properties = [NSMutableDictionary dictionary]; + // file size + properties[@"file_size"] = @([asset fileSize]); + // duration + properties[@"duration"] = @(CMTimeGetSeconds(asset.duration)); + // video track count + NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo]; + properties[@"video_track_count"] = @(videoTracks.count); + if (videoTracks.count > 0) { + // video bitrate + properties[@"video_bitrate"] = @([videoTracks.firstObject estimatedDataRate]); + // frame rate + properties[@"video_frame_rate"] = @([videoTracks.firstObject nominalFrameRate]); + } + // audio track count + NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio]; + properties[@"audio_track_count"] = @(audioTracks.count); + if (audioTracks.count > 0) { + // audio bitrate + properties[@"audio_bitrate"] = @([audioTracks.firstObject estimatedDataRate]); + } + // playable + properties[@"playable"] = @(asset.isPlayable); + return properties; +} + +#pragma mark - Image snap + +- (void)checkImageHealthForCaptureFrameImage:(UIImage *)image + captureSettings:(NSDictionary *)captureSettings + 
captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:CAPTURE - captureSessionID shouldn't be empty"); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImageCapture + targetObject:image + metadata:captureSettings]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +- (void)checkImageHealthForPreTranscoding:(UIImage *)image + metadata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:PRE_CAPTURE - captureSessionID shouldn't be empty"); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePreTranscoding + targetObject:image + metadata:metadata]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +- (void)checkImageHealthForPostTranscoding:(NSData *)imageData + metadata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #IMAGE:POST_CAPTURE - captureSessionID shouldn't be empty"); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_ImagePostTranscoding + targetObject:imageData + metadata:metadata]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +#pragma mark - Video snap +- (void)checkVideoHealthForCaptureFrameImage:(UIImage *)image + metedata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:CAPTURE - captureSessionID shouldn't be empty"); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoCapture + targetObject:image + metadata:metadata]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +- (void)checkVideoHealthForOverlayImage:(UIImage *)image + metedata:(NSDictionary *)metadata + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - captureSessionID shouldn't be empty"); + return; + } + // Overlay image could be nil + if (!image) { + SCLogCoreCameraInfo(@"[FrameHealthChecker] #VIDEO:OVERLAY_IMAGE - overlayImage is nil."); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoOverlayImage + targetObject:image + metadata:metadata]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +- (void)checkVideoHealthForPostTranscodingThumbnail:(UIImage *)image + metedata:(NSDictionary *)metadata + properties:(NSDictionary *)properties + captureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + SCLogCoreCameraError(@"[FrameHealthChecker] #VIDEO:POST_TRANSCODING - captureSessionID shouldn't be empty"); + return; + } + SCManagedFrameHealthCheckerTask *task = + [SCManagedFrameHealthCheckerTask taskWithType:SCManagedFrameHealthCheck_VideoPostTranscoding + targetObject:image + 
metadata:metadata + videoProperties:properties]; + [self _addTask:task withCaptureSessionID:captureSessionID]; +} + +#pragma mark - Task management +- (void)reportFrameHealthCheckForCaptureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (!captureSessionID) { + SCLogCoreCameraError(@"[FrameHealthChecker] report - captureSessionID shouldn't be nil"); + return; + } + [self _asynchronouslyCheckForCaptureSessionID:captureSessionID]; +} + +#pragma mark - Private functions + +/// Scale the source image to a new image with edges less than kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength. +- (UIImage *)_unifyImage:(UIImage *)sourceImage +{ + CGFloat sourceWidth = sourceImage.size.width; + CGFloat sourceHeight = sourceImage.size.height; + + if (sourceWidth == 0.0 || sourceHeight == 0.0) { + SCLogCoreCameraInfo(@"[FrameHealthChecker] Tried scaling image with no size"); + return sourceImage; + } + + CGFloat maxEdgeLength = kSCManagedFrameHealthCheckerScaledImageMaxEdgeLength; + + CGFloat widthScalingFactor = maxEdgeLength / sourceWidth; + CGFloat heightScalingFactor = maxEdgeLength / sourceHeight; + + CGFloat scalingFactor = MIN(widthScalingFactor, heightScalingFactor); + + if (scalingFactor >= 1) { + SCLogCoreCameraInfo(@"[FrameHealthChecker] No need to scale image."); + return sourceImage; + } + + CGSize targetSize = CGSizeMake(sourceWidth * scalingFactor, sourceHeight * scalingFactor); + + SCLogCoreCameraInfo(@"[FrameHealthChecker] Scaling image from %@ to %@", NSStringFromCGSize(sourceImage.size), + NSStringFromCGSize(targetSize)); + return [sourceImage scaledImageToSize:targetSize scale:kSCManagedFrameHealthCheckerScaledImageScale]; +} + +- (void)_addTask:(SCManagedFrameHealthCheckerTask *)newTask withCaptureSessionID:(NSString *)captureSessionID +{ + SCTraceODPCompatibleStart(2); + if (captureSessionID.length == 0) { + return; + } + [_performer perform:^{ + SCTraceODPCompatibleStart(2); + + CFTimeInterval beforeScaling = CACurrentMediaTime(); + if (newTask.targetObject) { + if ([newTask.targetObject isKindOfClass:[UIImage class]]) { + UIImage *sourceImage = (UIImage *)newTask.targetObject; + newTask.unifiedImage = [self _unifyImage:sourceImage]; + newTask.sourceImageSize = sourceImage.size; + } else if ([newTask.targetObject isKindOfClass:[NSData class]]) { + UIImage *sourceImage = [UIImage sc_imageWithData:newTask.targetObject]; + CFTimeInterval betweenDecodingAndScaling = CACurrentMediaTime(); + SCLogCoreCameraInfo(@"[FrameHealthChecker] #Image decoding delay: %f", + betweenDecodingAndScaling - beforeScaling); + beforeScaling = betweenDecodingAndScaling; + newTask.unifiedImage = [self _unifyImage:sourceImage]; + newTask.sourceImageSize = sourceImage.size; + } else { + SCLogCoreCameraError(@"[FrameHealthChecker] Invalid targetObject class:%@", + NSStringFromClass([newTask.targetObject class])); + } + newTask.targetObject = nil; + } + SCLogCoreCameraInfo(@"[FrameHealthChecker] #Scale image delay: %f", CACurrentMediaTime() - beforeScaling); + + NSMutableArray *taskQueue = _frameCheckTasks[captureSessionID]; + if (!taskQueue) { + taskQueue = [NSMutableArray array]; + _frameCheckTasks[captureSessionID] = taskQueue; + } + // Remove previous same type task, avoid meaningless task, + // for example repeat click "Send Button" and then "Back button" + // will produce a lot of PRE_TRANSCODING and POST_TRANSCODING + for (SCManagedFrameHealthCheckerTask *task in taskQueue) { + if (task.type == newTask.type) { + [taskQueue removeObject:task]; + break; + } + } + + 
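+        // Note: removeObject: during fast enumeration is tolerated here only because the loop
+        // breaks immediately after the mutation; keep the break if this block is ever changed.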
[taskQueue addObject:newTask];
+    }];
+}
+
+- (void)_asynchronouslyCheckForCaptureSessionID:(NSString *)captureSessionID
+{
+    SCTraceODPCompatibleStart(2);
+    [_performer perform:^{
+        SCTraceODPCompatibleStart(2);
+        NSMutableArray *tasksQueue = _frameCheckTasks[captureSessionID];
+        if (!tasksQueue) {
+            return;
+        }
+
+        // Check the free memory; if it is too low, drop these tasks
+        double memFree = [SCLogger memoryFreeMB];
+        if (memFree < kSCManagedFrameHealthCheckerMinFreeMemMB) {
+            SCLogCoreCameraWarning(
+                @"[FrameHealthChecker] mem_free:%f is too low, dropped checking tasks for captureSessionID:%@", memFree,
+                captureSessionID);
+            [_frameCheckTasks removeObjectForKey:captureSessionID];
+            return;
+        }
+
+        __block NSMutableArray *frameHealthInfoArray = [NSMutableArray array];
+        // Execute all tasks and wait for them to complete
+        [tasksQueue enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL *_Nonnull stop) {
+            SCManagedFrameHealthCheckerTask *task = (SCManagedFrameHealthCheckerTask *)obj;
+            NSMutableDictionary *frameHealthInfo;
+            UIImage *image = task.unifiedImage;
+
+            if (image) {
+                // Get frame health info
+                frameHealthInfo = [self _getFrameHealthInfoForImage:image
+                                                             source:[task textForSource]
+                                                           snapType:[task textForSnapType]
+                                                           metadata:task.metadata
+                                                    sourceImageSize:task.sourceImageSize
+                                                   captureSessionID:captureSessionID];
+                NSNumber *isPossibleBlackNum = frameHealthInfo[@"is_possible_black"];
+                NSNumber *isTotallyBlackNum = frameHealthInfo[@"is_total_black"];
+                NSNumber *hasExecutionError = frameHealthInfo[@"execution_error"];
+                if ([isTotallyBlackNum boolValue]) {
+                    task.errorType = SCManagedFrameHealthCheckError_Frame_Totally_Black;
+                } else if ([isPossibleBlackNum boolValue]) {
+                    task.errorType = SCManagedFrameHealthCheckError_Frame_Possibly_Black;
+                } else if ([hasExecutionError boolValue]) {
+                    task.errorType = SCManagedFrameHealthCheckError_Execution_Error;
+                }
+            } else {
+                frameHealthInfo = [NSMutableDictionary dictionary];
+                task.errorType = SCManagedFrameHealthCheckError_Invalid_Bitmap;
+            }
+
+            if (frameHealthInfo) {
+                frameHealthInfo[@"frame_source"] = [task textForSource];
+                frameHealthInfo[@"snap_type"] = [task textForSnapType];
+                frameHealthInfo[@"error_type"] = [task textForErrorType];
+                frameHealthInfo[@"capture_session_id"] = captureSessionID;
+                frameHealthInfo[@"metadata"] = task.metadata;
+                if (task.videoProperties.count > 0) {
+                    [frameHealthInfo addEntriesFromDictionary:task.videoProperties];
+                }
+                [frameHealthInfoArray addObject:frameHealthInfo];
+            }
+
+            // Release the image as soon as possible to mitigate memory pressure
+            task.unifiedImage = nil;
+        }];
+
+        for (NSDictionary *frameHealthInfo in frameHealthInfoArray) {
+            if ([frameHealthInfo[@"is_total_black"] boolValue] || [frameHealthInfo[@"is_possible_black"] boolValue]) {
+                // // TODO: Zi Kai Chen - add this back. Normally we use id for
+                // this but as this is a shared instance we cannot easily inject it. The work would
+                // involve making this not a shared instance.
+                // SCShakeBetaLogEvent(SCShakeBetaLoggerKeyCCamBlackSnap,
+                //                     JSONStringSerializeObjectForLogging(frameHealthInfo));
+            }
+
+            [[SCLogger sharedInstance] logUnsampledEventToEventLogger:kSCCameraMetricsFrameHealthCheckIndex
+                                                           parameters:frameHealthInfo
+                                                     secretParameters:nil
+                                                              metrics:nil];
+        }
+
+        [_frameCheckTasks removeObjectForKey:captureSessionID];
+    }];
+}
+
+- (NSMutableDictionary *)_getFrameHealthInfoForImage:(UIImage *)image
+                                               source:(NSString *)source
+                                             snapType:(NSString *)snapType
+                                             metadata:(NSDictionary *)metadata
+                                      sourceImageSize:(CGSize)sourceImageSize
+                                     captureSessionID:(NSString *)captureSessionID
+{
+    SCTraceODPCompatibleStart(2);
+    NSMutableDictionary *parameters = [NSMutableDictionary dictionary];
+    size_t samplesCount = 0;
+    CFTimeInterval start = CACurrentMediaTime();
+    CGImageRef imageRef = image.CGImage;
+    size_t imageWidth = CGImageGetWidth(imageRef);
+    size_t imageHeight = CGImageGetHeight(imageRef);
+    CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(imageRef));
+    CFTimeInterval getImageDataTime = CACurrentMediaTime();
+    if (pixelData) {
+        const Byte *imageData = CFDataGetBytePtr(pixelData);
+        NSInteger stripLength = 0;
+        NSInteger bufferLength = 0;
+        NSInteger imagePixels = imageWidth * imageHeight;
+        // Limit the maximum number of sampled pixels
+        if (imagePixels > kSCManagedFrameHealthCheckerMaxSamples) {
+            stripLength = imagePixels / kSCManagedFrameHealthCheckerMaxSamples * 4;
+            bufferLength = kSCManagedFrameHealthCheckerMaxSamples;
+        } else {
+            stripLength = 4;
+            bufferLength = imagePixels;
+        }
+        samplesCount = bufferLength;
+
+        // Avoid dividing by zero
+        if (samplesCount != 0) {
+            FloatRGBA sumRGBA = [self _getSumRGBAFromData:imageData
+                                              stripLength:stripLength
+                                             bufferLength:bufferLength
+                                               bitmapInfo:CGImageGetBitmapInfo(imageRef)];
+            float averageR = sumRGBA.R / samplesCount;
+            float averageG = sumRGBA.G / samplesCount;
+            float averageB = sumRGBA.B / samplesCount;
+            float averageA = sumRGBA.A / samplesCount;
+            parameters[@"average_sampled_rgba_r"] = @(averageR);
+            parameters[@"average_sampled_rgba_g"] = @(averageG);
+            parameters[@"average_sampled_rgba_b"] = @(averageB);
+            parameters[@"average_sampled_rgba_a"] = @(averageA);
+            parameters[@"origin_frame_width"] = @(sourceImageSize.width);
+            parameters[@"origin_frame_height"] = @(sourceImageSize.height);
+            // Also report possible black to identify intentional black snaps taken by covering the camera.
+            // Normally averageA is very near 255, but for a video overlay image it is very small.
+            // So we use averageA > 250 to avoid considering a video overlay image as possible black.
+            if (averageA > 250 && averageR < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
+                averageG < kSCManagedFrameHealthCheckerPossibleBlackThreshold &&
+                averageB < kSCManagedFrameHealthCheckerPossibleBlackThreshold) {
+                parameters[@"is_possible_black"] = @(YES);
+                // Use these parameters for BigQuery conditions in Grafana
+                if (averageR == 0 && averageG == 0 && averageB == 0) {
+                    parameters[@"is_total_black"] = @(YES);
+                }
+            }
+        } else {
+            SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - samplesCount is zero! captureSessionID:%@", snapType,
+                                 source, captureSessionID);
+            parameters[@"execution_error"] = @(YES);
+        }
+        CFRelease(pixelData);
+    } else {
+        SCLogCoreCameraError(@"[FrameHealthChecker] #%@:%@ - pixelData is nil! captureSessionID:%@", snapType, source,
+                             captureSessionID);
+        parameters[@"execution_error"] = @(YES);
+    }
+    parameters[@"sample_size"] = @(samplesCount);
+
+    CFTimeInterval end = CACurrentMediaTime();
+    SCLogCoreCameraInfo(@"[FrameHealthChecker] #%@:%@ - GET_IMAGE_DATA_TIME:%f SAMPLE_DATA_TIME:%f TOTAL_TIME:%f",
+                        snapType, source, getImageDataTime - start, end - getImageDataTime, end - start);
+    return parameters;
+}
+
+- (FloatRGBA)_getSumRGBAFromData:(const Byte *)imageData
+                     stripLength:(NSInteger)stripLength
+                    bufferLength:(NSInteger)bufferLength
+                      bitmapInfo:(CGBitmapInfo)bitmapInfo
+{
+    SCTraceODPCompatibleStart(2);
+    FloatRGBA sumRGBA;
+    if ((bitmapInfo & kCGImageAlphaPremultipliedFirst) && (bitmapInfo & kCGImageByteOrder32Little)) {
+        // BGRA
+        sumRGBA.B = vDspColorElementSum(imageData, stripLength, bufferLength);
+        sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength);
+        sumRGBA.R = vDspColorElementSum(imageData + 2, stripLength, bufferLength);
+        sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength);
+    } else {
+        // TODO: support other types besides RGBA
+        sumRGBA.R = vDspColorElementSum(imageData, stripLength, bufferLength);
+        sumRGBA.G = vDspColorElementSum(imageData + 1, stripLength, bufferLength);
+        sumRGBA.B = vDspColorElementSum(imageData + 2, stripLength, bufferLength);
+        sumRGBA.A = vDspColorElementSum(imageData + 3, stripLength, bufferLength);
+    }
+    return sumRGBA;
+}
+
+@end
diff --git a/ManagedCapturer/SCManagedFrontFlashController.h b/ManagedCapturer/SCManagedFrontFlashController.h
new file mode 100644
index 0000000..c5bef55
--- /dev/null
+++ b/ManagedCapturer/SCManagedFrontFlashController.h
@@ -0,0 +1,18 @@
+//
+//  SCManagedFrontFlashController.h
+//  Snapchat
+//
+//  Created by Liu Liu on 5/4/15.
+//  Copyright (c) 2015 Liu Liu. All rights reserved.
+//
+
+#import 
+
+// This object is only accessed on the SCManagedCapturer thread
+@interface SCManagedFrontFlashController : NSObject
+
+@property (nonatomic, assign) BOOL flashActive;
+
+@property (nonatomic, assign) BOOL torchActive;
+
+@end
diff --git a/ManagedCapturer/SCManagedFrontFlashController.m b/ManagedCapturer/SCManagedFrontFlashController.m
new file mode 100644
index 0000000..61b4fac
--- /dev/null
+++ b/ManagedCapturer/SCManagedFrontFlashController.m
@@ -0,0 +1,105 @@
+//
+//  SCManagedFrontFlashController.m
+//  Snapchat
+//
+//  Created by Liu Liu on 5/4/15.
+//  Copyright (c) 2015 Liu Liu. All rights reserved.
+// + +#import "SCManagedFrontFlashController.h" + +#import +#import +#import +#import + +@import UIKit; + +@implementation SCManagedFrontFlashController { + BOOL _active; + UIView *_brightView; + CGFloat _brightnessWhenFlashAndTorchOff; +} + +- (void)_setScreenWithFrontViewFlashActive:(BOOL)flashActive torchActive:(BOOL)torchActive +{ + SCTraceStart(); + SCAssertMainThread(); + BOOL wasActive = _active; + _active = flashActive || torchActive; + if (!wasActive && _active) { + [self _activateFlash:flashActive]; + } else if (wasActive && !_active) { + [self _deactivateFlash]; + } +} + +- (void)_activateFlash:(BOOL)flashActive +{ + UIWindow *mainWindow = [[UIApplication sharedApplication] keyWindow]; + if (!_brightView) { + CGRect frame = [mainWindow bounds]; + CGFloat maxLength = MAX(CGRectGetWidth(frame), CGRectGetHeight(frame)); + frame.size = CGSizeMake(maxLength, maxLength); + // Using the max length on either side to be compatible with different orientations + _brightView = [[UIView alloc] initWithFrame:frame]; + _brightView.userInteractionEnabled = NO; + _brightView.backgroundColor = [UIColor whiteColor]; + } + _brightnessWhenFlashAndTorchOff = [UIScreen mainScreen].brightness; + SCLogGeneralInfo(@"[SCManagedFrontFlashController] Activating flash, setting screen brightness from %f to 1.0", + _brightnessWhenFlashAndTorchOff); + [self _brightenLoop]; + _brightView.alpha = flashActive ? 1.0 : 0.75; + [mainWindow addSubview:_brightView]; +} + +- (void)_deactivateFlash +{ + SCLogGeneralInfo(@"[SCManagedFrontFlashController] Deactivating flash, setting screen brightness from %f to %f", + [UIScreen mainScreen].brightness, _brightnessWhenFlashAndTorchOff); + [UIScreen mainScreen].brightness = _brightnessWhenFlashAndTorchOff; + if (_brightView) { + [_brightView removeFromSuperview]; + } +} + +- (void)_brightenLoop +{ + if (_active) { + SCLogGeneralInfo(@"[SCManagedFrontFlashController] In brighten loop, setting brightness from %f to 1.0", + [UIScreen mainScreen].brightness); + [UIScreen mainScreen].brightness = 1.0; + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_SEC / 2), dispatch_get_main_queue(), ^(void) { + [self _brightenLoop]; + }); + } else { + SCLogGeneralInfo(@"[SCManagedFrontFlashController] Recording is done, brighten loop ends"); + } +} + +- (void)setFlashActive:(BOOL)flashActive +{ + SCTraceStart(); + if (_flashActive != flashActive) { + _flashActive = flashActive; + BOOL torchActive = _torchActive; + runOnMainThreadAsynchronously(^{ + [self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive]; + }); + } +} + +- (void)setTorchActive:(BOOL)torchActive +{ + SCTraceStart(); + if (_torchActive != torchActive) { + _torchActive = torchActive; + BOOL flashActive = _flashActive; + runOnMainThreadAsynchronously(^{ + [self _setScreenWithFrontViewFlashActive:flashActive torchActive:torchActive]; + }); + } +} + +@end diff --git a/ManagedCapturer/SCManagedLegacyStillImageCapturer.h b/ManagedCapturer/SCManagedLegacyStillImageCapturer.h new file mode 100644 index 0000000..7c2919f --- /dev/null +++ b/ManagedCapturer/SCManagedLegacyStillImageCapturer.h @@ -0,0 +1,13 @@ +// +// SCManagedLegacyStillImageCapturer.h +// Snapchat +// +// Created by Chao Pang on 10/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. 
+// + +#import "SCManagedStillImageCapturer.h" + +@interface SCManagedLegacyStillImageCapturer : SCManagedStillImageCapturer + +@end diff --git a/ManagedCapturer/SCManagedLegacyStillImageCapturer.m b/ManagedCapturer/SCManagedLegacyStillImageCapturer.m new file mode 100644 index 0000000..9e1e9d1 --- /dev/null +++ b/ManagedCapturer/SCManagedLegacyStillImageCapturer.m @@ -0,0 +1,460 @@ +// +// SCManagedLegacyStillImageCapturer.m +// Snapchat +// +// Created by Chao Pang on 10/4/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. +// + +#import "SCManagedLegacyStillImageCapturer.h" + +#import "AVCaptureConnection+InputDevice.h" +#import "SCCameraTweaks.h" +#import "SCLogger+Camera.h" +#import "SCManagedCapturer.h" +#import "SCManagedStillImageCapturer_Protected.h" +#import "SCStillImageCaptureVideoInputMethod.h" + +#import +#import +#import +#import +#import +#import +#import +#import +#import + +@import ImageIO; + +static NSString *const kSCLegacyStillImageCaptureDefaultMethodErrorDomain = + @"kSCLegacyStillImageCaptureDefaultMethodErrorDomain"; +static NSString *const kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain = + @"kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain"; + +static NSInteger const kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException = 10000; +static NSInteger const kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException = 10001; + +@implementation SCManagedLegacyStillImageCapturer { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + AVCaptureStillImageOutput *_stillImageOutput; +#pragma clang diagnostic pop + + BOOL _shouldCapture; + NSUInteger _retries; + + SCStillImageCaptureVideoInputMethod *_videoFileMethod; +} + +- (instancetype)initWithSession:(AVCaptureSession *)session + performer:(id)performer + lensProcessingCore:(id)lensProcessingCore + delegate:(id)delegate +{ + SCTraceStart(); + self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate]; + if (self) { + [self setupWithSession:session]; + } + return self; +} + +- (void)setupWithSession:(AVCaptureSession *)session +{ + SCTraceStart(); +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + _stillImageOutput = [[AVCaptureStillImageOutput alloc] init]; +#pragma clang diagnostic pop + _stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG}; + [self setAsOutput:session]; +} + +- (void)setAsOutput:(AVCaptureSession *)session +{ + SCTraceStart(); + if ([session canAddOutput:_stillImageOutput]) { + [session addOutput:_stillImageOutput]; + } +} + +- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled +{ + SCTraceStart(); + if (_stillImageOutput.isHighResolutionStillImageOutputEnabled != highResolutionStillImageOutputEnabled) { + _stillImageOutput.highResolutionStillImageOutputEnabled = highResolutionStillImageOutputEnabled; + } +} + +- (void)setPortraitModeCaptureEnabled:(BOOL)enabled +{ + // Legacy capturer only used on devices running versions under 10.2, which don't support depth data + // so this function is never called and does not need to be implemented +} + +- (void)enableStillImageStabilization +{ + SCTraceStart(); +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + if (_stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported) { + _stillImageOutput.lensStabilizationDuringBracketedCaptureEnabled = YES; + } 
+#pragma clang diagnostic pop +} + +- (void)removeAsOutput:(AVCaptureSession *)session +{ + SCTraceStart(); + [session removeOutput:_stillImageOutput]; +} + +- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio + atZoomFactor:(float)zoomFactor + fieldOfView:(float)fieldOfView + state:(SCManagedCapturerState *)state + captureSessionID:(NSString *)captureSessionID + shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo + completionHandler: + (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler +{ + SCTraceStart(); + SCAssert(completionHandler, @"completionHandler shouldn't be nil"); + _retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds + _aspectRatio = aspectRatio; + _zoomFactor = zoomFactor; + _fieldOfView = fieldOfView; + _state = state; + _captureSessionID = captureSessionID; + _shouldCaptureFromVideo = shouldCaptureFromVideo; + SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point otherwise we are destroying " + @"current completion handler."); + _completionHandler = [completionHandler copy]; + [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart]; + if (!_adjustingExposureManualDetect) { + SCLogCoreCameraInfo(@"Capturing still image now"); + [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo]; + _shouldCapture = NO; + } else { + SCLogCoreCameraInfo(@"Wait adjusting exposure (or after 0.4 seconds) and then capture still image"); + _shouldCapture = YES; + [self _deadlineCaptureStillImage]; + } +} + +#pragma mark - SCManagedDeviceCapacityAnalyzerListener + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeAdjustingExposure:(BOOL)adjustingExposure +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on. + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + self->_adjustingExposureManualDetect = adjustingExposure; + [self _didChangeAdjustingExposure:adjustingExposure + withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect]; + }]; +} + +- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer + didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + self->_lightingConditionType = lightingCondition; + }]; +} + +#pragma mark - SCManagedCapturerListener + +- (void)managedCapturer:(id)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state +{ + SCTraceStart(); + @weakify(self); + [_performer performImmediatelyIfCurrentPerformer:^{ + @strongify(self); + SC_GUARD_ELSE_RETURN(self); + // Since this is handled on a different thread, therefore, dispatch back to the queue we operated on. 
+        [self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO];
+    }];
+}
+
+#pragma mark - Private methods
+
+- (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy
+{
+    if (!adjustingExposure && self->_shouldCapture) {
+        SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy);
+        [self _captureStillImageWithExposureAdjustmentStrategy:strategy];
+        self->_shouldCapture = NO;
+    }
+}
+
+- (void)_deadlineCaptureStillImage
+{
+    SCTraceStart();
+    // Use the SCManagedCapturer's private queue.
+    [_performer perform:^{
+        if (_shouldCapture) {
+            [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline];
+            _shouldCapture = NO;
+        }
+    }
+              after:SCCameraTweaksExposureDeadline()];
+}
+
+- (void)_captureStillImageWithExposureAdjustmentStrategy:(NSString *)strategy
+{
+    SCTraceStart();
+    [[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy];
+    if (_shouldCaptureFromVideo) {
+        [self captureStillImageFromVideoBuffer];
+        return;
+    }
+    SCAssert(_stillImageOutput, @"stillImageOutput shouldn't be nil");
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+    AVCaptureStillImageOutput *stillImageOutput = _stillImageOutput;
+#pragma clang diagnostic pop
+    AVCaptureConnection *captureConnection = [self _captureConnectionFromStillImageOutput:stillImageOutput];
+    SCManagedCapturerState *state = [_state copy];
+    dispatch_block_t legacyStillImageCaptureBlock = ^{
+        SCCAssertMainThread();
+        // If the application is not in the background and we have a still image connection, do the capture.
+        // Otherwise, fail.
+        if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+            [_performer performImmediatelyIfCurrentPerformer:^{
+                sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
+                    _completionHandler;
+                _completionHandler = nil;
+                completionHandler(nil, nil,
+                                  [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
+                                                      code:kSCManagedStillImageCapturerApplicationStateBackground
+                                                  userInfo:nil]);
+            }];
+            return;
+        }
+#if !TARGET_IPHONE_SIMULATOR
+        if (!captureConnection) {
+            [_performer performImmediatelyIfCurrentPerformer:^{
+                sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler =
+                    _completionHandler;
+                _completionHandler = nil;
+                completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
+                                                                code:kSCManagedStillImageCapturerNoStillImageConnection
+                                                            userInfo:nil]);
+            }];
+            return;
+        }
+#endif
+        // Select the appropriate image capture method
+        if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) {
+            if (!_videoFileMethod) {
+                _videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init];
+            }
+            [[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCapture"];
+            [[SCCoreCameraLogger sharedInstance]
+                logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCapture"];
+            [_videoFileMethod captureStillImageWithCapturerState:state
+                successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {
+                    [self _legacyStillImageCaptureDidSucceedWithImageData:imageData
+                                                             sampleBuffer:nil
+                                                               cameraInfo:cameraInfo
+                                                                    error:error];
+                }
+                failureBlock:^(NSError *error) {
+                    [self _legacyStillImageCaptureDidFailWithError:error];
+                }];
+        } else {
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored
"-Wdeprecated-declarations" + if (stillImageOutput.isLensStabilizationDuringBracketedCaptureSupported && !state.flashActive) { + [self _captureStabilizedStillImageWithStillImageOutput:stillImageOutput + captureConnection:captureConnection + capturerState:state]; + } else { + [self _captureStillImageWithStillImageOutput:stillImageOutput + captureConnection:captureConnection + capturerState:state]; + } +#pragma clang diagnostic pop + } + }; + // We need to call this on main thread and blocking. + [[SCQueuePerformer mainQueuePerformer] performAndWait:legacyStillImageCaptureBlock]; +} + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" +- (void)_captureStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput + captureConnection:(AVCaptureConnection *)captureConnection + capturerState:(SCManagedCapturerState *)state +{ + [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVStillImageCaptureAsynchronous"]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVStillImageCaptureAsynchronous"]; + @try { + [stillImageOutput + captureStillImageAsynchronouslyFromConnection:captureConnection + completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { + if (imageDataSampleBuffer) { + NSData *imageData = [AVCaptureStillImageOutput + jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; + [self + _legacyStillImageCaptureDidSucceedWithImageData:imageData + sampleBuffer: + imageDataSampleBuffer + cameraInfo: + cameraInfoForBuffer( + imageDataSampleBuffer) + error:error]; + } else { + if (error.domain == AVFoundationErrorDomain && error.code == -11800) { + // iOS 7 "unknown error"; works if we retry + [self _legacyStillImageCaptureWillRetryWithError:error]; + } else { + [self _legacyStillImageCaptureDidFailWithError:error]; + } + } + }]; + } @catch (NSException *e) { + [SCCrashLogger logHandledException:e]; + [self _legacyStillImageCaptureDidFailWithError: + [NSError errorWithDomain:kSCLegacyStillImageCaptureDefaultMethodErrorDomain + code:kSCLegacyStillImageCaptureDefaultMethodErrorEncounteredException + userInfo:@{ + @"exception" : e + }]]; + } +} + +- (void)_captureStabilizedStillImageWithStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput + captureConnection:(AVCaptureConnection *)captureConnection + capturerState:(SCManagedCapturerState *)state +{ + [[SCLogger sharedInstance] logStillImageCaptureApi:@"AVStillImageOutputCaptureBracketAsynchronously"]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVStillImageOutputCaptureBracketAsynchronously"]; + NSArray *bracketArray = [self _bracketSettingsArray:captureConnection]; + @try { + [stillImageOutput + captureStillImageBracketAsynchronouslyFromConnection:captureConnection + withSettingsArray:bracketArray + completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, + AVCaptureBracketedStillImageSettings *settings, + NSError *err) { + if (!imageDataSampleBuffer) { + [self _legacyStillImageCaptureDidFailWithError:err]; + return; + } + NSData *jpegData = [AVCaptureStillImageOutput + jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; + [self + _legacyStillImageCaptureDidSucceedWithImageData:jpegData + sampleBuffer: + imageDataSampleBuffer + cameraInfo: + cameraInfoForBuffer( + imageDataSampleBuffer) + error:nil]; + }]; + } @catch (NSException *e) { + [SCCrashLogger logHandledException:e]; + [self _legacyStillImageCaptureDidFailWithError: + [NSError 
errorWithDomain:kSCLegacyStillImageCaptureLensStabilizationMethodErrorDomain + code:kSCLegacyStillImageCaptureLensStabilizationMethodErrorEncounteredException + userInfo:@{ + @"exception" : e + }]]; + } +} +#pragma clang diagnostic pop + +- (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection +{ + NSInteger const stillCount = 1; + NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount]; + AVCaptureDevice *device = [stillImageConnection inputDevice]; + AVCaptureManualExposureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings + manualExposureSettingsWithExposureDuration:device.exposureDuration + ISO:AVCaptureISOCurrent]; + for (NSInteger i = 0; i < stillCount; i++) { + [bracketSettingsArray addObject:settings]; + } + return [bracketSettingsArray copy]; +} + +- (void)_legacyStillImageCaptureDidSucceedWithImageData:(NSData *)imageData + sampleBuffer:(CMSampleBufferRef)sampleBuffer + cameraInfo:(NSDictionary *)cameraInfo + error:(NSError *)error +{ + [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()]; + [[SCCoreCameraLogger sharedInstance] + logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()]; + if (sampleBuffer) { + CFRetain(sampleBuffer); + } + [_performer performImmediatelyIfCurrentPerformer:^{ + UIImage *fullScreenImage = [self imageFromData:imageData + currentZoomFactor:_zoomFactor + targetAspectRatio:_aspectRatio + fieldOfView:_fieldOfView + state:_state + sampleBuffer:sampleBuffer]; + + sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler; + _completionHandler = nil; + completionHandler(fullScreenImage, cameraInfo, error); + if (sampleBuffer) { + CFRelease(sampleBuffer); + } + }]; +} + +- (void)_legacyStillImageCaptureDidFailWithError:(NSError *)error +{ + [_performer performImmediatelyIfCurrentPerformer:^{ + sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler; + _completionHandler = nil; + completionHandler(nil, nil, error); + }]; +} + +- (void)_legacyStillImageCaptureWillRetryWithError:(NSError *)error +{ + if (_retries-- > 0) { + [_performer perform:^{ + [self _captureStillImageWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo]; + } + after:kSCCameraRetryInterval]; + } else { + [self _legacyStillImageCaptureDidFailWithError:error]; + } +} + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" +- (AVCaptureConnection *)_captureConnectionFromStillImageOutput:(AVCaptureStillImageOutput *)stillImageOutput +#pragma clang diagnostic pop +{ + SCTraceStart(); + SCAssert([_performer isCurrentPerformer], @""); + NSArray *connections = [stillImageOutput.connections copy]; + for (AVCaptureConnection *connection in connections) { + for (AVCaptureInputPort *port in [connection inputPorts]) { + if ([[port mediaType] isEqual:AVMediaTypeVideo]) { + return connection; + } + } + } + return nil; +} + +@end diff --git a/ManagedCapturer/SCManagedPhotoCapturer.h b/ManagedCapturer/SCManagedPhotoCapturer.h new file mode 100644 index 0000000..5e1da9b --- /dev/null +++ b/ManagedCapturer/SCManagedPhotoCapturer.h @@ -0,0 +1,13 @@ +// +// SCManagedPhotoCapturer.h +// Snapchat +// +// Created by Chao Pang on 10/5/16. +// Copyright © 2016 Snapchat, Inc. All rights reserved. 
+// + +#import "SCManagedStillImageCapturer.h" + +@interface SCManagedPhotoCapturer : SCManagedStillImageCapturer + +@end
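For reference, the _getSumRGBAFromData:stripLength:bufferLength:bitmapInfo: method in SCManagedFrameHealthChecker above relies on a vDspColorElementSum() helper that is declared outside this part of the patch. The following is a minimal sketch of what such a helper could look like, assuming it sums a single color channel over a strided RGBA/BGRA byte buffer using Accelerate's vDSP; it is illustrative only and is not the code shipped in this change.

#import <Accelerate/Accelerate.h>
#import <Foundation/Foundation.h>

// Hypothetical helper: sums one color channel of a sampled RGBA/BGRA buffer.
// data points at the first byte of the channel, stripLength is the byte stride
// between sampled pixels (a multiple of 4), and bufferLength is the number of
// samples to accumulate.
static float vDspColorElementSum(const Byte *data, NSInteger stripLength, NSInteger bufferLength)
{
    float *floats = (float *)malloc(sizeof(float) * bufferLength);
    if (!floats) {
        return 0.f;
    }
    // Convert every stripLength-th byte to float, then sum the converted values.
    vDSP_vfltu8(data, stripLength, floats, 1, bufferLength);
    float sum = 0.f;
    vDSP_sve(floats, 1, &sum, bufferLength);
    free(floats);
    return sum;
}

Dividing each returned channel sum by samplesCount, as _getFrameHealthInfoForImage: does, yields the per-channel averages that are compared against kSCManagedFrameHealthCheckerPossibleBlackThreshold (and against 250 for alpha) to flag possible or totally black frames.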