Add files via upload
parent 8fafd1b29a
commit 99f9f2a76f
36
ManagedCapturer/ARConfiguration+SCConfiguration.m
Normal file
@ -0,0 +1,36 @@
//
//  ARConfiguration+SCConfiguration.m
//  Snapchat
//
//  Created by Max Goedjen on 11/7/17.
//

#import "ARConfiguration+SCConfiguration.h"

#import "SCCapturerDefines.h"

@implementation ARConfiguration (SCConfiguration)

+ (BOOL)sc_supportedForDevicePosition:(SCManagedCaptureDevicePosition)position
{
    return [[[self sc_configurationForDevicePosition:position] class] isSupported];
}

+ (ARConfiguration *)sc_configurationForDevicePosition:(SCManagedCaptureDevicePosition)position
{
    if (@available(iOS 11.0, *)) {
        if (position == SCManagedCaptureDevicePositionBack) {
            ARWorldTrackingConfiguration *config = [[ARWorldTrackingConfiguration alloc] init];
            config.planeDetection = ARPlaneDetectionHorizontal;
            config.lightEstimationEnabled = NO;
            return config;
        } else {
#ifdef SC_USE_ARKIT_FACE
            return [[ARFaceTrackingConfiguration alloc] init];
#endif
        }
    }
    return nil;
}

@end
15
ManagedCapturer/AVCaptureConnection+InputDevice.h
Normal file
@ -0,0 +1,15 @@
//
//  AVCaptureConnection+InputDevice.h
//  Snapchat
//
//  Created by William Morriss on 1/20/15.
//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import <AVFoundation/AVFoundation.h>

@interface AVCaptureConnection (InputDevice)

- (AVCaptureDevice *)inputDevice;

@end
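A minimal (hypothetical) call site for this category, assuming you already hold an AVCaptureConnection from a session output:

    AVCaptureConnection *connection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *device = [connection inputDevice]; // asserts if the port is not a device input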
25
ManagedCapturer/AVCaptureConnection+InputDevice.m
Normal file
@ -0,0 +1,25 @@
//
//  AVCaptureConnection+InputDevice.m
//  Snapchat
//
//  Created by William Morriss on 1/20/15.
//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import "AVCaptureConnection+InputDevice.h"

#import <SCFoundation/SCAssertWrapper.h>

@implementation AVCaptureConnection (InputDevice)

- (AVCaptureDevice *)inputDevice
{
    NSArray *inputPorts = self.inputPorts;
    AVCaptureInputPort *port = [inputPorts firstObject];
    SCAssert([port.input isKindOfClass:[AVCaptureDeviceInput class]], @"unexpected port");
    AVCaptureDeviceInput *deviceInput = (AVCaptureDeviceInput *)port.input;
    AVCaptureDevice *device = deviceInput.device;
    return device;
}

@end
34
ManagedCapturer/AVCaptureDevice+ConfigurationLock.h
Normal file
@ -0,0 +1,34 @@
//
//  AVCaptureDevice+ConfigurationLock.h
//  Snapchat
//
//  Created by Derek Peirce on 4/19/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

@interface AVCaptureDevice (ConfigurationLock)

/*
 Locks this AVCaptureDevice, runs the task, then unlocks the device. The task usually mutates
 AVCaptureDevice settings.
 Returns a boolean telling you whether the task ran successfully, so you can adjust your strategy
 for handling the failure. In some cases we have no good mechanism to handle the failure: e.g. if a
 re-focus fails, what is the next step? Popping up an alert view is intrusive; staying silent
 confuses the user. Precisely because error handling is difficult, we at least notify you when the
 task fails.
 If the task does not run successfully, we also log an event using SCLogger for better visibility.
 */
- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task;

/*
 Same as the method above, except that it retries the lock a given number of times. Pass a number
 less than or equal to 2.
 When retry equals 0, we try to lock only once.
 When retry equals 1, we retry once if the first try fails.
 ...
 */
- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task retry:(NSUInteger)retryTimes;

@end
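A usage sketch (hypothetical call site; the task name and block body are illustrative):

    BOOL success = [device runTask:@"continuous autofocus"
           withLockedConfiguration:^{
               device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
           }];
    if (!success) {
        // The lock was never acquired; the failure has already been logged through SCLogger.
    }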
47
ManagedCapturer/AVCaptureDevice+ConfigurationLock.m
Normal file
@ -0,0 +1,47 @@
//
//  AVCaptureDevice+ConfigurationLock.m
//  Snapchat
//
//  Created by Derek Peirce on 4/19/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "AVCaptureDevice+ConfigurationLock.h"

#import "SCLogger+Camera.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCLogger/SCLogger.h>

@implementation AVCaptureDevice (ConfigurationLock)

- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task
{
    return [self runTask:taskName withLockedConfiguration:task retry:0];
}

- (BOOL)runTask:(NSString *)taskName withLockedConfiguration:(void (^)(void))task retry:(NSUInteger)retryTimes
{
    SCAssert(taskName, @"camera logger taskString should not be empty");
    // retryTimes is unsigned, so only the upper bound needs checking.
    SCAssert(retryTimes <= 2, @"retry times should be equal to or below 2.");
    NSError *error = nil;
    BOOL deviceLockSuccess = NO;
    NSUInteger retryCounter = 0;
    while (retryCounter <= retryTimes && !deviceLockSuccess) {
        deviceLockSuccess = [self lockForConfiguration:&error];
        retryCounter++;
    }
    if (deviceLockSuccess) {
        task();
        [self unlockForConfiguration];
        SCLogCoreCameraInfo(@"AVCapture Device setting success, task:%@ tryCount:%zu", taskName,
                            (unsigned long)retryCounter);
    } else {
        SCLogCoreCameraError(@"AVCapture Device encountered error when %@: %@", taskName, error);
        [[SCLogger sharedInstance] logManagedCapturerSettingFailure:taskName error:error];
    }
    return deviceLockSuccess;
}

@end
@ -0,0 +1,113 @@
//
//  SCCaptureConfiguration.h
//  Snapchat
//
//  Created by Lin Jia on 10/3/17.
//

#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerState.h"
#import "SCVideoCaptureSessionInfo.h"

#import <SCFoundation/SCQueuePerformer.h>

#import <Looksery/LSAGLView.h>

#import <Foundation/Foundation.h>

/*
 SCCaptureConfiguration is the configuration class customers use to configure the camera. This is
 how to use it:

 SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];

 // Conduct the settings here, e.g.:
 configuration.torchActive = YES;

 // Commit your configuration
 [captureConfigurator commitConfiguration:configuration
                        completionHandler:handler]

 Here are several interesting facts about SCCaptureConfiguration:

 1) Though SCCaptureConfiguration has many parameters, you do not need to care about the parameters
 you do not intend to set. For example, if you only want to activate night mode, here is the code:

 SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];

 configuration.isNightModeActive = YES;

 [captureConfigurator commitConfiguration:configuration
                        completionHandler:handler]

 That is it.

 2) You can set multiple configuration settings and then commit. Nothing happens before you commit,
 e.g.:

 SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];

 configuration.isNightModeActive = YES;
 configuration.zoomFactor = 5;
 configuration.lensesActive = YES;

 [captureConfigurator commitConfiguration:configuration
                        completionHandler:handler]

 3) Committing a configuration means the configuration is gone. If you set parameters on a
 configuration after it is committed, it will crash on debug builds; on other builds, such as
 production, the setting will be ignored, e.g.:

 SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];

 configuration.isNightModeActive = YES;

 [captureConfigurator commitConfiguration:configuration
                        completionHandler:handler]

 // The line below will crash on debug builds and be ignored on other builds.
 configuration.zoomFactor = 5;

 4) Committing a configuration is an atomic action: all changes a customer wants to make to the
 camera happen as a group. If 2 customers commit at the same time, we handle the commits one by one.

 5) We are still figuring out which parameters should be in this configuration; parameters could be
 added or deleted later. In the end, the configuration is going to be the only way customers
 configure the camera.
 */

@interface SCCaptureConfiguration : NSObject

@property (nonatomic, assign) BOOL isRunning;

@property (nonatomic, assign) BOOL isNightModeActive;

@property (nonatomic, assign) BOOL lowLightCondition;

@property (nonatomic, assign) BOOL adjustingExposure;

@property (nonatomic, assign) SCManagedCaptureDevicePosition devicePosition;

@property (nonatomic, assign) CGFloat zoomFactor;

@property (nonatomic, assign) BOOL flashSupported;

@property (nonatomic, assign) BOOL torchSupported;

@property (nonatomic, assign) BOOL flashActive;

@property (nonatomic, assign) BOOL torchActive;

@property (nonatomic, assign) BOOL lensesActive;

@property (nonatomic, assign) BOOL arSessionActive;

@property (nonatomic, assign) BOOL liveVideoStreaming;

@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;

@property (nonatomic, strong) LSAGLView *videoPreviewGLView;

@property (nonatomic, assign) SCVideoCaptureSessionInfo captureSessionInfo;

@end
@ -0,0 +1,75 @@
//
//  SCCaptureConfiguration.m
//  Snapchat
//
//  Created by Lin Jia on 10/3/17.
//

#import "SCCaptureConfiguration.h"
#import "SCCaptureConfiguration_Private.h"

#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCAssertWrapper.h>

@interface SCCaptureConfiguration () {
    BOOL _sealed;
    NSMutableSet<SCCaptureConfigurationDirtyKey *> *_dirtyKeys;
}
@end

@implementation SCCaptureConfiguration

- (instancetype)init
{
    self = [super init];
    if (self) {
        _dirtyKeys = [[NSMutableSet<SCCaptureConfigurationDirtyKey *> alloc] init];
        _sealed = NO;
    }
    return self;
}

- (void)setIsRunning:(BOOL)running
{
    if ([self _configurationSealed]) {
        return;
    }
    _isRunning = running;
    [_dirtyKeys addObject:@(SCCaptureConfigurationKeyIsRunning)];
}

/*
 All remaining setters will be added later. They follow the format of setIsRunning.
 */

@end

@implementation SCCaptureConfiguration (internalMethods)

- (NSArray *)dirtyKeys
{
    if (!_sealed && SCIsDebugBuild()) {
        SCAssert(NO, @"Configuration not sealed yet, setting is still happening!");
    }
    return [_dirtyKeys allObjects];
}

- (void)seal
{
    _sealed = YES;
}

- (BOOL)_configurationSealed
{
    if (_sealed) {
        if (SCIsDebugBuild()) {
            SCAssert(NO, @"Trying to set a property after the configuration was committed to the configurator");
        }
        return YES;
    } else {
        return NO;
    }
}

@end
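The comment above says future setters follow the setIsRunning format; as a hedged sketch (not part of this commit), a zoom-factor setter under that convention would look like:

    - (void)setZoomFactor:(CGFloat)zoomFactor
    {
        if ([self _configurationSealed]) {
            return;
        }
        _zoomFactor = zoomFactor;
        [_dirtyKeys addObject:@(SCCaptureConfigurationKeyZoomFactor)];
    }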
@ -0,0 +1,27 @@
//
//  SCCaptureConfigurationAnnouncer.h
//  Snapchat
//
//  Created by Lin Jia on 10/2/17.
//

#import "SCCaptureConfigurationListener.h"

#import <Foundation/Foundation.h>

/*
 All APIs are thread safe. The announcer does not retain your object, so even if a customer forgets
 to call removeListener, no stale listener objects are kept alive.
 */
@interface SCCaptureConfigurationAnnouncer : NSObject

/*
 When a customer adds an object as a listener, that object receives an update with the current
 truth. That is the object's chance to adjust itself to the camera's current configuration.
 */
- (void)addListener:(id<SCCaptureConfigurationListener>)listener;

- (void)removeListener:(id<SCCaptureConfigurationListener>)listener;

@end
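A hedged usage sketch (the configurator variable is illustrative). Registration immediately delivers the current truth, and since listeners are held weakly, explicit removal is optional but tidy:

    [configurator.announcer addListener:self];   // delivers the current truth right away
    ...
    [configurator.announcer removeListener:self];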
@ -0,0 +1,67 @@
//
//  SCCaptureConfigurationAnnouncer.m
//  Snapchat
//
//  Created by Lin Jia on 10/2/17.
//

#import "SCCaptureConfigurationAnnouncer.h"
#import "SCCaptureConfigurationAnnouncer_Private.h"

#import "SCCaptureConfigurator.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCPerforming.h>

@interface SCCaptureConfigurationAnnouncer () {
    NSHashTable<id<SCCaptureConfigurationListener>> *_listeners;
    SCQueuePerformer *_performer;
    __weak SCCaptureConfigurator *_configurator;
}
@end

@implementation SCCaptureConfigurationAnnouncer

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer configurator:(SCCaptureConfigurator *)configurator
{
    self = [super init];
    if (self) {
        _listeners = [NSHashTable<id<SCCaptureConfigurationListener>> hashTableWithOptions:NSHashTableWeakMemory];
        SCAssert(performer, @"performer should not be nil");
        _performer = performer;
        _configurator = configurator;
    }
    return self;
}

- (void)addListener:(id<SCCaptureConfigurationListener>)listener
{
    [_performer perform:^{
        SCAssert(listener, @"listener should not be nil");
        [_listeners addObject:listener];
        [listener captureConfigurationDidChangeTo:_configurator.currentConfiguration];
    }];
}

- (void)removeListener:(id<SCCaptureConfigurationListener>)listener
{
    [_performer perform:^{
        SCAssert(listener, @"listener should not be nil");
        [_listeners removeObject:listener];
    }];
}

- (void)deliverConfigurationChange:(id<SCManagedCapturerState>)configuration
{
    SCAssertPerformer(_performer);
    for (id<SCCaptureConfigurationListener> listener in _listeners) {
        [listener captureConfigurationDidChangeTo:configuration];
    }
}

- (void)dealloc
{
    [_listeners removeAllObjects];
}

@end
@ -0,0 +1,33 @@
//
//  SCCaptureConfigurationAnnouncer_Private.h
//  Snapchat
//
//  Created by Lin Jia on 10/2/17.
//

#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCapturerState.h"

#import <SCFoundation/SCQueuePerformer.h>

@class SCCaptureConfigurator;

/*
 This private header is only going to be used by SCCaptureConfigurator. Other customers should only
 use the public header.
 */
@interface SCCaptureConfigurationAnnouncer ()
/*
 The announcer is instantiated by SCCaptureConfigurator and takes in a queue performer. The design
 is that the announcer and the configurator share the same serial queue to avoid races. This is
 something we could change later.
 */
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer configurator:(SCCaptureConfigurator *)configurator;

/*
 The API below is called by the configurator to notify listeners that the configuration has changed.
 */
- (void)deliverConfigurationChange:(id<SCManagedCapturerState>)configuration;

@end
@ -0,0 +1,23 @@
//
//  SCCaptureConfigurationListener.h
//  Snapchat
//
//  Created by Lin Jia on 10/2/17.
//

#import "SCManagedCapturerState.h"

#import <Foundation/Foundation.h>

@class SCCaptureConfiguration;

/*
 As a listener to the camera core's configuration, you get an update whenever the configuration
 changes: an immutable state object representing the current truth.
 */

@protocol SCCaptureConfigurationListener <NSObject>

- (void)captureConfigurationDidChangeTo:(id<SCManagedCapturerState>)state;

@end
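As a sketch, a hypothetical conforming listener (not part of this commit) looks like:

    @interface SCTorchButtonController : NSObject <SCCaptureConfigurationListener>
    @end

    @implementation SCTorchButtonController

    - (void)captureConfigurationDidChangeTo:(id<SCManagedCapturerState>)state
    {
        // React here to the immutable snapshot of the current truth.
    }

    @end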
@ -0,0 +1,46 @@
//
//  SCCaptureConfiguration_Private.h
//  Snapchat
//
//  Created by Lin Jia on 10/3/17.
//

#import "SCCaptureConfiguration.h"

typedef NSNumber SCCaptureConfigurationDirtyKey;

/*
 The key values used to identify dirty keys in SCCaptureConfiguration.
 A dirty key is a key whose value the customer has changed.

 E.g., if the customer toggles the device position, the dirty keys will contain
 SCCaptureConfigurationKeyDevicePosition.

 This list is not complete, and it is only a draft now. It will be gradually tuned while we work on
 the APIs.
 */

typedef NS_ENUM(NSUInteger, SCCaptureConfigurationKey) {
    SCCaptureConfigurationKeyIsRunning,
    SCCaptureConfigurationKeyIsNightModeActive,
    SCCaptureConfigurationKeyLowLightCondition,
    SCCaptureConfigurationKeyDevicePosition,
    SCCaptureConfigurationKeyZoomFactor,
    SCCaptureConfigurationKeyFlashActive,
    SCCaptureConfigurationKeyTorchActive,
    SCCaptureConfigurationKeyARSessionActive,
    SCCaptureConfigurationKeyLensesActive,
    SCCaptureConfigurationKeyVideoRecording,
};

@interface SCCaptureConfiguration (internalMethods)

// Returns dirtyKeys, which identify the parameters the customer wants to set.
- (NSArray *)dirtyKeys;

// Called by SCCaptureConfigurator to seal a configuration, so future changes are ignored.
- (void)seal;

- (BOOL)_configurationSealed;

@end
@ -0,0 +1,59 @@
//
//  SCCaptureConfigurator.h
//  Snapchat
//
//  Created by Lin Jia on 10/2/17.
//

#import "SCCaptureConfiguration.h"
#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCaptureDevice.h"
#import "SCVideoCaptureSessionInfo.h"

#import <SCFoundation/SCQueuePerformer.h>

#import <Looksery/LSAGLView.h>

#import <Foundation/Foundation.h>

/*
 SCCaptureConfigurator is the class you use to configure the camera hardware settings, such as
 setting the camera to front or back, setting the camera hardware to a certain resolution, or
 activating night mode.

 You can use this class for many things:

 a) Do a one-time poke to check the current camera configuration via currentConfiguration.

 Note that we represent a configuration via id<SCManagedCapturerState>, which is going to be an
 immutable object.

 b) Register as a listener of configuration changes via the announcer.
 Every time the camera configuration changes, you will receive an update.

 c) Set the configuration via the commitConfiguration API. You convey your setting intention via
 SCCaptureConfiguration.

 You can register a completionHandler to be called after your configuration is done.

 Inside the completionHandler, we pass you an error if one happens, along with a boolean
 cameraChanged. If your configuration already equals the camera's current configuration, we do not
 change the camera, and the boolean will be false.

 d) All APIs are thread safe.
 */

typedef void (^SCCaptureConfigurationCompletionHandler)(NSError *error, BOOL cameraChanged);

@interface SCCaptureConfigurator : NSObject

@property (nonatomic, strong, readonly) SCCaptureConfigurationAnnouncer *announcer;

@property (nonatomic, strong, readonly) id<SCManagedCapturerState> currentConfiguration;

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer;

- (void)commitConfiguration:(SCCaptureConfiguration *)configuration
          completionHandler:(SCCaptureConfigurationCompletionHandler)completionHandler;

@end
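A hedged end-to-end sketch (hypothetical call site, following the usage described above):

    SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
    configuration.devicePosition = SCManagedCaptureDevicePositionBack;
    configuration.zoomFactor = 2;
    [configurator commitConfiguration:configuration
                    completionHandler:^(NSError *error, BOOL cameraChanged) {
                        // error is nil on success; cameraChanged reports whether the
                        // hardware configuration actually had to change.
                    }];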
@ -0,0 +1,56 @@
//
//  SCCaptureConfigurator.m
//  Snapchat
//
//  Created by Lin Jia on 10/2/17.
//

#import "SCCaptureConfigurator.h"

#import "SCCaptureConfigurationAnnouncer_Private.h"
#import "SCCaptureConfiguration_Private.h"

#import <SCFoundation/SCAssertWrapper.h>

@interface SCCaptureConfigurator () {
    SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureConfigurator

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
{
    self = [super init];
    if (self) {
        _announcer = [[SCCaptureConfigurationAnnouncer alloc] initWithPerformer:performer configurator:self];
        _performer = performer;
        // TODO: initialize _currentConfiguration
    }
    return self;
}

- (void)commitConfiguration:(SCCaptureConfiguration *)configuration
          completionHandler:(SCCaptureConfigurationCompletionHandler)completionHandler
{
    [configuration seal];
    [_performer perform:^() {
        SCAssert(configuration, @"Configuration must be a valid input parameter");
        NSArray<SCCaptureConfigurationDirtyKey *> *dirtyKeys = [configuration dirtyKeys];
        for (SCCaptureConfigurationDirtyKey *key in dirtyKeys) {
            [self _processKey:[key integerValue] configuration:configuration];
        }
        if (completionHandler) {
            // TODO: pass in the right parameters.
            completionHandler(nil, YES);
        }
    }];
}

- (void)_processKey:(SCCaptureConfigurationKey)key configuration:(SCCaptureConfiguration *)configuration
{
    // Tune the hardware depending on which key is dirty and on the value inside the configuration.
}

@end
42
ManagedCapturer/CapturerV2/Core/SCCaptureCore.h
Normal file
@ -0,0 +1,42 @@
//
//  SCCaptureCore.h
//  Snapchat
//
//  Created by Lin Jia on 10/2/17.
//

#import "SCCaptureStateMachineContext.h"
#import "SCCapturer.h"

#import <SCFoundation/SCPerforming.h>

#import <Foundation/Foundation.h>

@class SCCaptureConfigurator;

/*
 SCCaptureCore abstracts away the hardware aspect of the camera. SCCaptureCore is the V2 version of
 SCManagedCapturerV1.

 SCCaptureCore itself actually does very little. Its main job is to expose the camera hardware APIs
 to outside customers. The actual heavy lifting is done by delegating the jobs to multiple worker
 classes.

 We generally divide the operation of the camera hardware into 2 categories:

 1) making the camera hardware do state transitions, such as what is shown in this graph:
 https://docs.google.com/presentation/d/1KWk-XSgO0wFAjBZXsl_OnHBGpi_pd9-ds6Wje8vX-0s/edit#slide=id.g2017e46295_1_10

 2) configuring camera hardware settings, such as setting the camera to front or back, setting the
 camera hardware to a certain resolution, or activating night mode.

 Accordingly, we create 2 worker classes to do the heavy lifting. Both of them are under
 construction. Feel free to check out SCCaptureConfigurator, which is responsible for 2).
 */

@interface SCCaptureCore : NSObject <SCCapturer>

@property (nonatomic, strong, readonly) SCCaptureStateMachineContext *stateMachine;

@end
475
ManagedCapturer/CapturerV2/Core/SCCaptureCore.m
Normal file
@ -0,0 +1,475 @@
//
//  SCCaptureCore.m
//  Snapchat
//
//  Created by Lin Jia on 10/2/17.
//

#import "SCCaptureCore.h"

#import "SCCaptureDeviceAuthorizationChecker.h"
#import "SCCaptureResource.h"
#import "SCCaptureWorker.h"
#import "SCManagedCapturePreviewLayerController.h"
#import "SCManagedCapturerGLViewManagerAPI.h"
#import "SCManagedCapturerLSAComponentTrackerAPI.h"
#import "SCManagedCapturerV1_Private.h"

#import <SCAudio/SCAudioConfiguration.h>
#import <SCFoundation/SCAssertWrapper.h>

static const char *kSCCaptureDeviceAuthorizationManagerQueueLabel =
    "com.snapchat.capture_device_authorization_checker_queue";

@implementation SCCaptureCore {
    SCManagedCapturerV1 *_managedCapturerV1;
    SCQueuePerformer *_queuePerformer;
    SCCaptureDeviceAuthorizationChecker *_authorizationChecker;
}
@synthesize blackCameraDetector = _blackCameraDetector;

- (instancetype)init
{
    SCTraceStart();
    SCAssertMainThread();
    self = [super init];
    if (self) {
        _managedCapturerV1 = [SCManagedCapturerV1 sharedInstance];
        SCCaptureResource *resource = _managedCapturerV1.captureResource;
        _queuePerformer = resource.queuePerformer;
        _stateMachine = [[SCCaptureStateMachineContext alloc] initWithResource:resource];
        SCQueuePerformer *authorizationCheckPerformer =
            [[SCQueuePerformer alloc] initWithLabel:kSCCaptureDeviceAuthorizationManagerQueueLabel
                                   qualityOfService:QOS_CLASS_USER_INTERACTIVE
                                          queueType:DISPATCH_QUEUE_SERIAL
                                            context:SCQueuePerformerContextCamera];
        _authorizationChecker =
            [[SCCaptureDeviceAuthorizationChecker alloc] initWithPerformer:authorizationCheckPerformer];
    }
    return self;
}

- (id<SCManagedCapturerLensAPI>)lensProcessingCore
{
    return _managedCapturerV1.lensProcessingCore;
}

// For APIs inside the SCCapturer protocol that are related to the capture state machine, we
// delegate to the state machine.
- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                            completionHandler:(dispatch_block_t)completionHandler
                                      context:(NSString *)context
{
    [_stateMachine initializeCaptureWithDevicePositionAsynchronously:devicePosition
                                                   completionHandler:completionHandler
                                                             context:context];
}

- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
                                                             context:(NSString *)context
{
    return [_stateMachine startRunningWithContext:context completionHandler:completionHandler];
}

#pragma mark - Recording / Capture

- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                               context:(NSString *)context
{
    [_stateMachine captureStillImageAsynchronouslyWithAspectRatio:aspectRatio
                                                 captureSessionID:captureSessionID
                                                completionHandler:completionHandler
                                                          context:context];
}

- (void)stopRunningAsynchronously:(SCCapturerToken *)token
                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                          context:(NSString *)context
{
    [_stateMachine stopRunningWithCapturerToken:token completionHandler:completionHandler context:context];
}

- (void)stopRunningAsynchronously:(SCCapturerToken *)token
                completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                            after:(NSTimeInterval)delay
                          context:(NSString *)context
{
    [_stateMachine stopRunningWithCapturerToken:token after:delay completionHandler:completionHandler context:context];
}

#pragma mark - Scanning

- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context
{
    [_stateMachine startScanAsynchronouslyWithScanConfiguration:configuration context:context];
}

- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context
{
    [_stateMachine stopScanAsynchronouslyWithCompletionHandler:completionHandler context:context];
}

- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context
                                  audioConfiguration:(SCAudioConfiguration *)configuration
{
    [_stateMachine prepareForRecordingAsynchronouslyWithAudioConfiguration:configuration context:context];
}

- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                                    audioConfiguration:(SCAudioConfiguration *)configuration
                                           maxDuration:(NSTimeInterval)maxDuration
                                               fileURL:(NSURL *)fileURL
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                                               context:(NSString *)context
{
    [_stateMachine startRecordingWithOutputSettings:outputSettings
                                 audioConfiguration:configuration
                                        maxDuration:maxDuration
                                            fileURL:fileURL
                                   captureSessionID:captureSessionID
                                  completionHandler:completionHandler
                                            context:context];
}

- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context
{
    [_stateMachine stopRecordingWithContext:context];
}

- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context
{
    [_stateMachine cancelRecordingWithContext:context];
    [[self snapCreationTriggers] markSnapCreationEndWithContext:context];
}

#pragma mark -

- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
                                                  context:(NSString *)context
{
    [_managedCapturerV1 startStreamingAsynchronouslyWithCompletionHandler:completionHandler context:context];
}

- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
                                 context:(NSString *)context
{
    [_managedCapturerV1 addSampleBufferDisplayController:sampleBufferDisplayController context:context];
}

#pragma mark - Utilities

- (void)convertViewCoordinates:(CGPoint)viewCoordinates
             completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler
                       context:(NSString *)context
{
    [_managedCapturerV1 convertViewCoordinates:viewCoordinates completionHandler:completionHandler context:context];
}

- (void)detectLensCategoryOnNextFrame:(CGPoint)point
                               lenses:(NSArray<SCLens *> *)lenses
                           completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion
                              context:(NSString *)context
{
    [_managedCapturerV1 detectLensCategoryOnNextFrame:point lenses:lenses completion:completion context:context];
}

#pragma mark - Configurations

- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                      completionHandler:(dispatch_block_t)completionHandler
                                context:(NSString *)context
{
    [_managedCapturerV1 setDevicePositionAsynchronously:devicePosition
                                      completionHandler:completionHandler
                                                context:context];
}

- (void)setFlashActive:(BOOL)flashActive
     completionHandler:(dispatch_block_t)completionHandler
               context:(NSString *)context
{
    [_managedCapturerV1 setFlashActive:flashActive completionHandler:completionHandler context:context];
}

- (void)setLensesActive:(BOOL)lensesActive
      completionHandler:(dispatch_block_t)completionHandler
                context:(NSString *)context
{
    [_managedCapturerV1 setLensesActive:lensesActive completionHandler:completionHandler context:context];
}

- (void)setLensesActive:(BOOL)lensesActive
          filterFactory:(SCLookseryFilterFactory *)filterFactory
      completionHandler:(dispatch_block_t)completionHandler
                context:(NSString *)context
{
    [_managedCapturerV1 setLensesActive:lensesActive
                          filterFactory:filterFactory
                      completionHandler:completionHandler
                                context:context];
}

- (void)setLensesInTalkActive:(BOOL)lensesActive
            completionHandler:(dispatch_block_t)completionHandler
                      context:(NSString *)context
{
    [_managedCapturerV1 setLensesInTalkActive:lensesActive completionHandler:completionHandler context:context];
}

- (void)setTorchActiveAsynchronously:(BOOL)torchActive
                   completionHandler:(dispatch_block_t)completionHandler
                             context:(NSString *)context
{
    [_managedCapturerV1 setTorchActiveAsynchronously:torchActive completionHandler:completionHandler context:context];
}

- (void)setNightModeActiveAsynchronously:(BOOL)active
                       completionHandler:(dispatch_block_t)completionHandler
                                 context:(NSString *)context
{
    [_managedCapturerV1 setNightModeActiveAsynchronously:active completionHandler:completionHandler context:context];
}

- (void)lockZoomWithContext:(NSString *)context
{
    [_managedCapturerV1 lockZoomWithContext:context];
}

- (void)unlockZoomWithContext:(NSString *)context
{
    [_managedCapturerV1 unlockZoomWithContext:context];
}

- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context
{
    [_managedCapturerV1 setZoomFactorAsynchronously:zoomFactor context:context];
}

- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor
                       devicePosition:(SCManagedCaptureDevicePosition)devicePosition
                              context:(NSString *)context
{
    [_managedCapturerV1 resetZoomFactorAsynchronously:zoomFactor devicePosition:devicePosition context:context];
}

- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                        fromUser:(BOOL)fromUser
                               completionHandler:(dispatch_block_t)completionHandler
                                         context:(NSString *)context
{
    [_managedCapturerV1 setExposurePointOfInterestAsynchronously:pointOfInterest
                                                        fromUser:fromUser
                                               completionHandler:completionHandler
                                                         context:context];
}

- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                completionHandler:(dispatch_block_t)completionHandler
                                          context:(NSString *)context
{
    [_managedCapturerV1 setAutofocusPointOfInterestAsynchronously:pointOfInterest
                                                completionHandler:completionHandler
                                                          context:context];
}

- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest
                                   completionHandler:(dispatch_block_t)completionHandler
                                             context:(NSString *)context
{
    [_managedCapturerV1 setPortraitModePointOfInterestAsynchronously:pointOfInterest
                                                   completionHandler:completionHandler
                                                             context:context];
}

- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
                                                                  context:(NSString *)context
{
    [_managedCapturerV1 continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:completionHandler
                                                                                  context:context];
}

// These application lifecycle methods are called explicitly from SCAppDelegate so that we get the
// latest information.
- (void)applicationDidEnterBackground
{
    [_managedCapturerV1 applicationDidEnterBackground];
}

- (void)applicationWillEnterForeground
{
    [_managedCapturerV1 applicationWillEnterForeground];
}

- (void)applicationDidBecomeActive
{
    [_managedCapturerV1 applicationDidBecomeActive];
}

- (void)applicationWillResignActive
{
    [_managedCapturerV1 applicationWillResignActive];
}

- (void)mediaServicesWereReset
{
    [_managedCapturerV1 mediaServicesWereReset];
}

- (void)mediaServicesWereLost
{
    [_managedCapturerV1 mediaServicesWereLost];
}

#pragma mark - Add / Remove Listener

- (void)addListener:(id<SCManagedCapturerListener>)listener
{
    [_managedCapturerV1 addListener:listener];
}

- (void)removeListener:(id<SCManagedCapturerListener>)listener
{
    [_managedCapturerV1 removeListener:listener];
}

- (void)addVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener
{
    [_managedCapturerV1 addVideoDataSourceListener:listener];
}

- (void)removeVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener
{
    [_managedCapturerV1 removeVideoDataSourceListener:listener];
}

- (void)addDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    [_managedCapturerV1 addDeviceCapacityAnalyzerListener:listener];
}

- (void)removeDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
    [_managedCapturerV1 removeDeviceCapacityAnalyzerListener:listener];
}

- (NSString *)debugInfo
{
    return [_managedCapturerV1 debugInfo];
}

- (id<SCManagedVideoDataSource>)currentVideoDataSource
{
    return [_managedCapturerV1 currentVideoDataSource];
}

// For APIs inside the SCCapturer protocol that are not related to the capture state machine, we
// delegate directly to V1.
- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback
{
    [_managedCapturerV1 checkRestrictedCamera:callback];
}

- (void)recreateAVCaptureSession
{
    [_managedCapturerV1 recreateAVCaptureSession];
}

#pragma mark -

- (CMTime)firstWrittenAudioBufferDelay
{
    return [SCCaptureWorker firstWrittenAudioBufferDelay:_managedCapturerV1.captureResource];
}

- (BOOL)audioQueueStarted
{
    return [SCCaptureWorker audioQueueStarted:_managedCapturerV1.captureResource];
}

- (BOOL)isLensApplied
{
    return [SCCaptureWorker isLensApplied:_managedCapturerV1.captureResource];
}

- (BOOL)isVideoMirrored
{
    return [SCCaptureWorker isVideoMirrored:_managedCapturerV1.captureResource];
}

- (SCVideoCaptureSessionInfo)activeSession
{
    return _managedCapturerV1.activeSession;
}

- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector
                         deviceMotionProvider:(id<SCDeviceMotionProvider>)deviceMotionProvider
                             fileInputDecider:(id<SCFileInputDecider>)fileInputDecider
                       arImageCaptureProvider:(id<SCManagedCapturerARImageCaptureProvider>)arImageCaptureProvider
                                glviewManager:(id<SCManagedCapturerGLViewManagerAPI>)glViewManager
                              lensAPIProvider:(id<SCManagedCapturerLensAPIProvider>)lensAPIProvider
                          lsaComponentTracker:(id<SCManagedCapturerLSAComponentTrackerAPI>)lsaComponentTracker
    managedCapturerPreviewLayerControllerDelegate:
        (id<SCManagedCapturePreviewLayerControllerDelegate>)previewLayerControllerDelegate
{
    _managedCapturerV1.captureResource.blackCameraDetector = blackCameraDetector;
    _managedCapturerV1.captureResource.deviceMotionProvider = deviceMotionProvider;
    _managedCapturerV1.captureResource.fileInputDecider = fileInputDecider;
    _managedCapturerV1.captureResource.arImageCaptureProvider = arImageCaptureProvider;
    _managedCapturerV1.captureResource.videoPreviewGLViewManager = glViewManager;
    [_managedCapturerV1.captureResource.videoPreviewGLViewManager
        configureWithCaptureResource:_managedCapturerV1.captureResource];
    _managedCapturerV1.captureResource.lensAPIProvider = lensAPIProvider;
    _managedCapturerV1.captureResource.lsaTrackingComponentHandler = lsaComponentTracker;
    [_managedCapturerV1.captureResource.lsaTrackingComponentHandler
        configureWithCaptureResource:_managedCapturerV1.captureResource];
    _managedCapturerV1.captureResource.previewLayerControllerDelegate = previewLayerControllerDelegate;
    [SCManagedCapturePreviewLayerController sharedInstance].delegate =
        _managedCapturerV1.captureResource.previewLayerControllerDelegate;
}

- (SCBlackCameraDetector *)blackCameraDetector
{
    return _managedCapturerV1.captureResource.blackCameraDetector;
}

- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler:
            (sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler
                                                           context:(NSString *)context
{
    [_managedCapturerV1 captureSingleVideoFrameAsynchronouslyWithCompletionHandler:completionHandler context:context];
}

- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler
                                 context:(NSString *)context
{
    [_managedCapturerV1 sampleFrameWithCompletionHandler:completionHandler context:context];
}

- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context
{
    [_managedCapturerV1 addTimedTask:task context:context];
}

- (void)clearTimedTasksWithContext:(NSString *)context
{
    [_managedCapturerV1 clearTimedTasksWithContext:context];
}

- (BOOL)authorizedForVideoCapture
{
    return [_authorizationChecker authorizedForVideoCapture];
}

- (void)preloadVideoCaptureAuthorization
{
    [_authorizationChecker preloadVideoCaptureAuthorization];
}

#pragma mark - Snap Creation triggers

- (SCSnapCreationTriggers *)snapCreationTriggers
{
    return [_managedCapturerV1 snapCreationTriggers];
}

@end
47
ManagedCapturer/ImageProcessing/SCDepthBlurMetalModule.metal
Normal file
@ -0,0 +1,47 @@
//
//  SCDepthBlurMetalModule.metal
//  Snapchat
//
//  Created by Brian Ng on 10/31/17.
//

#include <metal_stdlib>
using namespace metal;

struct DepthBlurRenderData {
    float depthRange;
    float depthOffset;
    float depthBlurForegroundThreshold;
    float depthBlurBackgroundThreshold;
};

kernel void kernel_depth_blur(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                              texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                              texture2d<float, access::read> sourceDepthTexture [[texture(2)]],
                              texture2d<float, access::read> sourceBlurredYTexture [[texture(3)]],
                              texture2d<float, access::write> destinationYTexture [[texture(4)]],
                              texture2d<float, access::write> destinationUVTexture [[texture(5)]],
                              constant DepthBlurRenderData &renderData [[buffer(0)]],
                              uint2 gid [[thread_position_in_grid]],
                              uint2 size [[threads_per_grid]]) {
    float2 valueUV = sourceUVTexture.read(gid).rg;
    // The depth texture is sampled at a quarter of the Y texture's resolution in each dimension.
    float depthValue = sourceDepthTexture.read(uint2(gid.x / 4, gid.y / 4)).r;
    float normalizedDepthValue = (depthValue - renderData.depthOffset) / renderData.depthRange;
    float valueYUnblurred = sourceYTexture.read(gid).r;
    float valueYBlurred = sourceBlurredYTexture.read(gid).r;

    float valueY = 0;
    if (normalizedDepthValue > renderData.depthBlurForegroundThreshold) {
        valueY = valueYUnblurred;
    } else if (normalizedDepthValue < renderData.depthBlurBackgroundThreshold) {
        valueY = valueYBlurred;
    } else {
        // Linearly blend the unblurred and blurred Y values across the band between the thresholds.
        float blendRange = renderData.depthBlurForegroundThreshold - renderData.depthBlurBackgroundThreshold;
        float normalizedBlendDepthValue = (normalizedDepthValue - renderData.depthBlurBackgroundThreshold) / blendRange;
        valueY = valueYUnblurred * normalizedBlendDepthValue + valueYBlurred * (1 - normalizedBlendDepthValue);
    }

    destinationYTexture.write(valueY, gid);
    destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);
}
@ -0,0 +1,21 @@
//
//  SCDepthBlurMetalRenderCommand.h
//  Snapchat
//
//  Created by Brian Ng on 11/8/17.
//

#import "SCMetalModule.h"

#import <Foundation/Foundation.h>

/*
 @class SCDepthBlurMetalRenderCommand
 Prepares the command buffer for the SCDepthBlurMetalModule.metal shader.
 */
@interface SCDepthBlurMetalRenderCommand : NSObject <SCMetalRenderCommand>

@property (nonatomic, readonly) NSString *functionName;

@end
@ -0,0 +1,90 @@
//
//  SCDepthBlurMetalRenderCommand.m
//  Snapchat
//
//  Created by Brian Ng on 11/8/17.
//

#import "SCDepthBlurMetalRenderCommand.h"

#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"

#import <SCFoundation/NSString+SCFormat.h>

@import MetalPerformanceShaders;

@implementation SCDepthBlurMetalRenderCommand

typedef struct DepthBlurRenderData {
    float depthRange;
    float depthOffset;
    float depthBlurForegroundThreshold;
    float depthBlurBackgroundThreshold;
} DepthBlurRenderData;

#pragma mark - SCMetalRenderCommand

- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource
{
#if !TARGET_IPHONE_SIMULATOR
    CGFloat depthBlurForegroundThreshold = textureResource.depthBlurForegroundThreshold;
    CGFloat depthBlurBackgroundThreshold =
        textureResource.depthBlurForegroundThreshold > SCCameraTweaksDepthBlurBackgroundThreshold()
            ? SCCameraTweaksDepthBlurBackgroundThreshold()
            : 0;
    DepthBlurRenderData depthBlurRenderData = {
        .depthRange = textureResource.depthRange,
        .depthOffset = textureResource.depthOffset,
        .depthBlurBackgroundThreshold = depthBlurBackgroundThreshold,
        .depthBlurForegroundThreshold = depthBlurForegroundThreshold,
    };
    id<MTLBuffer> depthBlurRenderDataBuffer =
        [textureResource.device newBufferWithLength:sizeof(DepthBlurRenderData)
                                            options:MTLResourceOptionCPUCacheModeDefault];
    memcpy(depthBlurRenderDataBuffer.contents, &depthBlurRenderData, sizeof(DepthBlurRenderData));

    // Pre-blur the whole luma plane; the kernel then blends blurred and unblurred Y by depth.
    MPSImageGaussianBlur *kernel =
        [[MPSImageGaussianBlur alloc] initWithDevice:textureResource.device sigma:SCCameraTweaksBlurSigma()];
    [kernel encodeToCommandBuffer:commandBuffer
                    sourceTexture:textureResource.sourceYTexture
               destinationTexture:textureResource.sourceBlurredYTexture];

    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
    [commandEncoder setComputePipelineState:pipelineState];

    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
    [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];
    [commandEncoder setTexture:textureResource.sourceDepthTexture atIndex:2];
    [commandEncoder setTexture:textureResource.sourceBlurredYTexture atIndex:3];
    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:4];
    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:5];
    [commandEncoder setBuffer:depthBlurRenderDataBuffer offset:0 atIndex:0];

    return commandEncoder;
#else
    return nil;
#endif
}

- (BOOL)requiresDepthData
{
    return YES;
}

#pragma mark - SCMetalModuleFunctionProvider

- (NSString *)functionName
{
    return @"kernel_depth_blur";
}

- (NSString *)description
{
    return [NSString sc_stringWithFormat:@"SCDepthBlurMetalRenderCommand (shader function = %@)", self.functionName];
}

@end
@ -0,0 +1,29 @@
//
//  SCDepthToGrayscaleMetalModule.metal
//  Snapchat
//
//  Created by Brian Ng on 12/7/17.
//

#include <metal_stdlib>
using namespace metal;

typedef struct DepthToGrayscaleRenderData {
    float depthRange;
    float depthOffset;
} DepthToGrayscaleRenderData;

kernel void kernel_depth_to_grayscale(texture2d<float, access::read> sourceDepthTexture [[texture(0)]],
                                      texture2d<float, access::write> destinationYTexture [[texture(1)]],
                                      texture2d<float, access::write> destinationUVTexture [[texture(2)]],
                                      constant DepthToGrayscaleRenderData &renderData [[buffer(0)]],
                                      uint2 gid [[thread_position_in_grid]],
                                      uint2 size [[threads_per_grid]]) {
    float depthValue = sourceDepthTexture.read(uint2(gid.x / 4, gid.y / 4)).r;
    float normalizedDepthValue = (depthValue - renderData.depthOffset) / renderData.depthRange;

    destinationYTexture.write(normalizedDepthValue, gid);
    // Neutral chroma (0.5, 0.5) renders the normalized depth as pure grayscale.
    destinationUVTexture.write(float4(0.5, 0.5, 0, 0), gid);
}
@ -0,0 +1,21 @@
//
//  SCDepthToGrayscaleMetalRenderCommand.h
//  Snapchat
//
//  Created by Brian Ng on 12/7/17.
//

#import "SCMetalModule.h"

#import <Foundation/Foundation.h>

/*
 @class SCDepthToGrayscaleMetalRenderCommand
 Prepares the command buffer for the SCDepthToGrayscaleMetalModule.metal shader.
 */
@interface SCDepthToGrayscaleMetalRenderCommand : NSObject <SCMetalRenderCommand>

@property (nonatomic, readonly) NSString *functionName;

@end
@ -0,0 +1,72 @@
//
//  SCDepthToGrayscaleMetalRenderCommand.m
//  Snapchat
//
//  Created by Brian Ng on 12/7/17.
//

#import "SCDepthToGrayscaleMetalRenderCommand.h"

#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"

#import <SCFoundation/NSString+SCFormat.h>

@import MetalPerformanceShaders;

@implementation SCDepthToGrayscaleMetalRenderCommand

typedef struct DepthToGrayscaleRenderData {
    float depthRange;
    float depthOffset;
} DepthToGrayscaleRenderData;

#pragma mark - SCMetalRenderCommand

- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource
{
#if !TARGET_IPHONE_SIMULATOR
    DepthToGrayscaleRenderData depthToGrayscaleRenderData = {
        .depthRange = textureResource.depthRange,
        .depthOffset = textureResource.depthOffset,
    };
    id<MTLBuffer> depthToGrayscaleDataBuffer =
        [textureResource.device newBufferWithLength:sizeof(DepthToGrayscaleRenderData)
                                            options:MTLResourceOptionCPUCacheModeDefault];
    memcpy(depthToGrayscaleDataBuffer.contents, &depthToGrayscaleRenderData, sizeof(DepthToGrayscaleRenderData));

    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
    [commandEncoder setComputePipelineState:pipelineState];

    [commandEncoder setTexture:textureResource.sourceDepthTexture atIndex:0];
    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:1];
    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:2];
    [commandEncoder setBuffer:depthToGrayscaleDataBuffer offset:0 atIndex:0];

    return commandEncoder;
#else
    return nil;
#endif
}

- (BOOL)requiresDepthData
{
    return YES;
}

#pragma mark - SCMetalModuleFunctionProvider

- (NSString *)functionName
{
    return @"kernel_depth_to_grayscale";
}

- (NSString *)description
{
    return [NSString
        sc_stringWithFormat:@"SCDepthToGrayscaleMetalRenderCommand (shader function = %@)", self.functionName];
}

@end
28
ManagedCapturer/ImageProcessing/SCDigitalExposureHandler.h
Normal file
@ -0,0 +1,28 @@
//
//  SCDigitalExposureHandler.h
//  Snapchat
//
//  Created by Yu-Kuan (Anthony) Lai on 6/15/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>

@class SCExposureAdjustProcessingModule;

/*
 @class SCDigitalExposureHandler
 The SCDigitalExposureHandler is built by the SCProcessingBuilder when the user indicates that they
 want to add an SCExposureAdjustProcessingModule to the processing pipeline. The builder takes care
 of initializing the handler by linking it to the processing module. The caller of the builder can
 then link the handler to the UI element (in this case, SCExposureSlider) so that the user's control
 is hooked up to the processing module.
 */
@interface SCDigitalExposureHandler : NSObject

- (instancetype)initWithProcessingModule:(SCExposureAdjustProcessingModule *)processingModule;
- (void)setExposureParameter:(CGFloat)value;

@end
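A hedged wiring sketch (hypothetical slider callback; SCExposureSlider's exact API is not shown in this commit):

    - (void)exposureSliderDidChange:(SCExposureSlider *)slider
    {
        // `slider.value` is assumed to be the slider's current CGFloat position.
        [_digitalExposureHandler setExposureParameter:slider.value];
    }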
30
ManagedCapturer/ImageProcessing/SCDigitalExposureHandler.m
Normal file
@ -0,0 +1,30 @@
//
//  SCDigitalExposureHandler.m
//  Snapchat
//
//  Created by Yu-Kuan (Anthony) Lai on 6/15/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCDigitalExposureHandler.h"

#import "SCExposureAdjustProcessingModule.h"

@implementation SCDigitalExposureHandler {
    __weak SCExposureAdjustProcessingModule *_processingModule;
}

- (instancetype)initWithProcessingModule:(SCExposureAdjustProcessingModule *)processingModule
{
    if (self = [super init]) {
        _processingModule = processingModule;
    }
    return self;
}

- (void)setExposureParameter:(CGFloat)value
{
    [_processingModule setEVValue:value];
}

@end
@ -0,0 +1,60 @@
//
//  SCExposureAdjustMetalModule.metal
//  Snapchat
//
//  Created by Michel Loenngren on 7/11/17.
//
//

#include <metal_stdlib>
using namespace metal;

kernel void kernel_exposure_adjust(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                                   texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                                   texture2d<float, access::write> destinationYTexture [[texture(2)]],
                                   texture2d<float, access::write> destinationUVTexture [[texture(3)]],
                                   uint2 gid [[thread_position_in_grid]],
                                   uint2 size [[threads_per_grid]]) {
    float valueY = sourceYTexture.read(gid).r;
    float2 valueUV = sourceUVTexture.read(gid).rg;

    // Shadow-boosting tone curve: the gain is largest for dark pixels and falls
    // off quickly as luma approaches 1.0, leaving highlights nearly untouched.
    float factor = 1.0 / pow(1.0 + valueY, 5) + 1.0;
    valueY *= factor;
    destinationYTexture.write(valueY, gid);
    destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);
}

kernel void kernel_exposure_adjust_nightvision(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                                               texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                                               texture2d<float, access::write> destinationYTexture [[texture(2)]],
                                               texture2d<float, access::write> destinationUVTexture [[texture(3)]],
                                               uint2 gid [[thread_position_in_grid]],
                                               uint2 size [[threads_per_grid]]) {
    float valueY = sourceYTexture.read(gid).r;

    // Constant chroma offsets that replace the source chroma with a green tint.
    float u = 0.5 - 0.368;
    float v = 0.5 - 0.291;

    destinationYTexture.write(valueY, gid);
    destinationUVTexture.write(float4(u, v, 0, 0), gid);
}

kernel void kernel_exposure_adjust_inverted_nightvision(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                                                        texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                                                        texture2d<float, access::write> destinationYTexture [[texture(2)]],
                                                        texture2d<float, access::write> destinationUVTexture [[texture(3)]],
                                                        uint2 gid [[thread_position_in_grid]],
                                                        uint2 size [[threads_per_grid]]) {
    float valueY = sourceYTexture.read(gid).r;

    // Invert luma before applying the same green tint as the nightvision kernel.
    valueY = 1.0 - valueY;

    float u = 0.5 - 0.368;
    float v = 0.5 - 0.291;

    destinationYTexture.write(valueY, gid);
    destinationUVTexture.write(float4(u, v, 0, 0), gid);
}
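For reference, the tone curve in kernel_exposure_adjust maps luma Y through f(Y) = 1 + (1 + Y)^-5: a black pixel (Y = 0) gets a gain of 2.0, mid gray (Y = 0.5) about 1.13, and white (Y = 1) about 1.03, so shadows are roughly doubled while highlights are almost unchanged (the unorm destination texture clamps the result to [0, 1]).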
@ -0,0 +1,21 @@
//
//  SCExposureAdjustMetalRenderCommand.h
//  Snapchat
//
//  Created by Michel Loenngren on 7/11/17.
//
//

#import "SCMetalModule.h"

#import <Foundation/Foundation.h>

/*
 @class SCExposureAdjustMetalRenderCommand
 Prepares the command buffer for the SCExposureAdjustMetalModule.metal shader.
 */
@interface SCExposureAdjustMetalRenderCommand : SCMetalModule <SCMetalRenderCommand>

@property (nonatomic, readonly) NSString *functionName;

@end
@ -0,0 +1,66 @@
//
//  SCExposureAdjustMetalRenderCommand.m
//  Snapchat
//
//  Created by Michel Loenngren on 7/11/17.
//
//

#import "SCExposureAdjustMetalRenderCommand.h"

#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"

#import <SCFoundation/NSString+SCFormat.h>
#import <SCFoundation/SCAssertWrapper.h>

@import Metal;

@implementation SCExposureAdjustMetalRenderCommand

#pragma mark - SCMetalRenderCommand

- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource
{
    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
    [commandEncoder setComputePipelineState:pipelineState];
#if !TARGET_IPHONE_SIMULATOR
    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
    [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];
    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:2];
    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:3];
#endif

    return commandEncoder;
}

#pragma mark - SCMetalModuleFunctionProvider

- (NSString *)functionName
{
    if (SCCameraExposureAdjustmentMode() == 1) {
        return @"kernel_exposure_adjust";
    } else if (SCCameraExposureAdjustmentMode() == 2) {
        return @"kernel_exposure_adjust_nightvision";
    } else if (SCCameraExposureAdjustmentMode() == 3) {
        return @"kernel_exposure_adjust_inverted_nightvision";
    } else {
        SCAssertFail(@"Incorrect value from SCCameraExposureAdjustmentMode() %ld",
                     (long)SCCameraExposureAdjustmentMode());
        return nil;
    }
}

- (BOOL)requiresDepthData
{
    return NO;
}

- (NSString *)description
{
    return
        [NSString sc_stringWithFormat:@"SCExposureAdjustMetalRenderCommand (shader function = %@)", self.functionName];
}

@end
@ -0,0 +1,28 @@
//
//  SCExposureAdjustProcessingModule.h
//  Snapchat
//
//  Created by Yu-Kuan (Anthony) Lai on 6/1/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCProcessingModule.h"

#import <Foundation/Foundation.h>

/**
 NOTE: If we start chaining multiple CIImage modules we should
 not run them back to back but instead in one CIImage pass,
 as CoreImage will merge the shaders for best performance.
 */

/*
 @class SCExposureAdjustProcessingModule
 This module uses the CIExposureAdjust CIFilter to process frames. It uses the value provided by
 the SCDigitalExposureHandler as the EV value (default is 0).
 */
@interface SCExposureAdjustProcessingModule : NSObject <SCProcessingModule>

- (void)setEVValue:(CGFloat)value;

@end
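To illustrate the NOTE above, a sketch of running two CIFilter stages as one CoreImage pass (the filter names are real CoreImage filters; `inputImage`, `context`, and `outputPixelBuffer` are assumed to exist in the caller):

// Chain one filter's outputImage into the next and render once;
// CoreImage concatenates the kernels into a single GPU pass.
CIFilter *exposure = [CIFilter filterWithName:@"CIExposureAdjust"];
[exposure setValue:inputImage forKey:kCIInputImageKey];
[exposure setValue:@(0.5) forKey:@"inputEV"];

CIFilter *saturation = [CIFilter filterWithName:@"CIColorControls"];
[saturation setValue:exposure.outputImage forKey:kCIInputImageKey];
[saturation setValue:@(1.1) forKey:@"inputSaturation"];

// One render call executes both stages.
[context render:saturation.outputImage toCVPixelBuffer:outputPixelBuffer];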
@ -0,0 +1,67 @@
//
//  SCExposureAdjustProcessingModule.m
//  Snapchat
//
//  Created by Yu-Kuan (Anthony) Lai on 6/1/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCExposureAdjustProcessingModule.h"

#import "SCProcessingModuleUtils.h"

@import CoreImage;
@import CoreMedia;

static const CGFloat kSCExposureAdjustProcessingModuleMaxEVValue = 2.0;

@implementation SCExposureAdjustProcessingModule {
    CIContext *_context;
    CIFilter *_filter;
    CFMutableDictionaryRef _attributes;
    CVPixelBufferPoolRef _bufferPool;
}

- (instancetype)init
{
    if (self = [super init]) {
        _context = [CIContext context];
        _filter = [CIFilter filterWithName:@"CIExposureAdjust"];
        [_filter setValue:@0.0 forKey:@"inputEV"];
    }
    return self;
}

- (void)setEVValue:(CGFloat)value
{
    CGFloat newEVValue = value * kSCExposureAdjustProcessingModuleMaxEVValue;
    [_filter setValue:@(newEVValue) forKey:@"inputEV"];
}

- (void)dealloc
{
    if (_bufferPool) {
        CVPixelBufferPoolFlush(_bufferPool, kCVPixelBufferPoolFlushExcessBuffers);
        CVPixelBufferPoolRelease(_bufferPool);
    }
}

- (BOOL)requiresDepthData
{
    return NO;
}

- (CMSampleBufferRef)render:(RenderData)renderData
{
    CMSampleBufferRef input = renderData.sampleBuffer;
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(input);
    CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];

    [_filter setValue:image forKey:kCIInputImageKey];
    CIImage *result = [_filter outputImage];

    // Pass the pool by reference so the utility can lazily create it on first
    // use and this module can reuse it across frames (and release it in dealloc).
    return [SCProcessingModuleUtils sampleBufferFromImage:result
                                          oldSampleBuffer:input
                                               bufferPool:&_bufferPool
                                                  context:_context];
}

@end
48
ManagedCapturer/ImageProcessing/SCMetalModule.h
Normal file
@ -0,0 +1,48 @@
//
//  SCMetalModule.h
//  Snapchat
//
//  Created by Michel Loenngren on 7/19/17.
//
//

#import "SCMetalTextureResource.h"
#import "SCMetalUtils.h"
#import "SCProcessingModule.h"

#import <Foundation/Foundation.h>

@protocol SCMetalModuleFunctionProvider <NSObject>

@property (nonatomic, readonly) NSString *functionName;

@end

@protocol SCMetalRenderCommand <SCMetalModuleFunctionProvider>

/**
 Sets textures and parameters for the shader function. An implementation must create the compute
 command encoder and set the pipeline state; that is, it must call [commandBuffer computeCommandEncoder]
 and [commandEncoder setComputePipelineState:pipelineState].
 */
#if !TARGET_IPHONE_SIMULATOR
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource;
#endif

- (BOOL)requiresDepthData;

@end

/**
 NOTE: If we start chaining multiple metal modules we should
 not run them back to back but instead chain different render
 passes.
 */
@interface SCMetalModule : NSObject <SCProcessingModule>

// Designated initializer: SCMetalModule should always have an SCMetalRenderCommand
- (instancetype)initWithMetalRenderCommand:(id<SCMetalRenderCommand>)metalRenderCommand;

@end
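A sketch of the contract above (illustrative, not part of this commit): a hypothetical pass-through command that only binds the luma textures; the shader name `kernel_copy_y` is an assumption.

@interface SCCopyYMetalRenderCommand : SCMetalModule <SCMetalRenderCommand>
@end

@implementation SCCopyYMetalRenderCommand

- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource
{
    // Required by the protocol: create the encoder and set the pipeline state.
    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
    [commandEncoder setComputePipelineState:pipelineState];
    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:1];
    return commandEncoder;
}

- (NSString *)functionName
{
    return @"kernel_copy_y"; // assumed shader name, for illustration only
}

- (BOOL)requiresDepthData
{
    return NO;
}

@end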
155
ManagedCapturer/ImageProcessing/SCMetalModule.m
Normal file
@ -0,0 +1,155 @@
//
//  SCMetalModule.m
//  Snapchat
//
//  Created by Michel Loenngren on 7/19/17.
//
//

#import "SCMetalModule.h"

#import "SCCameraTweaks.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>

@interface SCMetalModule ()
#if !TARGET_IPHONE_SIMULATOR
@property (nonatomic, readonly) id<MTLLibrary> library;
@property (nonatomic, readonly) id<MTLDevice> device;
@property (nonatomic, readonly) id<MTLFunction> function;
@property (nonatomic, readonly) id<MTLComputePipelineState> computePipelineState;
@property (nonatomic, readonly) id<MTLCommandQueue> commandQueue;
@property (nonatomic, readonly) CVMetalTextureCacheRef textureCache;
#endif
@end

@implementation SCMetalModule {
    id<SCMetalRenderCommand> _metalRenderCommand;
}

#if !TARGET_IPHONE_SIMULATOR
@synthesize library = _library;
@synthesize function = _function;
@synthesize computePipelineState = _computePipelineState;
@synthesize commandQueue = _commandQueue;
@synthesize textureCache = _textureCache;
#endif

- (instancetype)initWithMetalRenderCommand:(id<SCMetalRenderCommand>)metalRenderCommand
{
    self = [super init];
    if (self) {
        _metalRenderCommand = metalRenderCommand;
    }
    return self;
}

#pragma mark - SCProcessingModule

- (CMSampleBufferRef)render:(RenderData)renderData
{
    CMSampleBufferRef input = renderData.sampleBuffer;
#if !TARGET_IPHONE_SIMULATOR
    id<MTLComputePipelineState> pipelineState = self.computePipelineState;
    SC_GUARD_ELSE_RETURN_VALUE(pipelineState, input);

    CVMetalTextureCacheRef textureCache = self.textureCache;
    SC_GUARD_ELSE_RETURN_VALUE(textureCache, input);

    id<MTLCommandQueue> commandQueue = self.commandQueue;
    SC_GUARD_ELSE_RETURN_VALUE(commandQueue, input);

    SCMetalTextureResource *textureResource =
        [[SCMetalTextureResource alloc] initWithRenderData:renderData textureCache:textureCache device:self.device];
    id<MTLCommandBuffer> commandBuffer = [commandQueue commandBuffer];
    if (!_metalRenderCommand) {
        SCAssertFail(@"Metal module must be initialized with an SCMetalRenderCommand");
    }
    id<MTLComputeCommandEncoder> commandEncoder = [_metalRenderCommand encodeMetalCommand:commandBuffer
                                                                            pipelineState:pipelineState
                                                                          textureResource:textureResource];

    // Cover the full luma plane with ceil-divided threadgroups, e.g. w = 32 and
    // h = 1024 / 32 = 32 on a GPU whose maxTotalThreadsPerThreadgroup is 1024.
    NSUInteger w = pipelineState.threadExecutionWidth;
    NSUInteger h = pipelineState.maxTotalThreadsPerThreadgroup / w;

    MTLSize threadsPerThreadgroup = MTLSizeMake(w, h, 1);
    MTLSize threadgroupsPerGrid = MTLSizeMake((textureResource.sourceYTexture.width + w - 1) / w,
                                              (textureResource.sourceYTexture.height + h - 1) / h, 1);

    [commandEncoder dispatchThreadgroups:threadgroupsPerGrid threadsPerThreadgroup:threadsPerThreadgroup];

    [commandEncoder endEncoding];
    [commandBuffer commit];
    [commandBuffer waitUntilCompleted];

    // Copy the processed planes back into the sample buffer that gets returned.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(renderData.sampleBuffer);
    SCMetalCopyTexture(textureResource.destinationYTexture, imageBuffer, 0);
    SCMetalCopyTexture(textureResource.destinationUVTexture, imageBuffer, 1);
#endif
    return input;
}

- (BOOL)requiresDepthData
{
    return [_metalRenderCommand requiresDepthData];
}

#pragma mark - Lazy properties

#if !TARGET_IPHONE_SIMULATOR

- (id<MTLLibrary>)library
{
    if (!_library) {
        NSString *libPath = [[NSBundle mainBundle] pathForResource:@"sccamera-default" ofType:@"metallib"];
        NSError *error = nil;
        _library = [self.device newLibraryWithFile:libPath error:&error];
        if (error) {
            SCLogGeneralError(@"Create metallib error: %@", error.description);
        }
    }
    return _library;
}

- (id<MTLDevice>)device
{
    return SCGetManagedCaptureMetalDevice();
}

- (id<MTLFunction>)function
{
    // Cache the function like the other lazy properties instead of creating a
    // new MTLFunction on every access.
    if (!_function) {
        _function = [self.library newFunctionWithName:[_metalRenderCommand functionName]];
    }
    return _function;
}

- (id<MTLComputePipelineState>)computePipelineState
{
    if (!_computePipelineState) {
        NSError *error = nil;
        _computePipelineState = [self.device newComputePipelineStateWithFunction:self.function error:&error];
        if (error) {
            SCLogGeneralError(@"Error while creating compute pipeline state %@", error.description);
        }
    }
    return _computePipelineState;
}

- (id<MTLCommandQueue>)commandQueue
{
    if (!_commandQueue) {
        _commandQueue = [self.device newCommandQueue];
    }
    return _commandQueue;
}

- (CVMetalTextureCacheRef)textureCache
{
    if (!_textureCache) {
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, self.device, nil, &_textureCache);
    }
    return _textureCache;
}

#endif

@end
54
ManagedCapturer/ImageProcessing/SCMetalTextureResource.h
Normal file
@ -0,0 +1,54 @@
//
//  SCMetalTextureResource.h
//  Snapchat
//
//  Created by Brian Ng on 11/7/17.
//

#import "SCProcessingModule.h"
#import "SCCapturerDefines.h"

#import <Foundation/Foundation.h>
#if !TARGET_IPHONE_SIMULATOR
#import <Metal/Metal.h>
#endif

/*
 @class SCMetalTextureResource
 The SCMetalTextureResource is created by SCMetalModule and is passed to an SCMetalRenderCommand.
 This resource provides a collection of textures for rendering, where an SCMetalRenderCommand
 selects which textures it needs. Textures are lazily initialized to optimize performance.
 Additionally, information pertaining to depth is provided if normalizing depth is desired:
 depthRange is the range of possible depth values [depthOffset, depthOffset + depthRange],
 where depthOffset is the min depth value in the given depth map.
 NOTE: This class is NOT thread safe -- ensure any calls are made by a performer by calling
 SCAssertPerformer before actually accessing any textures.
 */
@interface SCMetalTextureResource : NSObject

#if !TARGET_IPHONE_SIMULATOR
@property (nonatomic, readonly) id<MTLTexture> sourceYTexture;
@property (nonatomic, readonly) id<MTLTexture> sourceUVTexture;
@property (nonatomic, readonly) id<MTLTexture> destinationYTexture;
@property (nonatomic, readonly) id<MTLTexture> destinationUVTexture;

// Textures for SCDepthBlurMetalCommand
@property (nonatomic, readonly) id<MTLTexture> sourceBlurredYTexture;
@property (nonatomic, readonly) id<MTLTexture> sourceDepthTexture;

@property (nonatomic, readonly) id<MTLDevice> device;
#endif

// Available depth-related auxiliary resources (when depth data is provided)
@property (nonatomic, readonly) float depthRange;
@property (nonatomic, readonly) float depthOffset;
@property (nonatomic, readonly) CGFloat depthBlurForegroundThreshold;
@property (nonatomic, readonly) SampleBufferMetadata sampleBufferMetadata;

#if !TARGET_IPHONE_SIMULATOR
- (instancetype)initWithRenderData:(RenderData)renderData
                      textureCache:(CVMetalTextureCacheRef)textureCache
                            device:(id<MTLDevice>)device;
#endif

@end
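Given the convention above, a shader normalizes a raw depth sample d into [0, 1] as (d - depthOffset) / depthRange. For example, a map spanning [0.4, 2.4] meters has depthOffset = 0.4 and depthRange = 2.0, so d = 1.4 normalizes to 0.5.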
215
ManagedCapturer/ImageProcessing/SCMetalTextureResource.m
Normal file
@ -0,0 +1,215 @@
//
//  SCMetalTextureResource.m
//  Snapchat
//
//  Created by Brian Ng on 11/7/17.
//

#import "SCMetalTextureResource.h"

#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"

@import CoreImage;

#if !TARGET_IPHONE_SIMULATOR
static NSInteger const kSCFocusRectSize = 4;
#endif

@interface SCMetalTextureResource ()
#if !TARGET_IPHONE_SIMULATOR
@property (nonatomic, readonly) CVMetalTextureCacheRef textureCache;
#endif
@end

@implementation SCMetalTextureResource {
    RenderData _renderData;
    CVImageBufferRef _imageBuffer;
    CIContext *_context;
}

#if !TARGET_IPHONE_SIMULATOR
@synthesize sourceYTexture = _sourceYTexture;
@synthesize sourceUVTexture = _sourceUVTexture;
@synthesize destinationYTexture = _destinationYTexture;
@synthesize destinationUVTexture = _destinationUVTexture;
@synthesize sourceBlurredYTexture = _sourceBlurredYTexture;
@synthesize sourceDepthTexture = _sourceDepthTexture;
@synthesize depthRange = _depthRange;
@synthesize depthOffset = _depthOffset;
@synthesize depthBlurForegroundThreshold = _depthBlurForegroundThreshold;
@synthesize device = _device;
@synthesize sampleBufferMetadata = _sampleBufferMetadata;

- (instancetype)initWithRenderData:(RenderData)renderData
                      textureCache:(CVMetalTextureCacheRef)textureCache
                            device:(id<MTLDevice>)device
{
    self = [super init];
    if (self) {
        _imageBuffer = CMSampleBufferGetImageBuffer(renderData.sampleBuffer);
        _renderData = renderData;
        _textureCache = textureCache;
        _device = device;
        _context = [CIContext contextWithOptions:@{ kCIContextWorkingFormat : @(kCIFormatRGBAh) }];
    }
    return self;
}

- (id<MTLTexture>)sourceYTexture
{
    if (!_sourceYTexture) {
        CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
        _sourceYTexture = SCMetalTextureFromPixelBuffer(_imageBuffer, 0, MTLPixelFormatR8Unorm, _textureCache);
        CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
    }
    return _sourceYTexture;
}

- (id<MTLTexture>)sourceUVTexture
{
    if (!_sourceUVTexture) {
        CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
        _sourceUVTexture = SCMetalTextureFromPixelBuffer(_imageBuffer, 1, MTLPixelFormatRG8Unorm, _textureCache);
        CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
    }
    return _sourceUVTexture;
}

- (id<MTLTexture>)destinationYTexture
{
    if (!_destinationYTexture) {
        MTLTextureDescriptor *textureDescriptor =
            [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
                                                               width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 0)
                                                              height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 0)
                                                           mipmapped:NO];
        textureDescriptor.usage |= MTLTextureUsageShaderWrite;
        _destinationYTexture = [_device newTextureWithDescriptor:textureDescriptor];
    }
    return _destinationYTexture;
}

- (id<MTLTexture>)destinationUVTexture
{
    if (!_destinationUVTexture) {
        MTLTextureDescriptor *textureDescriptor =
            [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatRG8Unorm
                                                               width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 1)
                                                              height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 1)
                                                           mipmapped:NO];
        textureDescriptor.usage |= MTLTextureUsageShaderWrite;
        _destinationUVTexture = [_device newTextureWithDescriptor:textureDescriptor];
    }
    return _destinationUVTexture;
}

- (id<MTLTexture>)sourceBlurredYTexture
{
    if (!_sourceBlurredYTexture) {
        MTLTextureDescriptor *textureDescriptor =
            [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
                                                               width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 0)
                                                              height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 0)
                                                           mipmapped:NO];
        textureDescriptor.usage |= MTLTextureUsageShaderWrite;
        _sourceBlurredYTexture = [_device newTextureWithDescriptor:textureDescriptor];
    }
    return _sourceBlurredYTexture;
}

- (id<MTLTexture>)sourceDepthTexture
{
    if (!_sourceDepthTexture) {
        // Lock the depth map (not the color image buffer) while wrapping it in a texture.
        CVPixelBufferLockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);
        _sourceDepthTexture =
            SCMetalTextureFromPixelBuffer(_renderData.depthDataMap, 0, MTLPixelFormatR16Float, _textureCache);
        CVPixelBufferUnlockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);
    }
    return _sourceDepthTexture;
}

- (float)depthRange
{
    if (_depthRange == 0) {
        // Get min/max values of depth image to normalize
        size_t bufferWidth = CVPixelBufferGetWidth(_renderData.depthDataMap);
        size_t bufferHeight = CVPixelBufferGetHeight(_renderData.depthDataMap);
        size_t bufferBytesPerRow = CVPixelBufferGetBytesPerRow(_renderData.depthDataMap);

        CVPixelBufferLockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);
        unsigned char *pixelBufferPointer = CVPixelBufferGetBaseAddress(_renderData.depthDataMap);
        __fp16 *bufferPtr = (__fp16 *)pixelBufferPointer;
        uint32_t ptrInc = (uint32_t)(bufferBytesPerRow / sizeof(__fp16));

        float depthMin = MAXFLOAT;
        float depthMax = -MAXFLOAT;
        for (int j = 0; j < bufferHeight; j++) {
            for (int i = 0; i < bufferWidth; i++) {
                float value = bufferPtr[i];
                if (!isnan(value)) {
                    depthMax = MAX(depthMax, value);
                    depthMin = MIN(depthMin, value);
                }
            }
            bufferPtr += ptrInc;
        }
        CVPixelBufferUnlockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);
        _depthRange = depthMax - depthMin;
        _depthOffset = depthMin;
    }
    return _depthRange;
}

- (float)depthOffset
{
    if (_depthRange == 0) {
        // Computing depthRange also populates _depthOffset.
        [self depthRange];
    }
    return _depthOffset;
}

- (CGFloat)depthBlurForegroundThreshold
{
    if (_renderData.depthBlurPointOfInterest) {
        CGPoint point = *_renderData.depthBlurPointOfInterest;
        CIImage *disparityImage = [CIImage imageWithCVPixelBuffer:_renderData.depthDataMap];
        CIVector *vector =
            [CIVector vectorWithX:point.x * CVPixelBufferGetWidth(_renderData.depthDataMap) - kSCFocusRectSize / 2
                                Y:point.y * CVPixelBufferGetHeight(_renderData.depthDataMap) - kSCFocusRectSize / 2
                                Z:kSCFocusRectSize
                                W:kSCFocusRectSize];
        CIImage *minMaxImage =
            [[disparityImage imageByClampingToExtent] imageByApplyingFilter:@"CIAreaMinMaxRed"
                                                        withInputParameters:@{kCIInputExtentKey : vector}];
        UInt8 pixel[4] = {0, 0, 0, 0};
        [_context render:minMaxImage
                toBitmap:pixel
                rowBytes:4
                  bounds:CGRectMake(0, 0, 1, 1)
                  format:kCIFormatRGBA8
              colorSpace:nil];
        // CIAreaMinMaxRed returns the minimum in the red channel and the maximum in the green channel.
        CGFloat disparity = pixel[1] / 255.0;
        CGFloat normalizedDisparity = (disparity - self.depthOffset) / self.depthRange;
        return normalizedDisparity;
    } else {
        return SCCameraTweaksDepthBlurForegroundThreshold();
    }
}

- (SampleBufferMetadata)sampleBufferMetadata
{
    SampleBufferMetadata sampleMetadata = {
        .isoSpeedRating = 0, .exposureTime = 0.033, .brightness = 0,
    };
    retrieveSampleBufferMetadata(_renderData.sampleBuffer, &sampleMetadata);
    return sampleMetadata;
}

#endif

@end
@ -0,0 +1,37 @@
//
//  SCNightModeEnhancementMetalModule.metal
//  Snapchat
//
//  Created by Chao Pang on 12/21/17.
//
//

#include <metal_stdlib>
using namespace metal;

// Must match the layout of the SampleBufferMetadata struct on the CPU side.
typedef struct SampleBufferMetadata {
    int isoSpeedRating;
    float exposureTime;
    float brightness;
} SampleBufferMetadata;

kernel void kernel_night_mode_enhancement(texture2d<float, access::read> sourceYTexture [[texture(0)]],
                                          texture2d<float, access::read> sourceUVTexture [[texture(1)]],
                                          texture2d<float, access::write> destinationYTexture [[texture(2)]],
                                          texture2d<float, access::write> destinationUVTexture [[texture(3)]],
                                          constant SampleBufferMetadata &metaData [[buffer(0)]],
                                          uint2 gid [[thread_position_in_grid]],
                                          uint2 size [[threads_per_grid]]) {
    float valueY = sourceYTexture.read(gid).r;
    float2 valueUV = sourceUVTexture.read(gid).rg;

    // The darker the scene (the more negative the brightness metadata), the
    // larger the gain, clamped to [1.0, 1.3].
    float factor = 1.0 - metaData.brightness * 0.1;
    factor = max(min(factor, 1.3), 1.0);

    valueY = min(valueY * factor, 1.0);
    valueUV.rg = max(min((valueUV.rg - 0.5) * factor + 0.5, 1.0), 0.0);

    destinationYTexture.write(valueY, gid);
    destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);
}
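Plugging numbers into the gain above: a bright frame with brightness metadata +2 gives factor = 1 - 0.2 = 0.8, which clamps up to 1.0 (no change), while a dark frame at -3 gives 1.3, the maximum boost; luma and the centered chroma offsets are scaled by that factor and clamped back into valid range.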
@ -0,0 +1,19 @@
//
//  SCNightModeEnhancementMetalRenderCommand.h
//  Snapchat
//
//  Created by Chao Pang on 12/21/17.
//

#import "SCMetalModule.h"

#import <Foundation/Foundation.h>

/*
 Prepares the command buffer for the SCNightModeEnhancementMetalModule.metal shader.
 */
@interface SCNightModeEnhancementMetalRenderCommand : SCMetalModule <SCMetalRenderCommand>

@property (nonatomic, readonly) NSString *functionName;

@end
@ -0,0 +1,64 @@
//
//  SCNightModeEnhancementMetalRenderCommand.m
//  Snapchat
//
//  Created by Chao Pang on 12/21/17.
//

#import "SCNightModeEnhancementMetalRenderCommand.h"

#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"

#import <SCFoundation/NSString+SCFormat.h>

@import Metal;

@implementation SCNightModeEnhancementMetalRenderCommand

#pragma mark - SCMetalRenderCommand

- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
                                     pipelineState:(id<MTLComputePipelineState>)pipelineState
                                   textureResource:(SCMetalTextureResource *)textureResource
{
    id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
    [commandEncoder setComputePipelineState:pipelineState];
#if !TARGET_IPHONE_SIMULATOR
    SampleBufferMetadata sampleBufferMetadata = {
        .isoSpeedRating = textureResource.sampleBufferMetadata.isoSpeedRating,
        .exposureTime = textureResource.sampleBufferMetadata.exposureTime,
        .brightness = textureResource.sampleBufferMetadata.brightness,
    };
    id<MTLBuffer> metadataBuffer = [textureResource.device newBufferWithLength:sizeof(SampleBufferMetadata)
                                                                       options:MTLResourceOptionCPUCacheModeDefault];
    memcpy(metadataBuffer.contents, &sampleBufferMetadata, sizeof(SampleBufferMetadata));
    [commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
    [commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];
    [commandEncoder setTexture:textureResource.destinationYTexture atIndex:2];
    [commandEncoder setTexture:textureResource.destinationUVTexture atIndex:3];
    [commandEncoder setBuffer:metadataBuffer offset:0 atIndex:0];
#endif

    return commandEncoder;
}

#pragma mark - SCMetalModuleFunctionProvider

- (NSString *)functionName
{
    return @"kernel_night_mode_enhancement";
}

- (BOOL)requiresDepthData
{
    return NO;
}

- (NSString *)description
{
    return [NSString
        sc_stringWithFormat:@"SCNightModeEnhancementMetalRenderCommand (shader function = %@)", self.functionName];
}

@end
32
ManagedCapturer/ImageProcessing/SCProcessingModule.h
Normal file
@ -0,0 +1,32 @@
//
//  SCProcessingModule.h
//  Snapchat
//
//  Created by Yu-Kuan (Anthony) Lai on 5/30/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>

typedef struct RenderData {
    CMSampleBufferRef sampleBuffer;
    CVPixelBufferRef depthDataMap;     // Optional - for depth blur rendering
    CGPoint *depthBlurPointOfInterest; // Optional - for depth blur rendering
} RenderData;

/*
 @protocol SCProcessingModule
 A single module that is responsible for the actual image processing work. Multiple modules can be chained
 together by the SCProcessingPipelineBuilder and the frame can be passed through the entire
 SCProcessingPipeline.
 */
@protocol SCProcessingModule <NSObject>

- (CMSampleBufferRef)render:(RenderData)renderData;

// Needed to protect against depth data potentially being nil during the render pass
- (BOOL)requiresDepthData;

@end
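A sketch of the smallest possible conformer (illustrative, not part of this commit): a module that requires no depth data and returns the frame unchanged.

@interface SCPassthroughProcessingModule : NSObject <SCProcessingModule>
@end

@implementation SCPassthroughProcessingModule

- (CMSampleBufferRef)render:(RenderData)renderData
{
    // No processing: hand the incoming sample buffer straight back.
    return renderData.sampleBuffer;
}

- (BOOL)requiresDepthData
{
    return NO;
}

@end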
22
ManagedCapturer/ImageProcessing/SCProcessingModuleUtils.h
Normal file
@ -0,0 +1,22 @@
//
//  SCProcessingModuleUtils.h
//  Snapchat
//
//  Created by Brian Ng on 11/10/17.
//

#import <CoreImage/CoreImage.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>

@interface SCProcessingModuleUtils : NSObject

// The buffer pool is passed by reference; if *bufferPool is NULL it is created
// on first use, so the caller can reuse it across frames and release it later.
+ (CVPixelBufferRef)pixelBufferFromImage:(CIImage *)image
                              bufferPool:(CVPixelBufferPoolRef *)bufferPool
                                 context:(CIContext *)context;

+ (CMSampleBufferRef)sampleBufferFromImage:(CIImage *)image
                           oldSampleBuffer:(CMSampleBufferRef)oldSampleBuffer
                                bufferPool:(CVPixelBufferPoolRef *)bufferPool
                                   context:(CIContext *)context;
@end
84
ManagedCapturer/ImageProcessing/SCProcessingModuleUtils.m
Normal file
@ -0,0 +1,84 @@
//
//  SCProcessingModuleUtils.m
//  Snapchat
//
//  Created by Brian Ng on 11/10/17.
//

#import "SCProcessingModuleUtils.h"

#import <SCFoundation/SCLog.h>

@import CoreImage;

@implementation SCProcessingModuleUtils

+ (CVPixelBufferRef)pixelBufferFromImage:(CIImage *)image
                              bufferPool:(CVPixelBufferPoolRef *)bufferPool
                                 context:(CIContext *)context
{
    CVReturn result;

    // Lazily create the pool through the out-parameter so it survives across
    // calls instead of being recreated (and leaked) for every frame.
    if (*bufferPool == NULL) {
        NSDictionary *pixelAttributes = @{
            (NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
            (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
            (NSString *)kCVPixelBufferWidthKey : @(image.extent.size.width),
            (NSString *)kCVPixelBufferHeightKey : @(image.extent.size.height)
        };
        result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
                                         (__bridge CFDictionaryRef _Nullable)(pixelAttributes), bufferPool);
        if (result != kCVReturnSuccess) {
            SCLogGeneralError(@"[Processing Pipeline] Error creating pixel buffer pool %i", result);
            return NULL;
        }
    }

    CVPixelBufferRef resultBuffer = NULL;
    result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, *bufferPool, &resultBuffer);

    if (result == kCVReturnSuccess) {
        [context render:image toCVPixelBuffer:resultBuffer];
    } else {
        SCLogGeneralError(@"[Processing Pipeline] Error creating pixel buffer from pool %i", result);
    }
    return resultBuffer;
}

+ (CMSampleBufferRef)sampleBufferFromImage:(CIImage *)image
                           oldSampleBuffer:(CMSampleBufferRef)oldSampleBuffer
                                bufferPool:(CVPixelBufferPoolRef *)bufferPool
                                   context:(CIContext *)context
{
    CVPixelBufferRef pixelBuffer =
        [SCProcessingModuleUtils pixelBufferFromImage:image bufferPool:bufferPool context:context];
    if (!pixelBuffer) {
        SCLogGeneralError(@"[Processing Pipeline] Error creating new pixel buffer from image");
        return oldSampleBuffer;
    }

    CMSampleBufferRef newSampleBuffer = NULL;
    CMSampleTimingInfo timingInfo = kCMTimingInfoInvalid;
    CMSampleBufferGetSampleTimingInfo(oldSampleBuffer, 0, &timingInfo);

    CMVideoFormatDescriptionRef videoInfo = NULL;
    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo);
    if (status != noErr) {
        SCLogGeneralError(@"[Processing Pipeline] Error creating video format description %i", (int)status);
        CVPixelBufferRelease(pixelBuffer);
        return oldSampleBuffer;
    }

    status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo,
                                                &timingInfo, &newSampleBuffer);
    // The sample buffer retains the format description; balance the Create above.
    CFRelease(videoInfo);
    if (status != noErr) {
        SCLogGeneralError(@"[Processing Pipeline] Error creating CMSampleBuffer %i", (int)status);
        CVPixelBufferRelease(pixelBuffer);
        return oldSampleBuffer;
    }

    CVPixelBufferRelease(pixelBuffer);
    return newSampleBuffer;
}

@end
23
ManagedCapturer/ImageProcessing/SCProcessingPipeline.h
Normal file
@ -0,0 +1,23 @@
//
//  SCProcessingPipeline.h
//  Snapchat
//
//  Created by Yu-Kuan (Anthony) Lai on 5/30/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCProcessingModule.h"

#import <Foundation/Foundation.h>

/*
 @class SCProcessingPipeline
 The SCProcessingPipeline chains together a series of SCProcessingModules and passes the frame through
 each of them in a pre-determined order. This is done through a chain of command, where the resulting
 frame from the first module is passed to the second, then to the third, etc.
 */
@interface SCProcessingPipeline : NSObject <SCProcessingModule>

@property (nonatomic, strong) NSMutableArray<id<SCProcessingModule>> *processingModules;

@end
46
ManagedCapturer/ImageProcessing/SCProcessingPipeline.m
Normal file
@ -0,0 +1,46 @@
//
//  SCProcessingPipeline.m
//  Snapchat
//
//  Created by Yu-Kuan (Anthony) Lai on 5/30/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCProcessingPipeline.h"

#import <SCFoundation/NSString+Helpers.h>

@import CoreMedia;

@implementation SCProcessingPipeline

- (CMSampleBufferRef)render:(RenderData)renderData
{
    for (id<SCProcessingModule> module in self.processingModules) {
        // Skip modules that need depth data when none is attached to this frame.
        if (![module requiresDepthData] || renderData.depthDataMap) {
            renderData.sampleBuffer = [module render:renderData];
        }
    }

    return renderData.sampleBuffer;
}

- (NSString *)description
{
    NSMutableString *desc = [NSMutableString new];
    [desc appendString:@"ProcessingPipeline, modules: "];
    for (id<SCProcessingModule> module in self.processingModules) {
        [desc appendFormat:@"%@, ", [module description]];
    }
    if (self.processingModules.count > 0) {
        // Trim the trailing ", " separator.
        return [desc substringToIndex:desc.lengthOfCharacterSequences - 2];
    }
    return desc;
}

- (BOOL)requiresDepthData
{
    return NO;
}

@end
@ -0,0 +1,29 @@
//
//  SCProcessingPipelineBuilder.h
//  Snapchat
//
//  Created by Yu-Kuan (Anthony) Lai on 6/1/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <Foundation/Foundation.h>

@class SCDigitalExposureHandler;
@class SCProcessingPipeline;

/*
 @class SCProcessingPipelineBuilder
 The builder object is responsible for creating the SCProcessingPipeline, the underlying
 SCProcessingModules, and eventually chaining the SCProcessingModules together in a pre-determined
 order. The builder is also responsible for providing consumers with handler objects.
 */
@interface SCProcessingPipelineBuilder : NSObject

@property (nonatomic) BOOL useExposureAdjust;
@property (nonatomic) BOOL portraitModeEnabled;
@property (nonatomic) BOOL enhancedNightMode;

- (SCProcessingPipeline *)build;

@end
@ -0,0 +1,57 @@
//
//  SCProcessingPipelineBuilder.m
//  Snapchat
//
//  Created by Yu-Kuan (Anthony) Lai on 6/1/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCProcessingPipelineBuilder.h"

#import "SCCameraTweaks.h"
#import "SCDepthBlurMetalRenderCommand.h"
#import "SCDepthToGrayscaleMetalRenderCommand.h"
#import "SCDigitalExposureHandler.h"
#import "SCExposureAdjustMetalRenderCommand.h"
#import "SCMetalUtils.h"
#import "SCNightModeEnhancementMetalRenderCommand.h"
#import "SCProcessingPipeline.h"

@implementation SCProcessingPipelineBuilder

- (SCProcessingPipeline *)build
{
    if (!_useExposureAdjust && !_portraitModeEnabled && !_enhancedNightMode) { // in the future: && !useA && !useB ...
        return nil;
    }

    SCProcessingPipeline *processingPipeline = [[SCProcessingPipeline alloc] init];
    NSMutableArray<id<SCProcessingModule>> *processingModules = [NSMutableArray array];

    // The order in which modules are added matters!
    if (_useExposureAdjust && SCDeviceSupportsMetal()) {
        // this check looks redundant right now, but when we have more modules it will be necessary
        SCMetalModule *exposureAdjustMetalModule =
            [[SCMetalModule alloc] initWithMetalRenderCommand:[SCExposureAdjustMetalRenderCommand new]];
        [processingModules addObject:exposureAdjustMetalModule];
    }

    if (_portraitModeEnabled) {
        id<SCMetalRenderCommand> renderCommand = SCCameraTweaksDepthToGrayscaleOverride()
                                                     ? [SCDepthToGrayscaleMetalRenderCommand new]
                                                     : [SCDepthBlurMetalRenderCommand new];
        SCMetalModule *depthBlurMetalModule = [[SCMetalModule alloc] initWithMetalRenderCommand:renderCommand];
        [processingModules addObject:depthBlurMetalModule];
    }

    if (_enhancedNightMode && SCDeviceSupportsMetal()) {
        SCMetalModule *nightModeEnhancementModule =
            [[SCMetalModule alloc] initWithMetalRenderCommand:[SCNightModeEnhancementMetalRenderCommand new]];
        [processingModules addObject:nightModeEnhancementModule];
    }

    processingPipeline.processingModules = processingModules;
    return processingPipeline;
}

@end
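A usage sketch of the builder and pipeline (illustrative; the per-frame `sampleBuffer` variable is assumed to come from a capture callback):

SCProcessingPipelineBuilder *builder = [[SCProcessingPipelineBuilder alloc] init];
builder.useExposureAdjust = YES;
builder.enhancedNightMode = YES;
SCProcessingPipeline *pipeline = [builder build]; // nil when no module is enabled

// Per frame:
RenderData renderData = {.sampleBuffer = sampleBuffer, .depthDataMap = NULL, .depthBlurPointOfInterest = NULL};
CMSampleBufferRef processed = [pipeline render:renderData];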
@ -0,0 +1,23 @@
//
//  SCStillImageDepthBlurFilter.h
//  Snapchat
//
//  Created by Brian Ng on 10/11/17.
//

#import "SCProcessingModule.h"

#import <Foundation/Foundation.h>

/*
 @class SCStillImageDepthBlurFilter
 This module uses the CIDepthBlurEffect CIFilter, which combines RGB and depth information to produce an image
 with the portrait mode effect (background blurred, foreground sharp).
 */
@interface SCStillImageDepthBlurFilter : NSObject

// Applies the CIDepthBlurEffect filter to a still image capture photo. If an error occurred, the original
// photoData is returned.
- (NSData *)renderWithPhotoData:(NSData *)photoData renderData:(RenderData)renderData NS_AVAILABLE_IOS(11_0);

@end
@ -0,0 +1,68 @@
//
//  SCStillImageDepthBlurFilter.m
//  Snapchat
//
//  Created by Brian Ng on 10/11/17.
//

#import "SCStillImageDepthBlurFilter.h"

#import "SCCameraTweaks.h"
#import "SCProcessingModuleUtils.h"

@import CoreMedia;

@implementation SCStillImageDepthBlurFilter {
    CIContext *_context;
    CIFilter *_filter;
    CVPixelBufferPoolRef _bufferPool;
}

- (instancetype)init
{
    if (self = [super init]) {
        _context = [CIContext contextWithOptions:@{ kCIContextWorkingFormat : @(kCIFormatRGBAh) }];
        _filter = [CIFilter filterWithName:@"CIDepthBlurEffect"];
    }
    return self;
}

- (void)dealloc
{
    if (_bufferPool) {
        CVPixelBufferPoolFlush(_bufferPool, kCVPixelBufferPoolFlushExcessBuffers);
        CVPixelBufferPoolRelease(_bufferPool);
    }
}

- (NSData *)renderWithPhotoData:(NSData *)photoData renderData:(RenderData)renderData NS_AVAILABLE_IOS(11_0)
{
    CIImage *mainImage = [CIImage imageWithData:photoData];
    CVPixelBufferRef disparityImagePixelBuffer = renderData.depthDataMap;
    CIImage *disparityImage = [CIImage imageWithCVPixelBuffer:disparityImagePixelBuffer];
    if (!disparityImage) {
        return photoData;
    }
    [_filter setValue:mainImage forKey:kCIInputImageKey];
    [_filter setValue:disparityImage forKey:kCIInputDisparityImageKey];
    if (renderData.depthBlurPointOfInterest && SCCameraTweaksEnableFilterInputFocusRect()) {
        CGPoint pointOfInterest = *renderData.depthBlurPointOfInterest;
        [_filter setValue:[CIVector vectorWithX:pointOfInterest.x Y:pointOfInterest.y Z:1 W:1]
                   forKey:@"inputFocusRect"];
    }
    CIImage *result = [_filter outputImage];
    if (!result) {
        return photoData;
    }
    CGColorSpaceRef deviceRGBColorSpace = CGColorSpaceCreateDeviceRGB();
    NSData *processedPhotoData = [_context JPEGRepresentationOfImage:result colorSpace:deviceRGBColorSpace options:@{}];
    CGColorSpaceRelease(deviceRGBColorSpace);
    if (!processedPhotoData) {
        return photoData;
    }
    // Note: the original code also re-wrapped the result into renderData.sampleBuffer
    // here, but RenderData is passed by value, so the assignment never reached the
    // caller and the created sample buffer leaked; the dead call has been removed.
    return processedPhotoData;
}

@end
21
ManagedCapturer/NSURL+Asset.h
Normal file
@ -0,0 +1,21 @@
//
//  NSURL+Asset.h
//  Snapchat
//
//  Created by Michel Loenngren on 4/30/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <Foundation/Foundation.h>

@interface NSURL (Asset)

/**
 If the media server is reset while recording, AVFoundation
 gets into a weird state. Even though we reload our AVFoundation
 objects, we still need to reload the asset keys on the
 output file. If we don't, the AVAssetWriter will fail when started.
 */
- (void)reloadAssetKeys;

@end
23
ManagedCapturer/NSURL+Asset.m
Normal file
@ -0,0 +1,23 @@
//
//  NSURL+Asset.m
//  Snapchat
//
//  Created by Michel Loenngren on 4/30/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "NSURL+Asset.h"

#import <SCBase/SCMacros.h>

@import AVFoundation;

@implementation NSURL (Asset)

- (void)reloadAssetKeys
{
    AVAsset *videoAsset = [AVAsset assetWithURL:self];
    [videoAsset loadValuesAsynchronouslyForKeys:@[ @keypath(videoAsset.duration) ] completionHandler:nil];
}

@end
39
ManagedCapturer/SCAudioCaptureSession.h
Normal file
@ -0,0 +1,39 @@
//
//  SCAudioCaptureSession.h
//  Snapchat
//
//  Created by Liu Liu on 3/5/15.
//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>

extern double const kSCAudioCaptureSessionDefaultSampleRate;

typedef void (^audio_capture_session_block)(NSError *error);

@protocol SCAudioCaptureSession;

@protocol SCAudioCaptureSessionDelegate <NSObject>

- (void)audioCaptureSession:(id<SCAudioCaptureSession>)audioCaptureSession
      didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;

@end

@protocol SCAudioCaptureSession <NSObject>

@property (nonatomic, weak) id<SCAudioCaptureSessionDelegate> delegate;

// Passes a detailed error to the completion handler if recording could not be started; otherwise passes nil.
- (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate
                                      completionHandler:(audio_capture_session_block)completionHandler;

- (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler;

@end

@interface SCAudioCaptureSession : NSObject <SCAudioCaptureSession>

@end
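A minimal caller sketch (illustrative; assumes `self` conforms to SCAudioCaptureSessionDelegate):

SCAudioCaptureSession *session = [[SCAudioCaptureSession alloc] init];
session.delegate = self;

[session beginAudioRecordingAsynchronouslyWithSampleRate:kSCAudioCaptureSessionDefaultSampleRate
                                       completionHandler:^(NSError *error) {
                                           if (error) {
                                               // Recording could not start; inspect error.userInfo.
                                           }
                                       }];

// ... later, tear down synchronously:
[session disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL];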
289
ManagedCapturer/SCAudioCaptureSession.m
Normal file
@ -0,0 +1,289 @@
|
||||
//
|
||||
// SCAudioCaptureSession.m
|
||||
// Snapchat
|
||||
//
|
||||
// Created by Liu Liu on 3/5/15.
|
||||
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
|
||||
//
|
||||
|
||||
#import "SCAudioCaptureSession.h"
|
||||
|
||||
#import <SCAudio/SCAudioSession.h>
|
||||
#import <SCFoundation/SCLog.h>
|
||||
#import <SCFoundation/SCQueuePerformer.h>
|
||||
#import <SCFoundation/SCTrace.h>
|
||||
|
||||
#import <mach/mach.h>
|
||||
#import <mach/mach_time.h>
|
||||
|
||||
@import AVFoundation;
|
||||
|
||||
double const kSCAudioCaptureSessionDefaultSampleRate = 44100;
|
||||
NSString *const SCAudioCaptureSessionErrorDomain = @"SCAudioCaptureSessionErrorDomain";
|
||||
|
||||
static NSInteger const kNumberOfAudioBuffersInQueue = 15;
|
||||
static float const kAudioBufferDurationInSeconds = 0.2;
|
||||
|
||||
static char *const kSCAudioCaptureSessionQueueLabel = "com.snapchat.audio-capture-session";
|
||||
|
||||
@implementation SCAudioCaptureSession {
|
||||
SCQueuePerformer *_performer;
|
||||
|
||||
AudioQueueRef _audioQueue;
|
||||
AudioQueueBufferRef _audioQueueBuffers[kNumberOfAudioBuffersInQueue];
|
||||
CMAudioFormatDescriptionRef _audioFormatDescription;
|
||||
}
|
||||
|
||||
@synthesize delegate = _delegate;
|
||||
|
||||
- (instancetype)init
|
||||
{
|
||||
SCTraceStart();
|
||||
self = [super init];
|
||||
if (self) {
|
||||
_performer = [[SCQueuePerformer alloc] initWithLabel:kSCAudioCaptureSessionQueueLabel
|
||||
qualityOfService:QOS_CLASS_USER_INTERACTIVE
|
||||
queueType:DISPATCH_QUEUE_SERIAL
|
||||
context:SCQueuePerformerContextCamera];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc
|
||||
{
|
||||
[self disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL];
|
||||
}
|
||||
|
||||
static AudioStreamBasicDescription setupAudioFormat(UInt32 inFormatID, Float64 sampleRate)
|
||||
{
|
||||
SCTraceStart();
|
||||
AudioStreamBasicDescription recordFormat = {0};
|
||||
|
||||
recordFormat.mSampleRate = sampleRate;
|
||||
recordFormat.mChannelsPerFrame = (UInt32)[SCAudioSession sharedInstance].inputNumberOfChannels;
|
||||
|
||||
recordFormat.mFormatID = inFormatID;
|
||||
if (inFormatID == kAudioFormatLinearPCM) {
|
||||
// if we want pcm, default to signed 16-bit little-endian
|
||||
recordFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
|
||||
recordFormat.mBitsPerChannel = 16;
|
||||
recordFormat.mBytesPerPacket = recordFormat.mBytesPerFrame =
|
||||
(recordFormat.mBitsPerChannel / 8) * recordFormat.mChannelsPerFrame;
|
||||
recordFormat.mFramesPerPacket = 1;
|
||||
}
|
||||
return recordFormat;
|
||||
}
|
||||
|
||||
static int computeRecordBufferSize(const AudioStreamBasicDescription *format, const AudioQueueRef audioQueue,
|
||||
float seconds)
|
||||
{
|
||||
SCTraceStart();
|
||||
int packets, frames, bytes = 0;
|
||||
frames = (int)ceil(seconds * format->mSampleRate);
|
||||
|
||||
if (format->mBytesPerFrame > 0) {
|
||||
bytes = frames * format->mBytesPerFrame;
|
||||
} else {
|
||||
UInt32 maxPacketSize;
|
||||
if (format->mBytesPerPacket > 0)
|
||||
maxPacketSize = format->mBytesPerPacket; // constant packet size
|
||||
else {
|
||||
UInt32 propertySize = sizeof(maxPacketSize);
|
||||
AudioQueueGetProperty(audioQueue, kAudioQueueProperty_MaximumOutputPacketSize, &maxPacketSize,
|
||||
&propertySize);
|
||||
}
|
||||
if (format->mFramesPerPacket > 0)
|
||||
packets = frames / format->mFramesPerPacket;
|
||||
else
|
||||
packets = frames; // worst-case scenario: 1 frame in a packet
|
||||
if (packets == 0) // sanity check
|
||||
packets = 1;
|
||||
bytes = packets * maxPacketSize;
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
static NSTimeInterval machHostTimeToSeconds(UInt64 mHostTime)
|
||||
{
|
||||
static dispatch_once_t onceToken;
|
||||
static mach_timebase_info_data_t timebase_info;
|
||||
dispatch_once(&onceToken, ^{
|
||||
(void)mach_timebase_info(&timebase_info);
|
||||
});
|
||||
return (double)mHostTime * timebase_info.numer / timebase_info.denom / NSEC_PER_SEC;
|
||||
}
|
||||
|
||||
static void audioQueueBufferHandler(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer,
|
||||
const AudioTimeStamp *nStartTime, UInt32 inNumPackets,
|
||||
const AudioStreamPacketDescription *inPacketDesc)
|
||||
{
|
||||
SCTraceStart();
|
||||
SCAudioCaptureSession *audioCaptureSession = (__bridge SCAudioCaptureSession *)inUserData;
|
||||
if (inNumPackets > 0) {
|
||||
CMTime PTS = CMTimeMakeWithSeconds(machHostTimeToSeconds(nStartTime->mHostTime), 600);
|
||||
[audioCaptureSession appendAudioQueueBuffer:inBuffer
|
||||
numPackets:inNumPackets
|
||||
PTS:PTS
|
||||
packetDescriptions:inPacketDesc];
|
||||
}
|
||||
|
||||
AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
|
||||
}
|
||||
|
||||
- (void)appendAudioQueueBuffer:(AudioQueueBufferRef)audioQueueBuffer
|
||||
numPackets:(UInt32)numPackets
|
||||
PTS:(CMTime)PTS
|
||||
packetDescriptions:(const AudioStreamPacketDescription *)packetDescriptions
|
||||
{
|
||||
SCTraceStart();
|
||||
CMBlockBufferRef dataBuffer = NULL;
|
||||
CMBlockBufferCreateWithMemoryBlock(NULL, NULL, audioQueueBuffer->mAudioDataByteSize, NULL, NULL, 0,
|
||||
audioQueueBuffer->mAudioDataByteSize, 0, &dataBuffer);
|
||||
if (dataBuffer) {
|
||||
CMBlockBufferReplaceDataBytes(audioQueueBuffer->mAudioData, dataBuffer, 0,
|
||||
audioQueueBuffer->mAudioDataByteSize);
|
||||
CMSampleBufferRef sampleBuffer = NULL;
|
||||
CMAudioSampleBufferCreateWithPacketDescriptions(NULL, dataBuffer, true, NULL, NULL, _audioFormatDescription,
|
||||
numPackets, PTS, packetDescriptions, &sampleBuffer);
|
||||
if (sampleBuffer) {
|
||||
[self processAudioSampleBuffer:sampleBuffer];
|
||||
CFRelease(sampleBuffer);
|
||||
}
|
||||
CFRelease(dataBuffer);
|
||||
}
|
||||
}
|
||||
|
||||
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
{
|
||||
SCTraceStart();
|
||||
[_delegate audioCaptureSession:self didOutputSampleBuffer:sampleBuffer];
|
||||
}
|
||||
|
||||
- (NSError *)_generateErrorForType:(NSString *)errorType
|
||||
errorCode:(int)errorCode
|
||||
format:(AudioStreamBasicDescription)format
|
||||
{
|
||||
NSDictionary *errorInfo = @{
|
||||
@"error_type" : errorType,
|
||||
@"error_code" : @(errorCode),
|
||||
@"record_format" : @{
|
||||
@"format_id" : @(format.mFormatID),
|
||||
@"format_flags" : @(format.mFormatFlags),
|
||||
@"sample_rate" : @(format.mSampleRate),
|
||||
@"bytes_per_packet" : @(format.mBytesPerPacket),
|
||||
@"frames_per_packet" : @(format.mFramesPerPacket),
|
||||
@"bytes_per_frame" : @(format.mBytesPerFrame),
|
||||
@"channels_per_frame" : @(format.mChannelsPerFrame),
|
||||
@"bits_per_channel" : @(format.mBitsPerChannel)
|
||||
}
|
||||
};
|
||||
SCLogGeneralInfo(@"Audio queue error occured. ErrorInfo: %@", errorInfo);
|
||||
return [NSError errorWithDomain:SCAudioCaptureSessionErrorDomain code:errorCode userInfo:errorInfo];
|
||||
}

- (NSError *)beginAudioRecordingWithSampleRate:(Float64)sampleRate
{
    SCTraceStart();
    if ([SCAudioSession sharedInstance].inputAvailable) {
        // SCAudioSession should be activated already
        SCTraceSignal(@"Set audio session to be active");
        AudioStreamBasicDescription recordFormat = setupAudioFormat(kAudioFormatLinearPCM, sampleRate);
        OSStatus audioQueueCreationStatus = AudioQueueNewInput(&recordFormat, audioQueueBufferHandler,
                                                               (__bridge void *)self, NULL, NULL, 0, &_audioQueue);
        if (audioQueueCreationStatus != 0) {
            NSError *error = [self _generateErrorForType:@"audio_queue_create_error"
                                               errorCode:audioQueueCreationStatus
                                                  format:recordFormat];
            return error;
        }
        SCTraceSignal(@"Initialize audio queue with new input");
        UInt32 bufferByteSize = computeRecordBufferSize(
            &recordFormat, _audioQueue, kAudioBufferDurationInSeconds); // Enough bytes for half a second
        for (int i = 0; i < kNumberOfAudioBuffersInQueue; i++) {
            AudioQueueAllocateBuffer(_audioQueue, bufferByteSize, &_audioQueueBuffers[i]);
            AudioQueueEnqueueBuffer(_audioQueue, _audioQueueBuffers[i], 0, NULL);
        }
        SCTraceSignal(@"Allocate audio buffer");
        UInt32 size = sizeof(recordFormat);
        audioQueueCreationStatus =
            AudioQueueGetProperty(_audioQueue, kAudioQueueProperty_StreamDescription, &recordFormat, &size);
        if (0 != audioQueueCreationStatus) {
            NSError *error = [self _generateErrorForType:@"audio_queue_get_property_error"
                                               errorCode:audioQueueCreationStatus
                                                  format:recordFormat];
            [self disposeAudioRecording];
            return error;
        }
        SCTraceSignal(@"Audio queue sample rate %lf", recordFormat.mSampleRate);
        AudioChannelLayout acl;
        bzero(&acl, sizeof(acl));
        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
        audioQueueCreationStatus = CMAudioFormatDescriptionCreate(NULL, &recordFormat, sizeof(acl), &acl, 0, NULL, NULL,
                                                                  &_audioFormatDescription);
        if (0 != audioQueueCreationStatus) {
            NSError *error = [self _generateErrorForType:@"audio_queue_audio_format_error"
                                               errorCode:audioQueueCreationStatus
                                                  format:recordFormat];
            [self disposeAudioRecording];
            return error;
        }
        SCTraceSignal(@"Start audio queue");
        audioQueueCreationStatus = AudioQueueStart(_audioQueue, NULL);
        if (0 != audioQueueCreationStatus) {
            NSError *error = [self _generateErrorForType:@"audio_queue_start_error"
                                               errorCode:audioQueueCreationStatus
                                                  format:recordFormat];
            [self disposeAudioRecording];
            return error;
        }
    }
    return nil;
}
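
// Note: setupAudioFormat() and computeRecordBufferSize() are defined earlier in this file,
// outside this hunk. A minimal sketch of what such helpers plausibly do is below, for
// reference only — the exact fields and rounding are assumptions, not the shipped code:
//
//   static AudioStreamBasicDescription setupAudioFormat(UInt32 formatID, Float64 sampleRate)
//   {
//       AudioStreamBasicDescription format = {0};
//       format.mFormatID = formatID;         // e.g. kAudioFormatLinearPCM
//       format.mSampleRate = sampleRate;
//       format.mChannelsPerFrame = 1;        // mono, matching kAudioChannelLayoutTag_Mono above
//       format.mBitsPerChannel = 16;
//       format.mBytesPerFrame = format.mChannelsPerFrame * (format.mBitsPerChannel / 8);
//       format.mFramesPerPacket = 1;         // uncompressed PCM: one frame per packet
//       format.mBytesPerPacket = format.mBytesPerFrame * format.mFramesPerPacket;
//       format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
//       return format;
//   }
//
//   static UInt32 computeRecordBufferSize(const AudioStreamBasicDescription *format,
//                                         AudioQueueRef queue, Float64 seconds)
//   {
//       // For PCM, bytes = frames * bytesPerFrame; a compressed format would consult
//       // kAudioQueueProperty_MaximumOutputPacketSize instead.
//       UInt32 frames = (UInt32)ceil(seconds * format->mSampleRate);
//       return frames * format->mBytesPerFrame;
//   }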

- (void)disposeAudioRecording
{
    SCTraceStart();
    SCLogGeneralInfo(@"dispose audio recording");
    if (_audioQueue) {
        AudioQueueStop(_audioQueue, true);
        AudioQueueDispose(_audioQueue, true);
        for (int i = 0; i < kNumberOfAudioBuffersInQueue; i++) {
            _audioQueueBuffers[i] = NULL;
        }
        _audioQueue = NULL;
    }
    if (_audioFormatDescription) {
        CFRelease(_audioFormatDescription);
        _audioFormatDescription = NULL;
    }
}

#pragma mark - Public methods

- (void)beginAudioRecordingAsynchronouslyWithSampleRate:(double)sampleRate
                                      completionHandler:(audio_capture_session_block)completionHandler
{
    SCTraceStart();
    // Request audio session change for recording mode.
    [_performer perform:^{
        SCTraceStart();
        NSError *error = [self beginAudioRecordingWithSampleRate:sampleRate];
        if (completionHandler) {
            completionHandler(error);
        }
    }];
}

- (void)disposeAudioRecordingSynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
{
    SCTraceStart();
    [_performer performAndWait:^{
        SCTraceStart();
        [self disposeAudioRecording];
        if (completionHandler) {
            completionHandler();
        }
    }];
}
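
// Illustrative call site (hypothetical; assumes the caller owns a configured session whose
// delegate is already set — `audioCaptureSession` is not a name from this file):
//
//   [audioCaptureSession beginAudioRecordingAsynchronouslyWithSampleRate:44100
//                                                      completionHandler:^(NSError *error) {
//                                                          if (error) {
//                                                              SCLogGeneralInfo(@"Audio recording failed: %@", error);
//                                                          }
//                                                      }];
//   ...
//   [audioCaptureSession disposeAudioRecordingSynchronouslyWithCompletionHandler:NULL];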

@end
23
ManagedCapturer/SCCameraSettingUtils.h
Normal file
23
ManagedCapturer/SCCameraSettingUtils.h
Normal file
@ -0,0 +1,23 @@
//
//  SCCameraSettingUtils.h
//  Snapchat
//
//  Created by Pinlin Chen on 12/09/2017.
//

#import <SCBase/SCMacros.h>

#import <SCCapturerDefines.h>

#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>

SC_EXTERN_C_BEGIN

// Return the value if the metadata attribute is found; otherwise, return nil
extern NSNumber *retrieveExposureTimeFromEXIFAttachments(CFDictionaryRef exifAttachments);
extern NSNumber *retrieveBrightnessFromEXIFAttachments(CFDictionaryRef exifAttachments);
extern NSNumber *retrieveISOSpeedRatingFromEXIFAttachments(CFDictionaryRef exifAttachments);
extern void retrieveSampleBufferMetadata(CMSampleBufferRef sampleBuffer, SampleBufferMetadata *metadata);

SC_EXTERN_C_END
79
ManagedCapturer/SCCameraSettingUtils.m
Normal file
79
ManagedCapturer/SCCameraSettingUtils.m
Normal file
@ -0,0 +1,79 @@
//
//  SCCameraSettingUtils.m
//  Snapchat
//
//  Created by Pinlin Chen on 12/09/2017.
//

#import "SCCameraSettingUtils.h"

#import <SCFoundation/SCLog.h>

#import <ImageIO/CGImageProperties.h>

NSNumber *retrieveExposureTimeFromEXIFAttachments(CFDictionaryRef exifAttachments)
{
    if (!exifAttachments) {
        return nil;
    }
    // Fetching exposure time from the sample buffer
    id value = (__bridge id)CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifExposureTime);
    if ([value isKindOfClass:[NSNumber class]]) {
        return (NSNumber *)value;
    }
    return nil;
}

NSNumber *retrieveBrightnessFromEXIFAttachments(CFDictionaryRef exifAttachments)
{
    if (!exifAttachments) {
        return nil;
    }
    id value = (__bridge id)CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifBrightnessValue);
    if ([value isKindOfClass:[NSNumber class]]) {
        return (NSNumber *)value;
    }
    return nil;
}

NSNumber *retrieveISOSpeedRatingFromEXIFAttachments(CFDictionaryRef exifAttachments)
{
    if (!exifAttachments) {
        return nil;
    }
    NSArray *ISOSpeedRatings =
        (__bridge NSArray *)CFDictionaryGetValue(exifAttachments, kCGImagePropertyExifISOSpeedRatings);
    if ([ISOSpeedRatings respondsToSelector:@selector(count)] &&
        [ISOSpeedRatings respondsToSelector:@selector(firstObject)] && ISOSpeedRatings.count > 0) {
        id value = [ISOSpeedRatings firstObject];
        if ([value isKindOfClass:[NSNumber class]]) {
            return (NSNumber *)value;
        }
    }
    return nil;
}

void retrieveSampleBufferMetadata(CMSampleBufferRef sampleBuffer, SampleBufferMetadata *metadata)
{
    CFDictionaryRef exifAttachments = CMGetAttachment(sampleBuffer, kCGImagePropertyExifDictionary, NULL);
    if (exifAttachments == NULL) {
        SCLogCoreCameraWarning(@"SampleBuffer exifAttachment is nil");
    }
    // Fetching exposure time from the sample buffer
    NSNumber *currentExposureTimeNum = retrieveExposureTimeFromEXIFAttachments(exifAttachments);
    if (currentExposureTimeNum) {
        metadata->exposureTime = [currentExposureTimeNum floatValue];
    }
    NSNumber *currentISOSpeedRatingNum = retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments);
    if (currentISOSpeedRatingNum) {
        metadata->isoSpeedRating = (int)[currentISOSpeedRatingNum integerValue];
    }
    NSNumber *currentBrightnessNum = retrieveBrightnessFromEXIFAttachments(exifAttachments);
    if (currentBrightnessNum) {
        float currentBrightness = [currentBrightnessNum floatValue];
        // Guard against NaN/Inf brightness values in the EXIF payload
        if (isfinite(currentBrightness)) {
            metadata->brightness = currentBrightness;
        } else {
            metadata->brightness = 0;
        }
    }
}
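
// Illustrative usage from a video pipeline callback (hypothetical; assumes the metadata struct
// may be zero-initialized here and that an Info-level camera log macro exists alongside the
// Warning/Error ones used in this file):
//
//   SampleBufferMetadata metadata = {0};
//   retrieveSampleBufferMetadata(sampleBuffer, &metadata);
//   SCLogCoreCameraInfo(@"exposure=%f iso=%d brightness=%f",
//                       metadata.exposureTime, metadata.isoSpeedRating, metadata.brightness);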
74
ManagedCapturer/SCCaptureCommon.h
Normal file
74
ManagedCapturer/SCCaptureCommon.h
Normal file
@ -0,0 +1,74 @@
//
//  SCCaptureCommon.h
//  Snapchat
//
//  Created by Lin Jia on 9/29/17.
//

#import "SCManagedCaptureDevice.h"
#import "SCManagedDeviceCapacityAnalyzerListener.h"
#import "SCVideoCaptureSessionInfo.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

@class SCManagedCapturerState;
@class SCManagedLensesProcessor;
@class SCManagedVideoDataSource;
@class SCManagedVideoCapturerOutputSettings;
@class SCLens;
@class SCLensCategory;
@class SCLookseryFilterFactory;
@class SCSnapScannedData;
@class SCCraftResourceManager;
@class SCScanConfiguration;
@class SCCapturerToken;
@class SCProcessingPipeline;
@class SCTimedTask;
@protocol SCManagedSampleBufferDisplayController;

typedef void (^sc_managed_capturer_capture_still_image_completion_handler_t)(UIImage *fullScreenImage,
                                                                             NSDictionary *metadata, NSError *error,
                                                                             SCManagedCapturerState *state);

typedef void (^sc_managed_capturer_capture_video_frame_completion_handler_t)(UIImage *image);

typedef void (^sc_managed_capturer_start_recording_completion_handler_t)(SCVideoCaptureSessionInfo session,
                                                                         NSError *error);

typedef void (^sc_managed_capturer_convert_view_coordniates_completion_handler_t)(CGPoint pointOfInterest);

typedef void (^sc_managed_capturer_unsafe_changes_t)(AVCaptureSession *session, AVCaptureDevice *front,
                                                     AVCaptureDeviceInput *frontInput, AVCaptureDevice *back,
                                                     AVCaptureDeviceInput *backInput, SCManagedCapturerState *state);

typedef void (^sc_managed_capturer_stop_running_completion_handler_t)(BOOL succeed);

typedef void (^sc_managed_capturer_scan_results_handler_t)(NSObject *resultObject);

typedef void (^sc_managed_lenses_processor_category_point_completion_handler_t)(SCLensCategory *category,
                                                                                NSInteger categoriesCount);

extern CGFloat const kSCManagedCapturerAspectRatioUnspecified;

extern CGFloat const kSCManagedCapturerDefaultVideoActiveFormatWidth;

extern CGFloat const kSCManagedCapturerDefaultVideoActiveFormatHeight;

extern CGFloat const kSCManagedCapturerVideoActiveFormatWidth1080p;

extern CGFloat const kSCManagedCapturerVideoActiveFormatHeight1080p;

extern CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatWidth;

extern CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatHeight;

extern CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatWidth;

extern CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatHeight;

extern CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatWidth;

extern CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatHeight;
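
// Illustrative use of one of these block typedefs in an API signature (hypothetical method
// name; the real capturer interfaces that consume these typedefs are declared elsewhere):
//
//   - (void)captureStillImageWithCompletionHandler:
//                 (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler;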
31
ManagedCapturer/SCCaptureCommon.m
Normal file
31
ManagedCapturer/SCCaptureCommon.m
Normal file
@ -0,0 +1,31 @@
//
//  SCCaptureCommon.m
//  Snapchat
//
//  Created by Lin Jia on 9/29/17.
//

#import "SCCaptureCommon.h"

CGFloat const kSCManagedCapturerAspectRatioUnspecified = -1.0;

CGFloat const kSCManagedCapturerDefaultVideoActiveFormatWidth = 1280;

CGFloat const kSCManagedCapturerDefaultVideoActiveFormatHeight = 720;

CGFloat const kSCManagedCapturerVideoActiveFormatWidth1080p = 1920;

CGFloat const kSCManagedCapturerVideoActiveFormatHeight1080p = 1080;

CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatWidth = 2592;

CGFloat const kSCManagedCapturerNightVideoHighResActiveFormatHeight = 1936;

CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatWidth = 640;

CGFloat const kSCManagedCapturerNightVideoDefaultResActiveFormatHeight = 480;

CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatWidth = 1280;

CGFloat const kSCManagedCapturerLiveStreamingVideoActiveFormatHeight = 720;
22
ManagedCapturer/SCCaptureCoreImageFaceDetector.h
Normal file
22
ManagedCapturer/SCCaptureCoreImageFaceDetector.h
Normal file
@ -0,0 +1,22 @@
//
//  SCCaptureCoreImageFaceDetector.h
//  Snapchat
//
//  Created by Jiyang Zhu on 3/27/18.
//  Copyright © 2018 Snapchat, Inc. All rights reserved.
//
//  This class is intended to detect faces in Camera. It receives CMSampleBuffers, runs face detection using
//  CIDetector, and announces the face bounds and faceIDs.

#import "SCCaptureFaceDetector.h"

#import <SCBase/SCMacros.h>
#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>

#import <Foundation/Foundation.h>

@interface SCCaptureCoreImageFaceDetector : NSObject <SCCaptureFaceDetector, SCManagedVideoDataSourceListener>

SC_INIT_AND_NEW_UNAVAILABLE;

@end
205
ManagedCapturer/SCCaptureCoreImageFaceDetector.m
Normal file
205
ManagedCapturer/SCCaptureCoreImageFaceDetector.m
Normal file
@ -0,0 +1,205 @@
//
//  SCCaptureCoreImageFaceDetector.m
//  Snapchat
//
//  Created by Jiyang Zhu on 3/27/18.
//  Copyright © 2018 Snapchat, Inc. All rights reserved.
//

#import "SCCaptureCoreImageFaceDetector.h"

#import "SCCameraTweaks.h"
#import "SCCaptureFaceDetectionParser.h"
#import "SCCaptureFaceDetectorTrigger.h"
#import "SCCaptureResource.h"
#import "SCManagedCapturer.h"

#import <SCFoundation/NSArray+Helpers.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/SCTraceODPCompatible.h>
#import <SCFoundation/SCZeroDependencyExperiments.h>
#import <SCFoundation/UIImage+CVPixelBufferRef.h>

@import ImageIO;

static const NSTimeInterval kSCCaptureCoreImageFaceDetectorMaxAllowedLatency =
    1; // Drop the face detection result if it is 1 second late.
static const NSInteger kDefaultNumberOfSequentialOutputSampleBuffer = -1; // -1 means no sequential sample buffers.

static char *const kSCCaptureCoreImageFaceDetectorProcessQueue =
    "com.snapchat.capture-core-image-face-detector-process";

@implementation SCCaptureCoreImageFaceDetector {
    CIDetector *_detector;
    SCCaptureResource *_captureResource;

    BOOL _isDetecting;
    BOOL _hasDetectedFaces;
    NSInteger _numberOfSequentialOutputSampleBuffer;
    NSUInteger _detectionFrequency;
    NSDictionary *_detectorOptions;
    SCManagedCaptureDevicePosition _devicePosition;
    CIContext *_context;

    SCQueuePerformer *_callbackPerformer;
    SCQueuePerformer *_processPerformer;

    SCCaptureFaceDetectionParser *_parser;
    SCCaptureFaceDetectorTrigger *_trigger;
}

@synthesize trigger = _trigger;
@synthesize parser = _parser;

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    SCTraceODPCompatibleStart(2);
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"SCCaptureResource should not be nil");
        SCAssert(captureResource.queuePerformer, @"SCQueuePerformer should not be nil");
        _callbackPerformer = captureResource.queuePerformer;
        _captureResource = captureResource;
        _parser = [[SCCaptureFaceDetectionParser alloc]
            initWithFaceBoundsAreaThreshold:pow(SCCameraFaceFocusMinFaceSize(), 2)];
        _processPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCCaptureCoreImageFaceDetectorProcessQueue
                                                   qualityOfService:QOS_CLASS_USER_INITIATED
                                                          queueType:DISPATCH_QUEUE_SERIAL
                                                            context:SCQueuePerformerContextCamera];
        _detectionFrequency = SCExperimentWithFaceDetectionFrequency();
        _devicePosition = captureResource.device.position;
        _trigger = [[SCCaptureFaceDetectorTrigger alloc] initWithDetector:self];
    }
    return self;
}

- (void)_setupDetectionIfNeeded
{
    SCTraceODPCompatibleStart(2);
    SC_GUARD_ELSE_RETURN(!_detector);
    if (!_context) {
        _context = [CIContext context];
    }
    // For CIDetectorMinFeatureSize, the valid range is [0.0100, 0.5000]; values outside it will cause a crash.
    if (!_detectorOptions) {
        _detectorOptions = @{
            CIDetectorAccuracy : CIDetectorAccuracyLow,
            CIDetectorTracking : @(YES),
            CIDetectorMaxFeatureCount : @(2),
            CIDetectorMinFeatureSize : @(SCCameraFaceFocusMinFaceSize()),
            CIDetectorNumberOfAngles : @(3)
        };
    }
    @try {
        _detector = [CIDetector detectorOfType:CIDetectorTypeFace context:_context options:_detectorOptions];
    } @catch (NSException *exception) {
        SCLogCoreCameraError(@"Failed to create CIDetector with exception:%@", exception);
    }
}
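
// Note: with CIDetectorTracking enabled above, CIFaceFeature exposes a trackingID that stays
// stable for the same face across frames; that is presumably what the parser maps to the
// announced faceIDs (the parser lives in SCCaptureFaceDetectionParser, not shown in this hunk).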
|
||||
|
||||
- (void)_resetDetection
|
||||
{
|
||||
SCTraceODPCompatibleStart(2);
|
||||
_detector = nil;
|
||||
[self _setupDetectionIfNeeded];
|
||||
}
|
||||
|
||||
- (SCQueuePerformer *)detectionPerformer
|
||||
{
|
||||
return _processPerformer;
|
||||
}
|
||||
|
||||
- (void)startDetection
|
||||
{
|
||||
SCTraceODPCompatibleStart(2);
|
||||
SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -startDetection in an invalid queue.");
|
||||
[self _setupDetectionIfNeeded];
|
||||
_isDetecting = YES;
|
||||
_hasDetectedFaces = NO;
|
||||
_numberOfSequentialOutputSampleBuffer = kDefaultNumberOfSequentialOutputSampleBuffer;
|
||||
}
|
||||
|
||||
- (void)stopDetection
|
||||
{
|
||||
SCTraceODPCompatibleStart(2);
|
||||
SCAssert([[self detectionPerformer] isCurrentPerformer], @"Calling -stopDetection in an invalid queue.");
|
||||
_isDetecting = NO;
|
||||
}
|
||||
|
||||
- (NSDictionary<NSNumber *, NSValue *> *)_detectFaceFeaturesInImage:(CIImage *)image
|
||||
withOrientation:(CGImagePropertyOrientation)orientation
|
||||
{
|
||||
SCTraceODPCompatibleStart(2);
|
||||
NSDictionary *opts =
|
||||
@{ CIDetectorImageOrientation : @(orientation),
|
||||
CIDetectorEyeBlink : @(NO),
|
||||
CIDetectorSmile : @(NO) };
|
||||
NSArray<CIFeature *> *features = [_detector featuresInImage:image options:opts];
|
||||
return [_parser parseFaceBoundsByFaceIDFromCIFeatures:features
|
||||
withImageSize:image.extent.size
|
||||
imageOrientation:orientation];
|
||||
}
|
||||
|
||||
#pragma mark - SCManagedVideoDataSourceListener
|
||||
|
||||
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
|
||||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
|
||||
{
|
||||
SCTraceODPCompatibleStart(2);
|
||||
SC_GUARD_ELSE_RETURN(_isDetecting);
|
||||
|
||||
// Reset detection if the device position changes. Resetting detection should execute in _processPerformer, so we
|
||||
// just set a flag here, and then do it later in the perform block.
|
||||
BOOL shouldForceResetDetection = NO;
|
||||
if (devicePosition != _devicePosition) {
|
||||
_devicePosition = devicePosition;
|
||||
shouldForceResetDetection = YES;
|
||||
_numberOfSequentialOutputSampleBuffer = kDefaultNumberOfSequentialOutputSampleBuffer;
|
||||
}
|
||||
|
||||
_numberOfSequentialOutputSampleBuffer++;
|
||||
SC_GUARD_ELSE_RETURN(_numberOfSequentialOutputSampleBuffer % _detectionFrequency == 0);
|
||||
@weakify(self);
|
||||
CFRetain(sampleBuffer);
|
||||
[_processPerformer perform:^{
|
||||
SCTraceStart();
|
||||
@strongify(self);
|
||||
SC_GUARD_ELSE_RETURN(self);
|
||||
|
||||
if (shouldForceResetDetection) {
|
||||
// Resetting detection usually costs no more than 1ms.
|
||||
[self _resetDetection];
|
||||
}
|
||||
|
||||
CGImagePropertyOrientation orientation =
|
||||
(devicePosition == SCManagedCaptureDevicePositionBack ? kCGImagePropertyOrientationRight
|
||||
: kCGImagePropertyOrientationLeftMirrored);
|
||||
CIImage *image = [CIImage imageWithCVPixelBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)];
|
||||
NSDictionary<NSNumber *, NSValue *> *faceBoundsByFaceID =
|
||||
[self _detectFaceFeaturesInImage:image withOrientation:orientation];
|
||||
|
||||
// Calculate the latency for face detection, if it is too long, discard the face detection results.
|
||||
NSTimeInterval latency =
|
||||
CACurrentMediaTime() - CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
|
||||
CFRelease(sampleBuffer);
|
||||
if (latency >= kSCCaptureCoreImageFaceDetectorMaxAllowedLatency) {
|
||||
faceBoundsByFaceID = nil;
|
||||
}
|
||||
|
||||
// Only announce face detection result if faceBoundsByFaceID is not empty, or faceBoundsByFaceID was not empty
|
||||
// last time.
|
||||
if (faceBoundsByFaceID.count > 0 || self->_hasDetectedFaces) {
|
||||
self->_hasDetectedFaces = faceBoundsByFaceID.count > 0;
|
||||
[self->_callbackPerformer perform:^{
|
||||
[self->_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
|
||||
didDetectFaceBounds:faceBoundsByFaceID];
|
||||
}];
|
||||
}
|
||||
}];
|
||||
}
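
// A worked example of the cadence above, assuming a 30 fps feed and a hypothetical
// _detectionFrequency of 15 (the real value comes from SCExperimentWithFaceDetectionFrequency()):
// the counter starts at -1, so frames 0, 15, 30, ... pass the guard — detection runs about
// twice per second, and every other frame returns early before any CoreImage work.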

@end
24
ManagedCapturer/SCCaptureDeviceAuthorization.h
Normal file
24
ManagedCapturer/SCCaptureDeviceAuthorization.h
Normal file
@ -0,0 +1,24 @@
//
//  SCCaptureDeviceAuthorization.h
//  Snapchat
//
//  Created by Xiaomu Wu on 8/19/14.
//  Copyright (c) 2014 Snapchat, Inc. All rights reserved.
//

#import <Foundation/Foundation.h>

@interface SCCaptureDeviceAuthorization : NSObject

// Methods for checking / requesting authorization to use media capture devices of a given type.
+ (BOOL)notDeterminedForMediaType:(NSString *)mediaType;
+ (BOOL)deniedForMediaType:(NSString *)mediaType;
+ (BOOL)restrictedForMediaType:(NSString *)mediaType;
+ (void)requestAccessForMediaType:(NSString *)mediaType completionHandler:(void (^)(BOOL granted))handler;

// Convenience methods for media type == AVMediaTypeVideo
+ (BOOL)notDeterminedForVideoCapture;
+ (BOOL)deniedForVideoCapture;
+ (void)requestAccessForVideoCaptureWithCompletionHandler:(void (^)(BOOL granted))handler;

@end
71
ManagedCapturer/SCCaptureDeviceAuthorization.m
Normal file
71
ManagedCapturer/SCCaptureDeviceAuthorization.m
Normal file
@ -0,0 +1,71 @@
//
//  SCCaptureDeviceAuthorization.m
//  Snapchat
//
//  Created by Xiaomu Wu on 8/19/14.
//  Copyright (c) 2014 Snapchat, Inc. All rights reserved.
//

#import "SCCaptureDeviceAuthorization.h"

#import <BlizzardSchema/SCAEvents.h>
#import <SCFoundation/SCTrace.h>
#import <SCLogger/SCLogger.h>

@import AVFoundation;

@implementation SCCaptureDeviceAuthorization

#pragma mark - Public

+ (BOOL)notDeterminedForMediaType:(NSString *)mediaType
{
    return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusNotDetermined;
}

+ (BOOL)deniedForMediaType:(NSString *)mediaType
{
    return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusDenied;
}

+ (BOOL)restrictedForMediaType:(NSString *)mediaType
{
    return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusRestricted;
}

+ (void)requestAccessForMediaType:(NSString *)mediaType completionHandler:(void (^)(BOOL granted))handler
{
    [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:handler];
}

#pragma mark - Convenience methods for AVMediaTypeVideo

+ (BOOL)notDeterminedForVideoCapture
{
    return [self notDeterminedForMediaType:AVMediaTypeVideo];
}

+ (BOOL)deniedForVideoCapture
{
    return [self deniedForMediaType:AVMediaTypeVideo];
}

+ (void)requestAccessForVideoCaptureWithCompletionHandler:(void (^)(BOOL granted))handler
{
    BOOL firstTimeAsking =
        [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] == AVAuthorizationStatusNotDetermined;
    [self requestAccessForMediaType:AVMediaTypeVideo
                  completionHandler:^(BOOL granted) {
                      // Log the user's response to the OS camera permission prompt, but only the
                      // first time the prompt is actually shown.
                      if (firstTimeAsking) {
                          SCAPermissionPromptResponse *responseEvent = [[SCAPermissionPromptResponse alloc] init];
                          [responseEvent setPermissionPromptType:SCAPermissionPromptType_OS_CAMERA];
                          [responseEvent setAccepted:granted];
                          [[SCLogger sharedInstance] logUserTrackedEvent:responseEvent];
                      }
                      if (handler) {
                          handler(granted);
                      }
                  }];
}
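
// Illustrative call site (hypothetical; handling an already-denied state is the caller's
// responsibility):
//
//   if ([SCCaptureDeviceAuthorization notDeterminedForVideoCapture]) {
//       [SCCaptureDeviceAuthorization requestAccessForVideoCaptureWithCompletionHandler:^(BOOL granted) {
//           // Update camera UI based on `granted`, dispatching to the main thread as needed.
//       }];
//   }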

@end
31
ManagedCapturer/SCCaptureDeviceAuthorizationChecker.h
Normal file
31
ManagedCapturer/SCCaptureDeviceAuthorizationChecker.h
Normal file
@ -0,0 +1,31 @@
//
//  SCCaptureDeviceAuthorizationChecker.h
//  Snapchat
//
//  Created by Sun Lei on 15/03/2018.
//

@class SCQueuePerformer;

#import <SCBase/SCMacros.h>

#import <Foundation/Foundation.h>

/*
 SCCaptureDeviceAuthorizationChecker speeds up checking of AVMediaTypeVideo authorization by caching
 the authorization value. 'preloadVideoCaptureAuthorization' should be called very early after the app
 is launched to populate the cached value; 'authorizedForVideoCapture' can then be called to read the
 value synchronously.
 */

@interface SCCaptureDeviceAuthorizationChecker : NSObject

SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer NS_DESIGNATED_INITIALIZER;

- (BOOL)authorizedForVideoCapture;

- (void)preloadVideoCaptureAuthorization;

@end
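
// Illustrative launch-time flow (hypothetical call site; `performer` stands in for any serial
// SCQueuePerformer owned by the caller):
//
//   SCCaptureDeviceAuthorizationChecker *checker =
//       [[SCCaptureDeviceAuthorizationChecker alloc] initWithPerformer:performer];
//   [checker preloadVideoCaptureAuthorization];            // warm the cache right after launch
//   ...
//   BOOL authorized = [checker authorizedForVideoCapture]; // later: synchronous, usually cached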
71
ManagedCapturer/SCCaptureDeviceAuthorizationChecker.m
Normal file
71
ManagedCapturer/SCCaptureDeviceAuthorizationChecker.m
Normal file
@ -0,0 +1,71 @@
//
//  SCCaptureDeviceAuthorizationChecker.m
//  Snapchat
//
//  Created by Sun Lei on 15/03/2018.
//

#import "SCCaptureDeviceAuthorizationChecker.h"

#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>

@import AVFoundation;

@interface SCCaptureDeviceAuthorizationChecker () {
    SCQueuePerformer *_performer;
    BOOL _videoCaptureAuthorizationCachedValue;
}
@end

@implementation SCCaptureDeviceAuthorizationChecker

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
{
    SCTraceODPCompatibleStart(2);
    self = [super init];
    if (self) {
        _performer = performer;
        _videoCaptureAuthorizationCachedValue = NO;
    }
    return self;
}

- (void)preloadVideoCaptureAuthorization
{
    SCTraceODPCompatibleStart(2);
    [_performer perform:^{
        SCTraceODPCompatibleStart(2);
        _videoCaptureAuthorizationCachedValue = [self authorizedForMediaType:AVMediaTypeVideo];
    }];
}

- (BOOL)authorizedForVideoCapture
{
    SCTraceODPCompatibleStart(2);
    // Cache the authorization value once it is YES, because
    // [AVCaptureDevice authorizationStatusForMediaType:] is expensive on low-end devices like the iPhone 4.
    if (_videoCaptureAuthorizationCachedValue) {
        // If the user authorizes and then unauthorizes, iOS will SIGKILL the app.
        // When the user reopens the app, a pop-up tells them to allow camera access in Settings.
        // So returning YES here is safe.
        return YES;
    } else {
        @weakify(self);
        [_performer performAndWait:^{
            @strongify(self);
            SC_GUARD_ELSE_RETURN(self);
            if (!_videoCaptureAuthorizationCachedValue) {
                _videoCaptureAuthorizationCachedValue = [self authorizedForMediaType:AVMediaTypeVideo];
            }
        }];
        return _videoCaptureAuthorizationCachedValue;
    }
}
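
// Note: this read path is effectively double-checked caching — a cached YES returns without a
// queue hop, while the NO path re-checks under _performer so concurrent callers serialize
// against preloadVideoCaptureAuthorization. A NO result is never treated as final: each call
// re-queries the authorization status.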

- (BOOL)authorizedForMediaType:(NSString *)mediaType
{
    return [AVCaptureDevice authorizationStatusForMediaType:mediaType] == AVAuthorizationStatusAuthorized;
}

@end
31
ManagedCapturer/SCCaptureDeviceResolver.h
Normal file
31
ManagedCapturer/SCCaptureDeviceResolver.h
Normal file
@ -0,0 +1,31 @@
//
//  SCCaptureDeviceResolver.h
//  Snapchat
//
//  Created by Lin Jia on 11/8/17.
//

#import <AVFoundation/AVFoundation.h>

/*
 See https://jira.sc-corp.net/browse/CCAM-5843

 Retrieving an AVCaptureDevice is a flaky operation, so we introduce this capture device resolver to
 make our code more robust. The resolver does its best to find the camera for you.

 The resolver is only to be used by SCManagedCaptureDevice.

 All APIs are thread safe.
 */

@interface SCCaptureDeviceResolver : NSObject

+ (instancetype)sharedInstance;

- (AVCaptureDevice *)findAVCaptureDevice:(AVCaptureDevicePosition)position;

- (AVCaptureDevice *)findDualCamera;

@end
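
// Illustrative lookup (hypothetical call site; per the comment above, real callers live in
// SCManagedCaptureDevice):
//
//   AVCaptureDevice *back =
//       [[SCCaptureDeviceResolver sharedInstance] findAVCaptureDevice:AVCaptureDevicePositionBack];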