
Add files via upload

This commit is contained in:
Khaled Alshehry 2018-05-24 00:44:21 +03:00 committed by GitHub
parent 7a2ee0fa35
commit df1a78560c
40 changed files with 2572 additions and 0 deletions

View File

@@ -0,0 +1,113 @@
//
// SCCaptureConfiguration.h
// Snapchat
//
// Created by Lin Jia on 10/3/17.
//
//
#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerState.h"
#import "SCVideoCaptureSessionInfo.h"
#import <SCFoundation/SCQueuePerformer.h>
#import <Looksery/LSAGLView.h>
#import <Foundation/Foundation.h>
/*
SCCaptureConfiguration is the configuration class customers use to configure the camera. This is
how to use it:
SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
// Conduct the setting here.
e.g:
configuration.torchActive = YES;
// Commit your configuration
[captureConfigurator commitConfiguration:configuration
completionHandler:handler]
Here are several interesting facts about SCCaptureConfiguration:
1) Though SCCaptureConfiguration has many parameters, you do not need to care about the parameters you do not intend
to set. For example, if you only want to set night mode active, here is the code:
SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
configuration.isNightModeActive = YES;
[captureConfigurator commitConfiguration:configuration
completionHandler:handler]
That is it.
2) You can set multiple configuration settings and then commit. Before you commit, nothing will happen, e.g.:
SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
configuration.isNightModeActive = YES;
configuration.zoomFactor = 5;
configuration.lensesActive = YES;
[captureConfigurator commitConfiguration:configuration
completionHandler:handler]
3) Committing a configuration seals it. If you set parameters on a configuration after it is committed, it will crash
on debug builds, and on other builds, such as production, the setting will be ignored, e.g.:
SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
configuration.isNightModeActive = YES;
[captureConfigurator commitConfiguration:configuration
completionHandler:handler]
// The line below will crash on debug builds, and be ignored on other builds.
configuration.zoomFactor = 5;
4) Committing a configuration is an atomic action. That means all changes customers want applied to the camera happen
as a group. If 2 customers commit at the same time, we will handle them one by one.
5) We are still figuring out what parameters should be in this configuration; parameters could be added or deleted
later. In the end, the configuration is going to be the only way customers configure the camera.
*/
@interface SCCaptureConfiguration : NSObject
@property (nonatomic, assign) BOOL isRunning;
@property (nonatomic, assign) BOOL isNightModeActive;
@property (nonatomic, assign) BOOL lowLightCondition;
@property (nonatomic, assign) BOOL adjustingExposure;
@property (nonatomic, assign) SCManagedCaptureDevicePosition devicePosition;
@property (nonatomic, assign) CGFloat zoomFactor;
@property (nonatomic, assign) BOOL flashSupported;
@property (nonatomic, assign) BOOL torchSupported;
@property (nonatomic, assign) BOOL flashActive;
@property (nonatomic, assign) BOOL torchActive;
@property (nonatomic, assign) BOOL lensesActive;
@property (nonatomic, assign) BOOL arSessionActive;
@property (nonatomic, assign) BOOL liveVideoStreaming;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, strong) LSAGLView *videoPreviewGLView;
@property (nonatomic, assign) SCVideoCaptureSessionInfo captureSessionInfo;
@end

View File

@@ -0,0 +1,75 @@
//
// SCCaptureConfiguration.m
// Snapchat
//
// Created by Lin Jia on 10/3/17.
//
//
#import "SCCaptureConfiguration.h"
#import "SCCaptureConfiguration_Private.h"
#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCAssertWrapper.h>
@interface SCCaptureConfiguration () {
BOOL _sealed;
NSMutableSet<SCCaptureConfigurationDirtyKey *> *_dirtyKeys;
}
@end
@implementation SCCaptureConfiguration
- (instancetype)init
{
self = [super init];
if (self) {
_dirtyKeys = [[NSMutableSet<SCCaptureConfigurationDirtyKey *> alloc] init];
_sealed = NO;
}
return self;
}
- (void)setIsRunning:(BOOL)running
{
if ([self _configurationSealed]) {
return;
}
_isRunning = running;
[_dirtyKeys addObject:@(SCCaptureConfigurationKeyIsRunning)];
}
/*
All set methods will be added later. They follow the format of setIsRunning.
*/
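/*
For example, the night mode setter would follow the same pattern. This is a sketch of a method that
is not in this file yet, mirroring setIsRunning above:

- (void)setIsNightModeActive:(BOOL)isNightModeActive
{
    if ([self _configurationSealed]) {
        return;
    }
    _isNightModeActive = isNightModeActive;
    [_dirtyKeys addObject:@(SCCaptureConfigurationKeyIsNightModeActive)];
}
*/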
@end
@implementation SCCaptureConfiguration (internalMethods)
- (NSArray *)dirtyKeys
{
if (!_sealed && SCIsDebugBuild()) {
SCAssert(NO, @"Configuration not sealed yet, setting is still happening!");
}
return [_dirtyKeys allObjects];
}
- (void)seal
{
_sealed = YES;
}
- (BOOL)_configurationSealed
{
if (_sealed) {
if (SCIsDebugBuild()) {
SCAssert(NO, @"Try to set property after commit configuration to configurator");
}
return YES;
} else {
return NO;
}
}
@end

View File

@@ -0,0 +1,27 @@
//
// SCCaptureConfigurationAnnouncer.h
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureConfigurationListener.h"
#import <Foundation/Foundation.h>
/*
All APIs are thread safe. The announcer will not retain your object, so even if a customer forgets to call
removeListener:, no zombie objects are created.
*/
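/*
A minimal usage sketch (`configurator` is an already-initialized SCCaptureConfigurator and `observer`
conforms to SCCaptureConfigurationListener; both names are illustrative):

[configurator.announcer addListener:observer];
// observer immediately receives captureConfigurationDidChangeTo: with the current truth.
// Later, optionally (listeners are weakly held, so this is not required for cleanup):
[configurator.announcer removeListener:observer];
*/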
@interface SCCaptureConfigurationAnnouncer : NSObject
/*
When a customer adds an object as a listener, that object will receive an update with the current truth. That is the
object's chance to adjust itself according to the current configuration of the camera.
*/
- (void)addListener:(id<SCCaptureConfigurationListener>)listener;
- (void)removeListener:(id<SCCaptureConfigurationListener>)listener;
@end

View File

@@ -0,0 +1,67 @@
//
// SCCaptureConfigurationAnnouncer.m
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureConfigurationAnnouncer.h"
#import "SCCaptureConfigurationAnnouncer_Private.h"
#import "SCCaptureConfigurator.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCPerforming.h>
@interface SCCaptureConfigurationAnnouncer () {
NSHashTable<id<SCCaptureConfigurationListener>> *_listeners;
SCQueuePerformer *_performer;
__weak SCCaptureConfigurator *_configurator;
}
@end
@implementation SCCaptureConfigurationAnnouncer
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer configurator:(SCCaptureConfigurator *)configurator
{
self = [super init];
if (self) {
_listeners = [NSHashTable<id<SCCaptureConfigurationListener>> hashTableWithOptions:NSHashTableWeakMemory];
SCAssert(performer, @"performer should not be nil");
_performer = performer;
_configurator = configurator;
}
return self;
}
- (void)addListener:(id<SCCaptureConfigurationListener>)listener
{
[_performer perform:^{
SCAssert(listener, @"listener should not be nil");
[_listeners addObject:listener];
[listener captureConfigurationDidChangeTo:_configurator.currentConfiguration];
}];
}
- (void)removeListener:(id<SCCaptureConfigurationListener>)listener
{
[_performer perform:^{
SCAssert(listener, @"listener should not be nil");
[_listeners removeObject:listener];
}];
}
- (void)deliverConfigurationChange:(id<SCManagedCapturerState>)configuration
{
SCAssertPerformer(_performer);
for (id<SCCaptureConfigurationListener> listener in _listeners) {
[listener captureConfigurationDidChangeTo:configuration];
}
}
- (void)dealloc
{
[_listeners removeAllObjects];
}
@end

View File

@@ -0,0 +1,33 @@
//
// SCCaptureConfigurationAnnouncer_Private.h
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCapturerState.h"
#import <SCFoundation/SCQueuePerformer.h>
@class SCCaptureConfigurator;
/*
This private header is only going to be used by SCCaptureConfigurator. Other customers should only use the public
header.
*/
@interface SCCaptureConfigurationAnnouncer ()
/*
The announcer is instantiated by SCCaptureConfigurator and takes in a queue performer. The design is that the
announcer and the configurator share the same serial queue to avoid racing. This is something we could
change later.
*/
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer configurator:(SCCaptureConfigurator *)configurator;
/*
The API below is called by the configurator to notify listeners that the configuration has changed.
*/
- (void)deliverConfigurationChange:(id<SCManagedCapturerState>)configuration;
@end

View File

@@ -0,0 +1,23 @@
//
// SCCaptureConfigurationListener.h
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
#import "SCManagedCapturerState.h"
#import <Foundation/Foundation.h>
@class SCCaptureConfiguration;
/*
As a listener to the camera core configuration, you will get an update whenever the configuration changes, and you
will receive an immutable state object representing the current truth.
*/
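/*
For illustration, a minimal conforming listener might look like the sketch below (SCMyCameraObserver
is a hypothetical class name):

@implementation SCMyCameraObserver
- (void)captureConfigurationDidChangeTo:(id<SCManagedCapturerState>)state
{
    // Read whatever fields of the new truth this object cares about and adjust accordingly.
}
@end
*/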
@protocol SCCaptureConfigurationListener <NSObject>
- (void)captureConfigurationDidChangeTo:(id<SCManagedCapturerState>)state;
@end

View File

@@ -0,0 +1,46 @@
//
// SCCaptureConfiguration_Private.h
// Snapchat
//
// Created by Lin Jia on 10/3/17.
//
//
#import "SCCaptureConfiguration_Private.h"
typedef NSNumber SCCaptureConfigurationDirtyKey;
/*
The key values identifying dirty keys in SCCaptureConfiguration.
A dirty key is a key the customer has changed.
e.g. if the customer toggles device position, the dirty keys will include SCCaptureConfigurationKeyDevicePosition.
The list is not complete; it is only a draft now, and it
will be gradually tuned while we work on the APIs.
*/
typedef NS_ENUM(NSUInteger, SCCaptureConfigurationKey) {
SCCaptureConfigurationKeyIsRunning,
SCCaptureConfigurationKeyIsNightModeActive,
SCCaptureConfigurationKeyLowLightCondition,
SCCaptureConfigurationKeyDevicePosition,
SCCaptureConfigurationKeyZoomFactor,
SCCaptureConfigurationKeyFlashActive,
SCCaptureConfigurationKeyTorchActive,
SCCaptureConfigurationKeyARSessionActive,
SCCaptureConfigurationKeyLensesActive,
SCCaptureConfigurationKeyVideoRecording,
};
@interface SCCaptureConfiguration (internalMethods)
// Returns dirtyKeys, which identify the parameters the customer wants to set.
- (NSArray *)dirtyKeys;
// Called by SCCaptureConfigurator to seal a configuration, so future changes are ignored.
- (void)seal;
- (BOOL)_configurationSealed;
@end

View File

@@ -0,0 +1,59 @@
//
// SCCaptureConfigurator.h
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureConfiguration.h"
#import "SCCaptureConfigurationAnnouncer.h"
#import "SCManagedCaptureDevice.h"
#import "SCVideoCaptureSessionInfo.h"
#import <SCFoundation/SCQueuePerformer.h>
#import <Looksery/LSAGLView.h>
#import <Foundation/Foundation.h>
/*
SCCaptureConfigurator is the class you use to configure camera hardware settings, such as setting the camera to front
or back, setting the camera hardware to a certain resolution, or activating night mode.
You can use this class for many things:
a) Do a one-time check of the current camera configuration via currentConfiguration.
Note that we represent the configuration via id<SCManagedCapturerState>, which is an immutable object.
b) Register as a listener for configuration changes via the announcer.
Every time the camera configuration changes, you will receive an update.
c) Set the configuration via the commitConfiguration API. You convey your setting intention via SCCaptureConfiguration.
You can register a completionHandler to be called after your configuration is applied.
Inside the completionHandler, we will pass you an error if one happens, along with a boolean cameraChanged. If your
configuration already equals the current configuration of the camera, we will not change the camera, and the boolean
will be NO.
d) All APIs are thread safe.
*/
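/*
A sketch of c), assuming `configurator` is an initialized SCCaptureConfigurator (the handler body is
illustrative):

SCCaptureConfiguration *configuration = [SCCaptureConfiguration new];
configuration.torchActive = YES;
[configurator commitConfiguration:configuration
                completionHandler:^(NSError *error, BOOL cameraChanged) {
                    // error is nil on success; cameraChanged reports whether the
                    // hardware configuration actually had to change.
                }];
*/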
typedef void (^SCCaptureConfigurationCompletionHandler)(NSError *error, BOOL cameraChanged);
@interface SCCaptureConfigurator : NSObject
@property (nonatomic, strong, readonly) SCCaptureConfigurationAnnouncer *announcer;
@property (nonatomic, strong, readonly) id<SCManagedCapturerState> currentConfiguration;
- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer;
- (void)commitConfiguration:(SCCaptureConfiguration *)configuration
completionHandler:(SCCaptureConfigurationCompletionHandler)completionHandler;
@end

View File

@@ -0,0 +1,56 @@
//
// SCCaptureConfigurator.m
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureConfigurator.h"
#import "SCCaptureConfigurationAnnouncer_Private.h"
#import "SCCaptureConfiguration_Private.h"
#import <SCFoundation/SCAssertWrapper.h>
@interface SCCaptureConfigurator () {
SCQueuePerformer *_performer;
}
@end
@implementation SCCaptureConfigurator
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
{
self = [super init];
if (self) {
_announcer = [[SCCaptureConfigurationAnnouncer alloc] initWithPerformer:performer configurator:self];
_performer = performer;
// TODO: initialize _currentConfiguration
}
return self;
}
- (void)commitConfiguration:(SCCaptureConfiguration *)configuration
completionHandler:(SCCaptureConfigurationCompletionHandler)completionHandler
{
[configuration seal];
[_performer perform:^() {
SCAssert(configuration, @"Configuration must be a valid input parameter");
NSArray<SCCaptureConfigurationDirtyKey *> *dirtyKeys = [configuration dirtyKeys];
for (SCCaptureConfigurationDirtyKey *key in dirtyKeys) {
[self _processKey:[key integerValue] configuration:configuration];
}
if (completionHandler) {
// TODO: pass in the right parameters.
completionHandler(nil, YES);
}
}];
}
- (void)_processKey:(SCCaptureConfigurationKey)key configuration:(SCCaptureConfiguration *)configuration
{
// Tune the hardware depending on which key is dirty and what value is inside the configuration.
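// A sketch of the eventual shape (the device-tuning calls are hypothetical and not in this file yet):
// switch (key) {
//     case SCCaptureConfigurationKeyTorchActive:
//         // apply configuration.torchActive to the capture device
//         break;
//     default:
//         break;
// }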
}
@end

View File

@@ -0,0 +1,42 @@
//
// SCCaptureCore.h
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureStateMachineContext.h"
#import "SCCapturer.h"
#import <SCFoundation/SCPerforming.h>
#import <Foundation/Foundation.h>
@class SCCaptureConfigurator;
/*
SCCaptureCore abstracts away the hardware aspect of a camera. SCCaptureCore is the V2 version of the
SCManagedCapturerV1.
SCCaptureCore itself actually does very little. Its main job is to expose the camera hardware APIs to outside
customers. The actual heavy lifting is done by delegating the jobs to multiple worker classes.
We generally categorize the operation of camera hardware into 2 categories:
1) make the camera hardware do state transitions, such as what is shown in this graph:
https://docs.google.com/presentation/d/1KWk-XSgO0wFAjBZXsl_OnHBGpi_pd9-ds6Wje8vX-0s/edit#slide=id.g2017e46295_1_10
2) configure camera hardware settings, such as setting the camera to front or back, setting the camera hardware to a
certain resolution, or activating night mode.
We created 2 worker classes to do the heavy lifting. Both of them are under construction. Feel free to check out
SCCaptureConfigurator, which is responsible for 2).
*/
@interface SCCaptureCore : NSObject <SCCapturer>
@property (nonatomic, strong, readonly) SCCaptureStateMachineContext *stateMachine;
@end

View File

@@ -0,0 +1,475 @@
//
// SCCaptureCore.m
// Snapchat
//
// Created by Lin Jia on 10/2/17.
//
//
#import "SCCaptureCore.h"
#import "SCCaptureDeviceAuthorizationChecker.h"
#import "SCCaptureResource.h"
#import "SCCaptureWorker.h"
#import "SCManagedCapturePreviewLayerController.h"
#import "SCManagedCapturerGLViewManagerAPI.h"
#import "SCManagedCapturerLSAComponentTrackerAPI.h"
#import "SCManagedCapturerV1_Private.h"
#import <SCAudio/SCAudioConfiguration.h>
#import <SCFoundation/SCAssertWrapper.h>
static const char *kSCCaptureDeviceAuthorizationManagerQueueLabel =
"com.snapchat.capture_device_authorization_checker_queue";
@implementation SCCaptureCore {
SCManagedCapturerV1 *_managedCapturerV1;
SCQueuePerformer *_queuePerformer;
SCCaptureDeviceAuthorizationChecker *_authorizationChecker;
}
@synthesize blackCameraDetector = _blackCameraDetector;
- (instancetype)init
{
SCTraceStart();
SCAssertMainThread();
self = [super init];
if (self) {
_managedCapturerV1 = [SCManagedCapturerV1 sharedInstance];
SCCaptureResource *resource = _managedCapturerV1.captureResource;
_queuePerformer = resource.queuePerformer;
_stateMachine = [[SCCaptureStateMachineContext alloc] initWithResource:resource];
SCQueuePerformer *authorizationCheckPerformer =
[[SCQueuePerformer alloc] initWithLabel:kSCCaptureDeviceAuthorizationManagerQueueLabel
qualityOfService:QOS_CLASS_USER_INTERACTIVE
queueType:DISPATCH_QUEUE_SERIAL
context:SCQueuePerformerContextCamera];
_authorizationChecker =
[[SCCaptureDeviceAuthorizationChecker alloc] initWithPerformer:authorizationCheckPerformer];
}
return self;
}
- (id<SCManagedCapturerLensAPI>)lensProcessingCore
{
return _managedCapturerV1.lensProcessingCore;
}
// For APIs inside the SCCapturer protocol: if they are related to the capture state machine, we delegate to the state machine.
- (void)setupWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_stateMachine initializeCaptureWithDevicePositionAsynchronously:devicePosition
completionHandler:completionHandler
context:context];
}
- (SCCapturerToken *)startRunningAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
return [_stateMachine startRunningWithContext:context completionHandler:completionHandler];
}
#pragma mark - Recording / Capture
- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
captureSessionID:(NSString *)captureSessionID
completionHandler:
(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
context:(NSString *)context
{
[_stateMachine captureStillImageAsynchronouslyWithAspectRatio:aspectRatio
captureSessionID:captureSessionID
completionHandler:completionHandler
context:context];
}
- (void)stopRunningAsynchronously:(SCCapturerToken *)token
completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
context:(NSString *)context
{
[_stateMachine stopRunningWithCapturerToken:token completionHandler:completionHandler context:context];
}
- (void)stopRunningAsynchronously:(SCCapturerToken *)token
completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
after:(NSTimeInterval)delay
context:(NSString *)context
{
[_stateMachine stopRunningWithCapturerToken:token after:delay completionHandler:completionHandler context:context];
}
#pragma mark - Scanning
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context
{
[_stateMachine startScanAsynchronouslyWithScanConfiguration:configuration context:context];
}
- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context
{
[_stateMachine stopScanAsynchronouslyWithCompletionHandler:completionHandler context:context];
}
- (void)prepareForRecordingAsynchronouslyWithContext:(NSString *)context
audioConfiguration:(SCAudioConfiguration *)configuration
{
[_stateMachine prepareForRecordingAsynchronouslyWithAudioConfiguration:configuration context:context];
}
- (void)startRecordingAsynchronouslyWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
audioConfiguration:(SCAudioConfiguration *)configuration
maxDuration:(NSTimeInterval)maxDuration
fileURL:(NSURL *)fileURL
captureSessionID:(NSString *)captureSessionID
completionHandler:
(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
context:(NSString *)context
{
[_stateMachine startRecordingWithOutputSettings:outputSettings
audioConfiguration:configuration
maxDuration:maxDuration
fileURL:fileURL
captureSessionID:captureSessionID
completionHandler:completionHandler
context:context];
}
- (void)stopRecordingAsynchronouslyWithContext:(NSString *)context
{
[_stateMachine stopRecordingWithContext:context];
}
- (void)cancelRecordingAsynchronouslyWithContext:(NSString *)context
{
[_stateMachine cancelRecordingWithContext:context];
[[self snapCreationTriggers] markSnapCreationEndWithContext:context];
}
#pragma mark -
- (void)startStreamingAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 startStreamingAsynchronouslyWithCompletionHandler:completionHandler context:context];
}
- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
context:(NSString *)context
{
[_managedCapturerV1 addSampleBufferDisplayController:sampleBufferDisplayController context:context];
}
#pragma mark - Utilities
- (void)convertViewCoordinates:(CGPoint)viewCoordinates
completionHandler:(sc_managed_capturer_convert_view_coordniates_completion_handler_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 convertViewCoordinates:viewCoordinates completionHandler:completionHandler context:context];
}
- (void)detectLensCategoryOnNextFrame:(CGPoint)point
lenses:(NSArray<SCLens *> *)lenses
completion:(sc_managed_lenses_processor_category_point_completion_handler_t)completion
context:(NSString *)context
{
[_managedCapturerV1 detectLensCategoryOnNextFrame:point lenses:lenses completion:completion context:context];
}
#pragma mark - Configurations
- (void)setDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 setDevicePositionAsynchronously:devicePosition
completionHandler:completionHandler
context:context];
}
- (void)setFlashActive:(BOOL)flashActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 setFlashActive:flashActive completionHandler:completionHandler context:context];
}
- (void)setLensesActive:(BOOL)lensesActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 setLensesActive:lensesActive completionHandler:completionHandler context:context];
}
- (void)setLensesActive:(BOOL)lensesActive
filterFactory:(SCLookseryFilterFactory *)filterFactory
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 setLensesActive:lensesActive
filterFactory:filterFactory
completionHandler:completionHandler
context:context];
}
- (void)setLensesInTalkActive:(BOOL)lensesActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 setLensesInTalkActive:lensesActive completionHandler:completionHandler context:context];
}
- (void)setTorchActiveAsynchronously:(BOOL)torchActive
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 setTorchActiveAsynchronously:torchActive completionHandler:completionHandler context:context];
}
- (void)setNightModeActiveAsynchronously:(BOOL)active
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 setNightModeActiveAsynchronously:active completionHandler:completionHandler context:context];
}
- (void)lockZoomWithContext:(NSString *)context
{
[_managedCapturerV1 lockZoomWithContext:context];
}
- (void)unlockZoomWithContext:(NSString *)context
{
[_managedCapturerV1 unlockZoomWithContext:context];
}
- (void)setZoomFactorAsynchronously:(CGFloat)zoomFactor context:(NSString *)context
{
[_managedCapturerV1 setZoomFactorAsynchronously:zoomFactor context:context];
}
- (void)resetZoomFactorAsynchronously:(CGFloat)zoomFactor
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
context:(NSString *)context
{
[_managedCapturerV1 resetZoomFactorAsynchronously:zoomFactor devicePosition:devicePosition context:context];
}
- (void)setExposurePointOfInterestAsynchronously:(CGPoint)pointOfInterest
fromUser:(BOOL)fromUser
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 setExposurePointOfInterestAsynchronously:pointOfInterest
fromUser:fromUser
completionHandler:completionHandler
context:context];
}
- (void)setAutofocusPointOfInterestAsynchronously:(CGPoint)pointOfInterest
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 setAutofocusPointOfInterestAsynchronously:pointOfInterest
completionHandler:completionHandler
context:context];
}
- (void)setPortraitModePointOfInterestAsynchronously:(CGPoint)pointOfInterest
completionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 setPortraitModePointOfInterestAsynchronously:pointOfInterest
completionHandler:completionHandler
context:context];
}
- (void)continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 continuousAutofocusAndExposureAsynchronouslyWithCompletionHandler:completionHandler
context:context];
}
// These methods are called explicitly from SCAppDelegate so that we get the latest application lifecycle information.
- (void)applicationDidEnterBackground
{
[_managedCapturerV1 applicationDidEnterBackground];
}
- (void)applicationWillEnterForeground
{
[_managedCapturerV1 applicationWillEnterForeground];
}
- (void)applicationDidBecomeActive
{
[_managedCapturerV1 applicationDidBecomeActive];
}
- (void)applicationWillResignActive
{
[_managedCapturerV1 applicationWillResignActive];
}
- (void)mediaServicesWereReset
{
[_managedCapturerV1 mediaServicesWereReset];
}
- (void)mediaServicesWereLost
{
[_managedCapturerV1 mediaServicesWereLost];
}
#pragma mark - Add / Remove Listener
- (void)addListener:(id<SCManagedCapturerListener>)listener
{
[_managedCapturerV1 addListener:listener];
}
- (void)removeListener:(id<SCManagedCapturerListener>)listener
{
[_managedCapturerV1 removeListener:listener];
}
- (void)addVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener
{
[_managedCapturerV1 addVideoDataSourceListener:listener];
}
- (void)removeVideoDataSourceListener:(id<SCManagedVideoDataSourceListener>)listener
{
[_managedCapturerV1 removeVideoDataSourceListener:listener];
}
- (void)addDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
[_managedCapturerV1 addDeviceCapacityAnalyzerListener:listener];
}
- (void)removeDeviceCapacityAnalyzerListener:(id<SCManagedDeviceCapacityAnalyzerListener>)listener
{
[_managedCapturerV1 removeDeviceCapacityAnalyzerListener:listener];
}
- (NSString *)debugInfo
{
return [_managedCapturerV1 debugInfo];
}
- (id<SCManagedVideoDataSource>)currentVideoDataSource
{
return [_managedCapturerV1 currentVideoDataSource];
}
// For APIs inside the SCCapturer protocol: if they are not related to the capture state machine, we directly delegate to V1.
- (void)checkRestrictedCamera:(void (^)(BOOL, BOOL, AVAuthorizationStatus))callback
{
[_managedCapturerV1 checkRestrictedCamera:callback];
}
- (void)recreateAVCaptureSession
{
[_managedCapturerV1 recreateAVCaptureSession];
}
#pragma mark -
- (CMTime)firstWrittenAudioBufferDelay
{
return [SCCaptureWorker firstWrittenAudioBufferDelay:_managedCapturerV1.captureResource];
}
- (BOOL)audioQueueStarted
{
return [SCCaptureWorker audioQueueStarted:_managedCapturerV1.captureResource];
}
- (BOOL)isLensApplied
{
return [SCCaptureWorker isLensApplied:_managedCapturerV1.captureResource];
}
- (BOOL)isVideoMirrored
{
return [SCCaptureWorker isVideoMirrored:_managedCapturerV1.captureResource];
}
- (SCVideoCaptureSessionInfo)activeSession
{
return _managedCapturerV1.activeSession;
}
- (void)setBlackCameraDetector:(SCBlackCameraDetector *)blackCameraDetector
deviceMotionProvider:(id<SCDeviceMotionProvider>)deviceMotionProvider
fileInputDecider:(id<SCFileInputDecider>)fileInputDecider
arImageCaptureProvider:(id<SCManagedCapturerARImageCaptureProvider>)arImageCaptureProvider
glviewManager:(id<SCManagedCapturerGLViewManagerAPI>)glViewManager
lensAPIProvider:(id<SCManagedCapturerLensAPIProvider>)lensAPIProvider
lsaComponentTracker:(id<SCManagedCapturerLSAComponentTrackerAPI>)lsaComponentTracker
managedCapturerPreviewLayerControllerDelegate:
(id<SCManagedCapturePreviewLayerControllerDelegate>)previewLayerControllerDelegate
{
_managedCapturerV1.captureResource.blackCameraDetector = blackCameraDetector;
_managedCapturerV1.captureResource.deviceMotionProvider = deviceMotionProvider;
_managedCapturerV1.captureResource.fileInputDecider = fileInputDecider;
_managedCapturerV1.captureResource.arImageCaptureProvider = arImageCaptureProvider;
_managedCapturerV1.captureResource.videoPreviewGLViewManager = glViewManager;
[_managedCapturerV1.captureResource.videoPreviewGLViewManager
configureWithCaptureResource:_managedCapturerV1.captureResource];
_managedCapturerV1.captureResource.lensAPIProvider = lensAPIProvider;
_managedCapturerV1.captureResource.lsaTrackingComponentHandler = lsaComponentTracker;
[_managedCapturerV1.captureResource.lsaTrackingComponentHandler
configureWithCaptureResource:_managedCapturerV1.captureResource];
_managedCapturerV1.captureResource.previewLayerControllerDelegate = previewLayerControllerDelegate;
[SCManagedCapturePreviewLayerController sharedInstance].delegate =
_managedCapturerV1.captureResource.previewLayerControllerDelegate;
}
- (SCBlackCameraDetector *)blackCameraDetector
{
return _managedCapturerV1.captureResource.blackCameraDetector;
}
- (void)captureSingleVideoFrameAsynchronouslyWithCompletionHandler:
(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler
context:(NSString *)context
{
[_managedCapturerV1 captureSingleVideoFrameAsynchronouslyWithCompletionHandler:completionHandler context:context];
}
- (void)sampleFrameWithCompletionHandler:(void (^)(UIImage *frame, CMTime presentationTime))completionHandler
context:(NSString *)context
{
[_managedCapturerV1 sampleFrameWithCompletionHandler:completionHandler context:context];
}
- (void)addTimedTask:(SCTimedTask *)task context:(NSString *)context
{
[_managedCapturerV1 addTimedTask:task context:context];
}
- (void)clearTimedTasksWithContext:(NSString *)context
{
[_managedCapturerV1 clearTimedTasksWithContext:context];
}
- (BOOL)authorizedForVideoCapture
{
return [_authorizationChecker authorizedForVideoCapture];
}
- (void)preloadVideoCaptureAuthorization
{
[_authorizationChecker preloadVideoCaptureAuthorization];
}
#pragma mark - Snap Creation triggers
- (SCSnapCreationTriggers *)snapCreationTriggers
{
return [_managedCapturerV1 snapCreationTriggers];
}
@end

View File

@@ -0,0 +1,47 @@
//
// SCDepthBlurMetalModule.metal
// Snapchat
//
// Created by Brian Ng on 10/31/17.
//
#include <metal_stdlib>
using namespace metal;
struct DepthBlurRenderData {
float depthRange;
float depthOffset;
float depthBlurForegroundThreshold;
float depthBlurBackgroundThreshold;
};
kernel void kernel_depth_blur(texture2d<float, access::read> sourceYTexture [[texture(0)]],
texture2d<float, access::read> sourceUVTexture [[texture(1)]],
texture2d<float, access::read> sourceDepthTexture[[texture(2)]],
texture2d<float, access::read> sourceBlurredYTexture [[texture(3)]],
texture2d<float, access::write> destinationYTexture [[texture(4)]],
texture2d<float, access::write> destinationUVTexture [[texture(5)]],
constant DepthBlurRenderData &renderData [[buffer(0)]],
uint2 gid [[thread_position_in_grid]],
uint2 size [[threads_per_grid]]) {
float2 valueUV = sourceUVTexture.read(gid).rg;
float depthValue = sourceDepthTexture.read(uint2(gid.x/4, gid.y/4)).r;
float normalizedDepthValue = (depthValue - renderData.depthOffset) / renderData.depthRange;
float valueYUnblurred = sourceYTexture.read(gid).r;
float valueYBlurred = sourceBlurredYTexture.read(gid).r;
float valueY = 0;
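// Foreground (near) pixels keep the sharp Y value, background pixels take the blurred value, and the
// band between the two thresholds blends the two linearly.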
if (normalizedDepthValue > renderData.depthBlurForegroundThreshold) {
valueY = valueYUnblurred;
} else if (normalizedDepthValue < renderData.depthBlurBackgroundThreshold) {
valueY = valueYBlurred;
} else {
float blendRange = renderData.depthBlurForegroundThreshold - renderData.depthBlurBackgroundThreshold;
float normalizedBlendDepthValue = (normalizedDepthValue - renderData.depthBlurBackgroundThreshold) / blendRange;
valueY = valueYUnblurred * normalizedBlendDepthValue + valueYBlurred * (1 - normalizedBlendDepthValue);
}
destinationYTexture.write(valueY, gid);
destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);
}

View File

@@ -0,0 +1,21 @@
//
// SCDepthBlurMetalRenderCommand.h
// Snapchat
//
// Created by Brian Ng on 11/8/17.
//
//
#import "SCMetalModule.h"
#import <Foundation/Foundation.h>
/*
@class SCDepthBlurMetalRenderCommand
Prepares the command buffer for the SCDepthBlurMetalModule.metal shader.
*/
@interface SCDepthBlurMetalRenderCommand : NSObject <SCMetalRenderCommand>
@property (nonatomic, readonly) NSString *functionName;
@end

View File

@@ -0,0 +1,90 @@
//
// SCDepthBlurMetalRenderCommand.m
// Snapchat
//
// Created by Brian Ng on 11/8/17.
//
//
#import "SCDepthBlurMetalRenderCommand.h"
#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"
#import <SCFoundation/NSString+SCFormat.h>
@import MetalPerformanceShaders;
@implementation SCDepthBlurMetalRenderCommand
typedef struct DepthBlurRenderData {
float depthRange;
float depthOffset;
float depthBlurForegroundThreshold;
float depthBlurBackgroundThreshold;
} DepthBlurRenderData;
#pragma mark - SCMetalRenderCommand
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
pipelineState:(id<MTLComputePipelineState>)pipelineState
textureResource:(SCMetalTextureResource *)textureResource
{
#if !TARGET_IPHONE_SIMULATOR
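// Only honor the tweaked background threshold when it sits below the foreground threshold; otherwise
// use 0 so the blend band in the shader stays valid.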
CGFloat depthBlurForegroundThreshold = textureResource.depthBlurForegroundThreshold;
CGFloat depthBlurBackgroundThreshold =
textureResource.depthBlurForegroundThreshold > SCCameraTweaksDepthBlurBackgroundThreshold()
? SCCameraTweaksDepthBlurBackgroundThreshold()
: 0;
DepthBlurRenderData depthBlurRenderData = {
.depthRange = textureResource.depthRange,
.depthOffset = textureResource.depthOffset,
.depthBlurBackgroundThreshold = depthBlurBackgroundThreshold,
.depthBlurForegroundThreshold = depthBlurForegroundThreshold,
};
id<MTLBuffer> depthBlurRenderDataBuffer =
[textureResource.device newBufferWithLength:sizeof(DepthBlurRenderData)
options:MTLResourceOptionCPUCacheModeDefault];
memcpy(depthBlurRenderDataBuffer.contents, &depthBlurRenderData, sizeof(DepthBlurRenderData));
MPSImageGaussianBlur *kernel =
[[MPSImageGaussianBlur alloc] initWithDevice:textureResource.device sigma:SCCameraTweaksBlurSigma()];
[kernel encodeToCommandBuffer:commandBuffer
sourceTexture:textureResource.sourceYTexture
destinationTexture:textureResource.sourceBlurredYTexture];
id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
[commandEncoder setComputePipelineState:pipelineState];
[commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
[commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];
[commandEncoder setTexture:textureResource.sourceDepthTexture atIndex:2];
[commandEncoder setTexture:textureResource.sourceBlurredYTexture atIndex:3];
[commandEncoder setTexture:textureResource.destinationYTexture atIndex:4];
[commandEncoder setTexture:textureResource.destinationUVTexture atIndex:5];
[commandEncoder setBuffer:depthBlurRenderDataBuffer offset:0 atIndex:0];
return commandEncoder;
#else
return nil;
#endif
}
- (BOOL)requiresDepthData
{
return YES;
}
#pragma mark - SCMetalModuleFunctionProvider
- (NSString *)functionName
{
return @"kernel_depth_blur";
}
- (NSString *)description
{
return [NSString sc_stringWithFormat:@"SCDepthBlurMetalRenderCommand (shader function = %@)", self.functionName];
}
@end

View File

@@ -0,0 +1,29 @@
//
// SCDepthToGrayscaleMetalModule.metal
// Snapchat
//
// Created by Brian Ng on 12/7/17.
//
#include <metal_stdlib>
using namespace metal;
typedef struct DepthToGrayscaleRenderData {
float depthRange;
float depthOffset;
} DepthToGrayscaleRenderData;
kernel void kernel_depth_to_grayscale(texture2d<float, access::read> sourceDepthTexture[[texture(0)]],
texture2d<float, access::write> destinationYTexture [[texture(1)]],
texture2d<float, access::write> destinationUVTexture [[texture(2)]],
constant DepthToGrayscaleRenderData &renderData [[buffer(0)]],
uint2 gid [[thread_position_in_grid]],
uint2 size [[threads_per_grid]]) {
float depthValue = sourceDepthTexture.read(uint2(gid.x/4, gid.y/4)).r;
float normalizedDepthValue = (depthValue - renderData.depthOffset) / renderData.depthRange;
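// Write the normalized depth as luma, with neutral 0.5/0.5 chroma so the NV12 output renders as grayscale.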
destinationYTexture.write(normalizedDepthValue, gid);
destinationUVTexture.write(float4(0.5, 0.5, 0, 0), gid);
}

View File

@@ -0,0 +1,21 @@
//
// SCDepthToGrayscaleMetalRenderCommand.h
// Snapchat
//
// Created by Brian Ng on 12/7/17.
//
//
#import "SCMetalModule.h"
#import <Foundation/Foundation.h>
/*
@class SCDepthToGrayscaleMetalRenderCommand
Prepares the command buffer for the SCDepthToGrayscaleMetalModule.metal shader.
*/
@interface SCDepthToGrayscaleMetalRenderCommand : NSObject <SCMetalRenderCommand>
@property (nonatomic, readonly) NSString *functionName;
@end

View File

@@ -0,0 +1,72 @@
//
// SCDepthToGrayscaleMetalRenderCommand.m
// Snapchat
//
// Created by Brian Ng on 12/7/17.
//
//
#import "SCDepthToGrayscaleMetalRenderCommand.h"
#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"
#import <SCFoundation/NSString+SCFormat.h>
@import MetalPerformanceShaders;
@implementation SCDepthToGrayscaleMetalRenderCommand
typedef struct DepthToGrayscaleRenderData {
float depthRange;
float depthOffset;
} DepthToGrayscaleRenderData;
#pragma mark - SCMetalRenderCommand
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
pipelineState:(id<MTLComputePipelineState>)pipelineState
textureResource:(SCMetalTextureResource *)textureResource
{
#if !TARGET_IPHONE_SIMULATOR
DepthToGrayscaleRenderData depthToGrayscaleRenderData = {
.depthRange = textureResource.depthRange, .depthOffset = textureResource.depthOffset,
};
id<MTLBuffer> depthToGrayscaleDataBuffer =
[textureResource.device newBufferWithLength:sizeof(DepthToGrayscaleRenderData)
options:MTLResourceOptionCPUCacheModeDefault];
memcpy(depthToGrayscaleDataBuffer.contents, &depthToGrayscaleRenderData, sizeof(DepthToGrayscaleRenderData));
id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
[commandEncoder setComputePipelineState:pipelineState];
[commandEncoder setTexture:textureResource.sourceDepthTexture atIndex:0];
[commandEncoder setTexture:textureResource.destinationYTexture atIndex:1];
[commandEncoder setTexture:textureResource.destinationUVTexture atIndex:2];
[commandEncoder setBuffer:depthToGrayscaleDataBuffer offset:0 atIndex:0];
return commandEncoder;
#else
return nil;
#endif
}
- (BOOL)requiresDepthData
{
return YES;
}
#pragma mark - SCMetalModuleFunctionProvider
- (NSString *)functionName
{
return @"kernel_depth_to_grayscale";
}
- (NSString *)description
{
return [NSString
sc_stringWithFormat:@"SCDepthToGrayscaleMetalRenderCommand (shader function = %@)", self.functionName];
}
@end

View File

@@ -0,0 +1,28 @@
//
// SCDigitalExposureHandler.h
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/15/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>
@class SCExposureAdjustProcessingModule;
/*
@class SCDigitalExposureHandler
The SCDigitalExposureHandler is built by the SCProcessingBuilder when the user indicates that they want
to add an SCExposureAdjustProcessingModule to the processing pipeline. The builder takes care
of initializing the handler by linking the processing module. The caller of the builder can then link up
the handler to the UI element (in this case, SCExposureSlider) so that the user's control is hooked up to
the processing module.
*/
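/*
A minimal usage sketch (the builder wiring is omitted; `module` is an illustrative
SCExposureAdjustProcessingModule instance and 0.5 is an arbitrary example value):

SCDigitalExposureHandler *handler = [[SCDigitalExposureHandler alloc] initWithProcessingModule:module];
[handler setExposureParameter:0.5]; // e.g. driven by the SCExposureSlider
*/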
@interface SCDigitalExposureHandler : NSObject
- (instancetype)initWithProcessingModule:(SCExposureAdjustProcessingModule *)processingModule;
- (void)setExposureParameter:(CGFloat)value;
@end

View File

@@ -0,0 +1,30 @@
//
// SCDigitalExposureHandler.m
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/15/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCDigitalExposureHandler.h"
#import "SCExposureAdjustProcessingModule.h"
@implementation SCDigitalExposureHandler {
__weak SCExposureAdjustProcessingModule *_processingModule;
}
- (instancetype)initWithProcessingModule:(SCExposureAdjustProcessingModule *)processingModule
{
if (self = [super init]) {
_processingModule = processingModule;
}
return self;
}
- (void)setExposureParameter:(CGFloat)value
{
[_processingModule setEVValue:value];
}
@end

View File

@@ -0,0 +1,60 @@
//
// SCExposureAdjustMetalModule.metal
// Snapchat
//
// Created by Michel Loenngren on 7/11/17.
//
//
#include <metal_stdlib>
using namespace metal;
kernel void kernel_exposure_adjust(texture2d<float, access::read> sourceYTexture [[texture(0)]],
texture2d<float, access::read> sourceUVTexture [[texture(1)]],
texture2d<float, access::write> destinationYTexture [[texture(2)]],
texture2d<float, access::write> destinationUVTexture [[texture(3)]],
uint2 gid [[thread_position_in_grid]],
uint2 size [[threads_per_grid]]) {
float valueY = sourceYTexture.read(gid).r;
float2 valueUV = sourceUVTexture.read(gid).rg;
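// Brightening curve: factor is 2.0 at Y = 0 and decays toward 1.0 as Y approaches 1, lifting shadows
// while leaving highlights nearly untouched.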
float factor = 1.0 / pow(1.0 + valueY, 5) + 1.0;
valueY *= factor;
destinationYTexture.write(valueY, gid);
destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);
}
kernel void kernel_exposure_adjust_nightvision(texture2d<float, access::read> sourceYTexture [[texture(0)]],
texture2d<float, access::read> sourceUVTexture [[texture(1)]],
texture2d<float, access::write> destinationYTexture [[texture(2)]],
texture2d<float, access::write> destinationUVTexture [[texture(3)]],
uint2 gid [[thread_position_in_grid]],
uint2 size [[threads_per_grid]]) {
float valueY = sourceYTexture.read(gid).r;
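// Fixed chroma offsets (both U and V pulled below the neutral 0.5) tint the frame green for the
// night-vision look; luma passes through unchanged.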
float u = 0.5 - 0.368;
float v = 0.5 - 0.291;
destinationYTexture.write(valueY, gid);
destinationUVTexture.write(float4(u, v, 0, 0), gid);
}
kernel void kernel_exposure_adjust_inverted_nightvision(texture2d<float, access::read> sourceYTexture [[texture(0)]],
texture2d<float, access::read> sourceUVTexture [[texture(1)]],
texture2d<float, access::write> destinationYTexture [[texture(2)]],
texture2d<float, access::write> destinationUVTexture [[texture(3)]],
uint2 gid [[thread_position_in_grid]],
uint2 size [[threads_per_grid]]) {
float valueY = sourceYTexture.read(gid).r;
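// Invert the luma, then apply the same fixed green-tint chroma as the non-inverted night-vision kernel.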
valueY = 1.0 - valueY;
float u = 0.5 - 0.368;
float v = 0.5 - 0.291;
destinationYTexture.write(valueY, gid);
destinationUVTexture.write(float4(u, v, 0, 0), gid);
}

View File

@@ -0,0 +1,21 @@
//
// SCExposureAdjustMetalRenderCommand.h
// Snapchat
//
// Created by Michel Loenngren on 7/11/17.
//
//
#import "SCMetalModule.h"
#import <Foundation/Foundation.h>
/*
@class SCExposureAdjustMetalRenderCommand
Prepares the command buffer for the SCExposureAdjustMetalModule.metal shader.
*/
@interface SCExposureAdjustMetalRenderCommand : SCMetalModule <SCMetalRenderCommand>
@property (nonatomic, readonly) NSString *functionName;
@end

View File

@@ -0,0 +1,66 @@
//
// SCExposureAdjustMetalRenderCommand.m
// Snapchat
//
// Created by Michel Loenngren on 7/11/17.
//
//
#import "SCExposureAdjustMetalRenderCommand.h"
#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"
#import <SCFoundation/SCAssertWrapper.h>
@import Metal;
@implementation SCExposureAdjustMetalRenderCommand
#pragma mark - SCMetalRenderCommand
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
pipelineState:(id<MTLComputePipelineState>)pipelineState
textureResource:(SCMetalTextureResource *)textureResource
{
id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
[commandEncoder setComputePipelineState:pipelineState];
#if !TARGET_IPHONE_SIMULATOR
[commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
[commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];
[commandEncoder setTexture:textureResource.destinationYTexture atIndex:2];
[commandEncoder setTexture:textureResource.destinationUVTexture atIndex:3];
#endif
return commandEncoder;
}
#pragma mark - SCMetalModuleFunctionProvider
- (NSString *)functionName
{
if (SCCameraExposureAdjustmentMode() == 1) {
return @"kernel_exposure_adjust";
} else if (SCCameraExposureAdjustmentMode() == 2) {
return @"kernel_exposure_adjust_nightvision";
} else if (SCCameraExposureAdjustmentMode() == 3) {
return @"kernel_exposure_adjust_inverted_nightvision";
} else {
SCAssertFail(@"Incorrect value from SCCameraExposureAdjustmentMode() %ld",
(long)SCCameraExposureAdjustmentMode());
return nil;
}
}
- (BOOL)requiresDepthData
{
return NO;
}
- (NSString *)description
{
return
[NSString sc_stringWithFormat:@"SCExposureAdjustMetalRenderCommand (shader function = %@)", self.functionName];
}
@end

View File

@@ -0,0 +1,28 @@
//
// SCExposureAdjustProcessingModule.h
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/1/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCProcessingModule.h"
#import <Foundation/Foundation.h>
/**
NOTE: If we start chaining multiple CIImage modules we should
not run them back to back but instead in one CIImage pass
as CoreImage will merge the shaders for best performance
*/
/*
@class SCExposureAdjustProcessingModule
This module uses the CIExposureAdjust CIFilter to process the frames. It uses the value provided by
SCDigitalExposureHandler as the EV value (default is 0).
*/
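/*
A minimal usage sketch (0.5 is an arbitrary example; the input is scaled internally by
kSCExposureAdjustProcessingModuleMaxEVValue before being handed to the filter):

SCExposureAdjustProcessingModule *module = [[SCExposureAdjustProcessingModule alloc] init];
[module setEVValue:0.5];
*/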
@interface SCExposureAdjustProcessingModule : NSObject <SCProcessingModule>
- (void)setEVValue:(CGFloat)value;
@end

View File

@ -0,0 +1,67 @@
//
// SCExposureAdjustProcessingModule.m
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/1/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCExposureAdjustProcessingModule.h"
#import "SCProcessingModuleUtils.h"
@import CoreImage;
@import CoreMedia;
static const CGFloat kSCExposureAdjustProcessingModuleMaxEVValue = 2.0;
@implementation SCExposureAdjustProcessingModule {
CIContext *_context;
CIFilter *_filter;
CFMutableDictionaryRef _attributes;
CVPixelBufferPoolRef _bufferPool;
}
- (instancetype)init
{
if (self = [super init]) {
_context = [CIContext context];
_filter = [CIFilter filterWithName:@"CIExposureAdjust"];
[_filter setValue:@0.0 forKey:@"inputEV"];
}
return self;
}
- (void)setEVValue:(CGFloat)value
{
CGFloat newEVValue = value * kSCExposureAdjustProcessingModuleMaxEVValue;
[_filter setValue:@(newEVValue) forKey:@"inputEV"];
}
- (void)dealloc
{
CVPixelBufferPoolFlush(_bufferPool, kCVPixelBufferPoolFlushExcessBuffers);
CVPixelBufferPoolRelease(_bufferPool);
}
- (BOOL)requiresDepthData
{
return NO;
}
- (CMSampleBufferRef)render:(RenderData)renderData
{
CMSampleBufferRef input = renderData.sampleBuffer;
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(input);
CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];
[_filter setValue:image forKey:kCIInputImageKey];
CIImage *result = [_filter outputImage];
return [SCProcessingModuleUtils sampleBufferFromImage:result
oldSampleBuffer:input
bufferPool:_bufferPool
context:_context];
}
@end

View File

@@ -0,0 +1,48 @@
//
// SCMetalModule.h
// Snapchat
//
// Created by Michel Loenngren on 7/19/17.
//
//
#import "SCMetalTextureResource.h"
#import "SCMetalUtils.h"
#import "SCProcessingModule.h"
#import <Foundation/Foundation.h>
@protocol SCMetalModuleFunctionProvider <NSObject>
@property (nonatomic, readonly) NSString *functionName;
@end
@protocol SCMetalRenderCommand <SCMetalModuleFunctionProvider>
/**
Sets textures and parameters for the shader function. When implementing this function, the command encoder must be
created and the pipeline state set. That is, ensure that there are calls to [commandBuffer computeCommandEncoder]
and [commandEncoder setComputePipelineState:pipelineState].
*/
#if !TARGET_IPHONE_SIMULATOR
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
pipelineState:(id<MTLComputePipelineState>)pipelineState
textureResource:(SCMetalTextureResource *)textureResource;
#endif
- (BOOL)requiresDepthData;
@end
/**
NOTE: If we start chaining multiple metal modules we should
not run them back to back but instead chain different render
passes.
*/
@interface SCMetalModule : NSObject <SCProcessingModule>
// Designated initializer: SCMetalModule should always have a SCMetalRenderCommand
- (instancetype)initWithMetalRenderCommand:(id<SCMetalRenderCommand>)metalRenderCommand;
@end

View File

@@ -0,0 +1,155 @@
//
// SCMetalModule.m
// Snapchat
//
// Created by Michel Loenngren on 7/19/17.
//
//
#import "SCMetalModule.h"
#import "SCCameraTweaks.h"
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
@interface SCMetalModule ()
#if !TARGET_IPHONE_SIMULATOR
@property (nonatomic, readonly) id<MTLLibrary> library;
@property (nonatomic, readonly) id<MTLDevice> device;
@property (nonatomic, readonly) id<MTLFunction> function;
@property (nonatomic, readonly) id<MTLComputePipelineState> computePipelineState;
@property (nonatomic, readonly) id<MTLCommandQueue> commandQueue;
@property (nonatomic, readonly) CVMetalTextureCacheRef textureCache;
#endif
@end
@implementation SCMetalModule {
id<SCMetalRenderCommand> _metalRenderCommand;
}
#if !TARGET_IPHONE_SIMULATOR
@synthesize library = _library;
@synthesize function = _function;
@synthesize computePipelineState = _computePipelineState;
@synthesize commandQueue = _commandQueue;
@synthesize textureCache = _textureCache;
#endif
- (instancetype)initWithMetalRenderCommand:(id<SCMetalRenderCommand>)metalRenderCommand
{
self = [super init];
if (self) {
_metalRenderCommand = metalRenderCommand;
}
return self;
}
#pragma mark - SCProcessingModule
- (CMSampleBufferRef)render:(RenderData)renderData
{
CMSampleBufferRef input = renderData.sampleBuffer;
#if !TARGET_IPHONE_SIMULATOR
id<MTLComputePipelineState> pipelineState = self.computePipelineState;
SC_GUARD_ELSE_RETURN_VALUE(pipelineState, input);
CVMetalTextureCacheRef textureCache = self.textureCache;
SC_GUARD_ELSE_RETURN_VALUE(textureCache, input);
id<MTLCommandQueue> commandQueue = self.commandQueue;
SC_GUARD_ELSE_RETURN_VALUE(commandQueue, input);
SCMetalTextureResource *textureResource =
[[SCMetalTextureResource alloc] initWithRenderData:renderData textureCache:textureCache device:self.device];
id<MTLCommandBuffer> commandBuffer = [commandQueue commandBuffer];
if (!_metalRenderCommand) {
SCAssertFail(@"Metal module must be initialized with an SCMetalRenderCommand");
}
id<MTLComputeCommandEncoder> commandEncoder = [_metalRenderCommand encodeMetalCommand:commandBuffer
pipelineState:pipelineState
textureResource:textureResource];
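// Size each threadgroup from the pipeline's execution width, then dispatch enough threadgroups to
// cover the full Y texture, rounding up on each axis.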
NSUInteger w = pipelineState.threadExecutionWidth;
NSUInteger h = pipelineState.maxTotalThreadsPerThreadgroup / w;
MTLSize threadsPerThreadgroup = MTLSizeMake(w, h, 1);
MTLSize threadgroupsPerGrid = MTLSizeMake((textureResource.sourceYTexture.width + w - 1) / w,
(textureResource.sourceYTexture.height + h - 1) / h, 1);
[commandEncoder dispatchThreadgroups:threadgroupsPerGrid threadsPerThreadgroup:threadsPerThreadgroup];
[commandEncoder endEncoding];
[commandBuffer commit];
[commandBuffer waitUntilCompleted];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(renderData.sampleBuffer);
SCMetalCopyTexture(textureResource.destinationYTexture, imageBuffer, 0);
SCMetalCopyTexture(textureResource.destinationUVTexture, imageBuffer, 1);
#endif
return input;
}
- (BOOL)requiresDepthData
{
return [_metalRenderCommand requiresDepthData];
}
#pragma mark - Lazy properties
#if !TARGET_IPHONE_SIMULATOR
- (id<MTLLibrary>)library
{
if (!_library) {
NSString *libPath = [[NSBundle mainBundle] pathForResource:@"sccamera-default" ofType:@"metallib"];
NSError *error = nil;
_library = [self.device newLibraryWithFile:libPath error:&error];
if (error) {
SCLogGeneralError(@"Create metallib error: %@", error.description);
}
}
return _library;
}
- (id<MTLDevice>)device
{
return SCGetManagedCaptureMetalDevice();
}
- (id<MTLFunction>)function
{
return [self.library newFunctionWithName:[_metalRenderCommand functionName]];
}
- (id<MTLComputePipelineState>)computePipelineState
{
if (!_computePipelineState) {
NSError *error = nil;
_computePipelineState = [self.device newComputePipelineStateWithFunction:self.function error:&error];
if (error) {
SCLogGeneralError(@"Error while creating compute pipeline state %@", error.description);
}
}
return _computePipelineState;
}
- (id<MTLCommandQueue>)commandQueue
{
if (!_commandQueue) {
_commandQueue = [self.device newCommandQueue];
}
return _commandQueue;
}
- (CVMetalTextureCacheRef)textureCache
{
if (!_textureCache) {
CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, self.device, nil, &_textureCache);
}
return _textureCache;
}
#endif
@end

View File

@@ -0,0 +1,54 @@
//
// SCMetalTextureResource.h
// Snapchat
//
// Created by Brian Ng on 11/7/17.
//
#import "SCProcessingModule.h"
#import "SCCapturerDefines.h"
#import <Foundation/Foundation.h>
#if !TARGET_IPHONE_SIMULATOR
#import <Metal/Metal.h>
#endif
/*
@class SCMetalTextureResource
The SCMetalTextureResource is created by SCMetalModule and is passed to a SCMetalRenderCommand.
This resource provides a collection of textures for rendering, where an SCMetalRenderCommand
selects which textures it needs. Textures are lazily initialized to optimize performance.
Additionally, information pertaining to depth is provided if normalizing depth is desired:
depthRange is the range of possible depth values [depthOffset, depthOffset + depthRange],
where depthOffset is the min depth value in the given depth map.
NOTE: This class is NOT thread safe -- ensure any calls are made by a performer by calling
SCAssertPerformer before actually accessing any textures
*/
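/*
For reference, the normalization the shaders apply (a restatement of the description above, not new API):

normalizedDepth = (depth - depthOffset) / depthRange;

e.g. with depthOffset = 0.5 and depthRange = 2.0, a raw depth of 1.5 maps to 0.5.
*/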
@interface SCMetalTextureResource : NSObject
#if !TARGET_IPHONE_SIMULATOR
@property (nonatomic, readonly) id<MTLTexture> sourceYTexture;
@property (nonatomic, readonly) id<MTLTexture> sourceUVTexture;
@property (nonatomic, readonly) id<MTLTexture> destinationYTexture;
@property (nonatomic, readonly) id<MTLTexture> destinationUVTexture;
// Textures for SCDepthBlurMetalCommand
@property (nonatomic, readonly) id<MTLTexture> sourceBlurredYTexture;
@property (nonatomic, readonly) id<MTLTexture> sourceDepthTexture;
@property (nonatomic, readonly) id<MTLDevice> device;
#endif
// Available depth-related auxiliary resources (when depth data is provided)
@property (nonatomic, readonly) float depthRange;
@property (nonatomic, readonly) float depthOffset;
@property (nonatomic, readonly) CGFloat depthBlurForegroundThreshold;
@property (nonatomic, readonly) SampleBufferMetadata sampleBufferMetadata;
#if !TARGET_IPHONE_SIMULATOR
- (instancetype)initWithRenderData:(RenderData)renderData
textureCache:(CVMetalTextureCacheRef)textureCache
device:(id<MTLDevice>)device;
#endif
@end

View File

@@ -0,0 +1,215 @@
//
// SCMetalTextureResource.m
// Snapchat
//
// Created by Brian Ng on 11/7/17.
//
#import "SCMetalTextureResource.h"
#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"
@import CoreImage;
#if !TARGET_IPHONE_SIMULATOR
static NSInteger const kSCFocusRectSize = 4;
#endif
@interface SCMetalTextureResource ()
#if !TARGET_IPHONE_SIMULATOR
@property (nonatomic, readonly) CVMetalTextureCacheRef textureCache;
#endif
@end
@implementation SCMetalTextureResource {
RenderData _renderData;
CVImageBufferRef _imageBuffer;
CIContext *_context;
}
#if !TARGET_IPHONE_SIMULATOR
@synthesize sourceYTexture = _sourceYTexture;
@synthesize sourceUVTexture = _sourceUVTexture;
@synthesize destinationYTexture = _destinationYTexture;
@synthesize destinationUVTexture = _destinationUVTexture;
@synthesize sourceBlurredYTexture = _sourceBlurredYTexture;
@synthesize sourceDepthTexture = _sourceDepthTexture;
@synthesize depthRange = _depthRange;
@synthesize depthOffset = _depthOffset;
@synthesize depthBlurForegroundThreshold = _depthBlurForegroundThreshold;
@synthesize device = _device;
@synthesize sampleBufferMetadata = _sampleBufferMetadata;
- (instancetype)initWithRenderData:(RenderData)renderData
textureCache:(CVMetalTextureCacheRef)textureCache
device:(id<MTLDevice>)device
{
self = [super init];
if (self) {
_imageBuffer = CMSampleBufferGetImageBuffer(renderData.sampleBuffer);
_renderData = renderData;
_textureCache = textureCache;
_device = device;
_context = [CIContext contextWithOptions:@{ kCIContextWorkingFormat : @(kCIFormatRGBAh) }];
}
return self;
}
#endif
#if !TARGET_IPHONE_SIMULATOR
- (id<MTLTexture>)sourceYTexture
{
if (!_sourceYTexture) {
CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
_sourceYTexture = SCMetalTextureFromPixelBuffer(_imageBuffer, 0, MTLPixelFormatR8Unorm, _textureCache);
CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
}
return _sourceYTexture;
}
- (id<MTLTexture>)sourceUVTexture
{
if (!_sourceUVTexture) {
CVPixelBufferLockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
_sourceUVTexture = SCMetalTextureFromPixelBuffer(_imageBuffer, 1, MTLPixelFormatRG8Unorm, _textureCache);
CVPixelBufferUnlockBaseAddress(_imageBuffer, kCVPixelBufferLock_ReadOnly);
}
return _sourceUVTexture;
}
- (id<MTLTexture>)destinationYTexture
{
if (!_destinationYTexture) {
MTLTextureDescriptor *textureDescriptor =
[MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 0)
height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 0)
mipmapped:NO];
textureDescriptor.usage |= MTLTextureUsageShaderWrite;
_destinationYTexture = [_device newTextureWithDescriptor:textureDescriptor];
}
return _destinationYTexture;
}
- (id<MTLTexture>)destinationUVTexture
{
if (!_destinationUVTexture) {
MTLTextureDescriptor *textureDescriptor =
[MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatRG8Unorm
width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 1)
height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 1)
mipmapped:NO];
textureDescriptor.usage |= MTLTextureUsageShaderWrite;
_destinationUVTexture = [_device newTextureWithDescriptor:textureDescriptor];
}
return _destinationUVTexture;
}
- (id<MTLTexture>)sourceBlurredYTexture
{
if (!_sourceBlurredYTexture) {
MTLTextureDescriptor *textureDescriptor =
[MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:CVPixelBufferGetWidthOfPlane(_imageBuffer, 0)
height:CVPixelBufferGetHeightOfPlane(_imageBuffer, 0)
mipmapped:NO];
textureDescriptor.usage |= MTLTextureUsageShaderWrite;
_sourceBlurredYTexture = [_device newTextureWithDescriptor:textureDescriptor];
}
return _sourceBlurredYTexture;
}
- (id<MTLTexture>)sourceDepthTexture
{
if (!_sourceDepthTexture) {
// Lock the depth buffer (not the video image buffer) while wrapping it in a Metal texture.
CVPixelBufferLockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);
_sourceDepthTexture =
SCMetalTextureFromPixelBuffer(_renderData.depthDataMap, 0, MTLPixelFormatR16Float, _textureCache);
CVPixelBufferUnlockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);
}
return _sourceDepthTexture;
}
- (float)depthRange
{
if (_depthRange == 0) {
// Get min/max values of depth image to normalize
size_t bufferWidth = CVPixelBufferGetWidth(_renderData.depthDataMap);
size_t bufferHeight = CVPixelBufferGetHeight(_renderData.depthDataMap);
size_t bufferBytesPerRow = CVPixelBufferGetBytesPerRow(_renderData.depthDataMap);
CVPixelBufferLockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);
unsigned char *pixelBufferPointer = CVPixelBufferGetBaseAddress(_renderData.depthDataMap);
__fp16 *bufferPtr = (__fp16 *)pixelBufferPointer;
size_t ptrInc = bufferBytesPerRow / sizeof(__fp16);
float depthMin = MAXFLOAT;
float depthMax = -MAXFLOAT;
for (int j = 0; j < bufferHeight; j++) {
for (int i = 0; i < bufferWidth; i++) {
float value = bufferPtr[i];
if (!isnan(value)) {
depthMax = MAX(depthMax, value);
depthMin = MIN(depthMin, value);
}
}
bufferPtr += ptrInc;
}
CVPixelBufferUnlockBaseAddress(_renderData.depthDataMap, kCVPixelBufferLock_ReadOnly);
_depthRange = depthMax - depthMin;
_depthOffset = depthMin;
}
return _depthRange;
}
- (float)depthOffset
{
if (_depthRange == 0) {
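// Trigger the lazy min/max scan in -depthRange, which also populates _depthOffset.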
[self depthRange];
}
return _depthOffset;
}
- (CGFloat)depthBlurForegroundThreshold
{
if (_renderData.depthBlurPointOfInterest) {
CGPoint point = *_renderData.depthBlurPointOfInterest;
CIImage *disparityImage = [CIImage imageWithCVPixelBuffer:_renderData.depthDataMap];
CIVector *vector =
[CIVector vectorWithX:point.x * CVPixelBufferGetWidth(_renderData.depthDataMap) - kSCFocusRectSize / 2
Y:point.y * CVPixelBufferGetHeight(_renderData.depthDataMap) - kSCFocusRectSize / 2
Z:kSCFocusRectSize
W:kSCFocusRectSize];
CIImage *minMaxImage =
[[disparityImage imageByClampingToExtent] imageByApplyingFilter:@"CIAreaMinMaxRed"
withInputParameters:@{kCIInputExtentKey : vector}];
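// CIAreaMinMaxRed produces a 1x1 image whose red channel holds the area minimum and whose
// green channel holds the area maximum; the maximum disparity is read from pixel[1] below.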
UInt8 pixel[4] = {0, 0, 0, 0};
[_context render:minMaxImage
toBitmap:&pixel
rowBytes:4
bounds:CGRectMake(0, 0, 1, 1)
format:kCIFormatRGBA8
colorSpace:nil];
CGFloat disparity = pixel[1] / 255.0;
CGFloat normalizedDisparity = (disparity - self.depthOffset) / self.depthRange;
return normalizedDisparity;
} else {
return SCCameraTweaksDepthBlurForegroundThreshold();
}
}
- (SampleBufferMetadata)sampleBufferMetadata
{
SampleBufferMetadata sampleMetadata = {
.isoSpeedRating = 0, .exposureTime = 0.033, .brightness = 0,
};
retrieveSampleBufferMetadata(_renderData.sampleBuffer, &sampleMetadata);
return sampleMetadata;
}
#endif
@end

View File

@ -0,0 +1,37 @@
//
// SCNightModeEnhancementMetalModule.metal
// Snapchat
//
// Created by Chao Pang on 12/21/17.
//
//
#include <metal_stdlib>
using namespace metal;
typedef struct SampleBufferMetadata {
int isoSpeedRating;
float exposureTime;
float brightness;
} SampleBufferMetadata;
kernel void kernel_night_mode_enhancement(texture2d<float, access::read> sourceYTexture [[texture(0)]],
texture2d<float, access::read> sourceUVTexture [[texture(1)]],
texture2d<float, access::write> destinationYTexture [[texture(2)]],
texture2d<float, access::write> destinationUVTexture [[texture(3)]],
constant SampleBufferMetadata &metaData [[buffer(0)]],
uint2 gid [[thread_position_in_grid]],
uint2 size [[threads_per_grid]]) {
float valueY = sourceYTexture.read(gid).r;
float2 valueUV = sourceUVTexture.read(gid).rg;
float factor = 1.0 - metaData.brightness * 0.1;
factor = max(min(factor, 1.3), 1.0);
valueY = min(valueY * factor, 1.0);
valueUV.rg = max(min((valueUV.rg - 0.5) * factor + 0.5, 1.0), 0.0);
destinationYTexture.write(valueY, gid);
destinationUVTexture.write(float4(valueUV.r, valueUV.g, 0, 0), gid);
}
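// Worked example (illustrative): in a dark scene the metered brightness is negative, so e.g.
// brightness = -3.0 yields factor = 1.0 - (-3.0 * 0.1) = 1.3, the clamp's upper bound, which
// brightens Y by 30% and stretches chroma around its 0.5 midpoint by the same factor. For
// brightness >= 0 the factor clamps to 1.0 and the frame passes through unchanged.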

View File

@ -0,0 +1,19 @@
//
// SCNightModeEnhancementMetalRenderCommand.h
// Snapchat
//
// Created by Chao Pang on 12/21/17.
//
#import "SCMetalModule.h"
#import <Foundation/Foundation.h>
/*
Prepares the command buffer for the SCNightModeEnhancementMetalModule.metal.
*/
@interface SCNightModeEnhancementMetalRenderCommand : SCMetalModule <SCMetalRenderCommand>
@property (nonatomic, readonly) NSString *functionName;
@end

View File

@ -0,0 +1,64 @@
//
// SCNightModeEnhancementMetalRenderCommand.m
// Snapchat
//
// Created by Chao Pang on 12/21/17.
//
#import "SCNightModeEnhancementMetalRenderCommand.h"
#import "SCCameraTweaks.h"
#import "SCMetalUtils.h"
#import <SCFoundation/NSString+SCFormat.h>
@import Metal;
@implementation SCNightModeEnhancementMetalRenderCommand
#pragma mark - SCMetalRenderCommand
- (id<MTLComputeCommandEncoder>)encodeMetalCommand:(id<MTLCommandBuffer>)commandBuffer
pipelineState:(id<MTLComputePipelineState>)pipelineState
textureResource:(SCMetalTextureResource *)textureResource
{
id<MTLComputeCommandEncoder> commandEncoder = [commandBuffer computeCommandEncoder];
[commandEncoder setComputePipelineState:pipelineState];
#if !TARGET_IPHONE_SIMULATOR
SampleBufferMetadata sampleBufferMetadata = {
.isoSpeedRating = textureResource.sampleBufferMetadata.isoSpeedRating,
.exposureTime = textureResource.sampleBufferMetadata.exposureTime,
.brightness = textureResource.sampleBufferMetadata.brightness,
};
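// Copy the metadata into a small MTLBuffer for the kernel; this struct must stay
// layout-compatible with SampleBufferMetadata declared in SCNightModeEnhancementMetalModule.metal,
// which the shader reads through buffer(0).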
id<MTLBuffer> metadataBuffer = [textureResource.device newBufferWithLength:sizeof(SampleBufferMetadata)
options:MTLResourceOptionCPUCacheModeDefault];
memcpy(metadataBuffer.contents, &sampleBufferMetadata, sizeof(SampleBufferMetadata));
[commandEncoder setTexture:textureResource.sourceYTexture atIndex:0];
[commandEncoder setTexture:textureResource.sourceUVTexture atIndex:1];
[commandEncoder setTexture:textureResource.destinationYTexture atIndex:2];
[commandEncoder setTexture:textureResource.destinationUVTexture atIndex:3];
[commandEncoder setBuffer:metadataBuffer offset:0 atIndex:0];
#endif
return commandEncoder;
}
#pragma mark - SCMetalModuleFunctionProvider
- (NSString *)functionName
{
return @"kernel_night_mode_enhancement";
}
- (BOOL)requiresDepthData
{
return NO;
}
- (NSString *)description
{
return [NSString
sc_stringWithFormat:@"SCNightModeEnhancementMetalRenderCommand (shader function = %@)", self.functionName];
}
@end

View File

@ -0,0 +1,32 @@
//
// SCProcessingModule.h
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 5/30/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
typedef struct RenderData {
CMSampleBufferRef sampleBuffer;
CVPixelBufferRef depthDataMap; // Optional - for depth blur rendering
CGPoint *depthBlurPointOfInterest; // Optional - for depth blur rendering
} RenderData;
/*
@protocol SCProcessingModule
A single module that is responsible for the actual image processing work. Multiple modules can be chained
together by the SCProcessingPipelineBuilder and the frame can be passed through the entire
SCProcessingPipeline.
*/
@protocol SCProcessingModule <NSObject>
- (CMSampleBufferRef)render:(RenderData)renderData;
// Needed to protect against depth data potentially being nil during the render pass
- (BOOL)requiresDepthData;
@end
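// Illustrative sketch of a minimal conforming module (hypothetical class, not part of the
// pipeline): it forwards the frame unchanged and declares no dependency on depth data.
//
//   @interface SCPassthroughProcessingModule : NSObject <SCProcessingModule>
//   @end
//
//   @implementation SCPassthroughProcessingModule
//   - (CMSampleBufferRef)render:(RenderData)renderData
//   {
//       return renderData.sampleBuffer;
//   }
//   - (BOOL)requiresDepthData
//   {
//       return NO;
//   }
//   @end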

View File

@ -0,0 +1,22 @@
//
// SCProcessingModuleUtils.h
// Snapchat
//
// Created by Brian Ng on 11/10/17.
//
#import <CoreImage/CoreImage.h>
#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>
@interface SCProcessingModuleUtils : NSObject
+ (CVPixelBufferRef)pixelBufferFromImage:(CIImage *)image
bufferPool:(CVPixelBufferPoolRef)bufferPool
context:(CIContext *)context;
+ (CMSampleBufferRef)sampleBufferFromImage:(CIImage *)image
oldSampleBuffer:(CMSampleBufferRef)oldSampleBuffer
bufferPool:(CVPixelBufferPoolRef)bufferPool
context:(CIContext *)context;
@end
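// Illustrative usage from within a module's render pass (hypothetical locals): convert a
// filtered CIImage back into a CMSampleBuffer while reusing the timing of the source frame.
//
//   CIImage *processed = ...; // output of some CIFilter chain
//   CMSampleBufferRef newBuffer =
//       [SCProcessingModuleUtils sampleBufferFromImage:processed
//                                      oldSampleBuffer:renderData.sampleBuffer
//                                           bufferPool:_bufferPool
//                                              context:_context];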

View File

@ -0,0 +1,84 @@
//
// SCProcessingModuleUtils.m
// Snapchat
//
// Created by Brian Ng on 11/10/17.
//
#import "SCProcessingModuleUtils.h"
#import <SCFoundation/SCLog.h>
@import CoreImage;
@implementation SCProcessingModuleUtils
+ (CVPixelBufferRef)pixelBufferFromImage:(CIImage *)image
bufferPool:(CVPixelBufferPoolRef)bufferPool
context:(CIContext *)context
{
CVReturn result;
BOOL createdLocalPool = NO;
if (bufferPool == NULL) {
// No pool supplied by the caller: lazily create a transient one sized to the image.
NSDictionary *pixelAttributes = @{
(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{},
(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
(NSString *)kCVPixelBufferWidthKey : @(image.extent.size.width),
(NSString *)kCVPixelBufferHeightKey : @(image.extent.size.height)
};
result = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
(__bridge CFDictionaryRef _Nullable)(pixelAttributes), &bufferPool);
if (result != kCVReturnSuccess) {
SCLogGeneralError(@"[Processing Pipeline] Error creating pixel buffer pool %i", result);
return NULL;
}
createdLocalPool = YES;
}
CVPixelBufferRef resultBuffer = NULL;
result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, bufferPool, &resultBuffer);
if (result == kCVReturnSuccess) {
[context render:image toCVPixelBuffer:resultBuffer];
} else {
SCLogGeneralError(@"[Processing Pipeline] Error creating pixel buffer from pool %i", result);
}
if (createdLocalPool) {
// Release the transient pool so it does not leak; the returned buffer keeps its backing alive.
CVPixelBufferPoolRelease(bufferPool);
}
return resultBuffer;
}
+ (CMSampleBufferRef)sampleBufferFromImage:(CIImage *)image
oldSampleBuffer:(CMSampleBufferRef)oldSampleBuffer
bufferPool:(CVPixelBufferPoolRef)bufferPool
context:(CIContext *)context
{
CVPixelBufferRef pixelBuffer =
[SCProcessingModuleUtils pixelBufferFromImage:image bufferPool:bufferPool context:context];
if (!pixelBuffer) {
SCLogGeneralError(@"[Processing Pipeline] Error creating new pixel buffer from image");
return oldSampleBuffer;
}
CMSampleBufferRef newSampleBuffer = NULL;
CMSampleTimingInfo timingInfo = kCMTimingInfoInvalid;
CMSampleBufferGetSampleTimingInfo(oldSampleBuffer, 0, &timingInfo);
CMVideoFormatDescriptionRef videoInfo = NULL;
OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo);
if (status != noErr) {
SCLogGeneralError(@"[Processing Pipeline] Error creating video format description %i", (int)status);
CVPixelBufferRelease(pixelBuffer);
return oldSampleBuffer;
}
status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo,
&timingInfo, &newSampleBuffer);
CFRelease(videoInfo); // The format description is retained by the sample buffer on success.
if (status != noErr) {
SCLogGeneralError(@"[Processing Pipeline] Error creating CMSampleBuffer %i", (int)status);
CVPixelBufferRelease(pixelBuffer);
return oldSampleBuffer;
}
CVPixelBufferRelease(pixelBuffer);
return newSampleBuffer;
}
@end

View File

@ -0,0 +1,23 @@
//
// SCProcessingPipeline.h
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 5/30/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCProcessingModule.h"
#import <Foundation/Foundation.h>
/*
@class SCProcessingPipeline
The SCProcessingPipeline chains together a series of SCProcessingModules and passes the frame through
each of them in a pre-determined order. This is done through a chain of command, where the resulting
frame from the first module is passed to the second, then to the third, and so on.
*/
@interface SCProcessingPipeline : NSObject <SCProcessingModule>
@property (nonatomic, strong) NSMutableArray<id<SCProcessingModule>> *processingModules;
@end

View File

@ -0,0 +1,46 @@
//
// SCProcessingPipeline.m
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 5/30/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCProcessingPipeline.h"
#import <SCFoundation/NSString+Helpers.h>
@import CoreMedia;
@implementation SCProcessingPipeline
- (CMSampleBufferRef)render:(RenderData)renderData
{
for (id<SCProcessingModule> module in self.processingModules) {
// Skip any module that requires depth data when this frame has none.
if (![module requiresDepthData] || renderData.depthDataMap) {
renderData.sampleBuffer = [module render:renderData];
}
}
return renderData.sampleBuffer;
}
- (NSString *)description
{
NSMutableString *desc = [NSMutableString new];
[desc appendString:@"ProcessingPipeline, modules: "];
for (id<SCProcessingModule> module in self.processingModules) {
[desc appendFormat:@"%@, ", [module description]];
}
if (self.processingModules.count > 0) {
return [desc substringToIndex:desc.lengthOfCharacterSequences - 2];
}
return desc;
}
- (BOOL)requiresDepthData
{
return NO;
}
@end

View File

@ -0,0 +1,29 @@
//
// SCProcessingPipelineBuilder.h
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/1/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
@class SCDigitalExposureHandler;
@class SCProcessingPipeline;
/*
@class SCProcessingPipelineBuilder
The builder object is responsible for creating the SCProcessingPipeline, the underlying
SCProcessingModules, and for chaining the SCProcessingModules together in a pre-determined
order. The builder is also responsible for providing consumers with handler objects.
*/
@interface SCProcessingPipelineBuilder : NSObject
@property (nonatomic) BOOL useExposureAdjust;
@property (nonatomic) BOOL portraitModeEnabled;
@property (nonatomic) BOOL enhancedNightMode;
- (SCProcessingPipeline *)build;
@end
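// Illustrative usage (hypothetical call site):
//
//   SCProcessingPipelineBuilder *builder = [[SCProcessingPipelineBuilder alloc] init];
//   builder.useExposureAdjust = YES;
//   builder.enhancedNightMode = YES;
//   SCProcessingPipeline *pipeline = [builder build]; // nil when no feature is enabled
//   RenderData renderData = {.sampleBuffer = sampleBuffer};
//   CMSampleBufferRef processed = [pipeline render:renderData];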

View File

@ -0,0 +1,57 @@
//
// SCProcessingPipelineBuilder.m
// Snapchat
//
// Created by Yu-Kuan (Anthony) Lai on 6/1/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//
#import "SCProcessingPipelineBuilder.h"
#import "SCCameraTweaks.h"
#import "SCDepthBlurMetalRenderCommand.h"
#import "SCDepthToGrayscaleMetalRenderCommand.h"
#import "SCDigitalExposureHandler.h"
#import "SCExposureAdjustMetalRenderCommand.h"
#import "SCMetalUtils.h"
#import "SCNightModeEnhancementMetalRenderCommand.h"
#import "SCProcessingPipeline.h"
@implementation SCProcessingPipelineBuilder
- (SCProcessingPipeline *)build
{
if (!_useExposureAdjust && !_portraitModeEnabled && !_enhancedNightMode) { // in the future: && !useA && !useB ...
return nil;
}
SCProcessingPipeline *processingPipeline = [[SCProcessingPipeline alloc] init];
NSMutableArray<id<SCProcessingModule>> *processingModules = [NSMutableArray array];
// The order in which modules are added matters!
if (_useExposureAdjust && SCDeviceSupportsMetal()) {
// This check looks redundant right now, but it will become necessary as more modules are added.
SCMetalModule *exposureAdjustMetalModule =
[[SCMetalModule alloc] initWithMetalRenderCommand:[SCExposureAdjustMetalRenderCommand new]];
[processingModules addObject:exposureAdjustMetalModule];
}
if (_portraitModeEnabled) {
id<SCMetalRenderCommand> renderCommand = SCCameraTweaksDepthToGrayscaleOverride()
? [SCDepthToGrayscaleMetalRenderCommand new]
: [SCDepthBlurMetalRenderCommand new];
SCMetalModule *depthBlurMetalModule = [[SCMetalModule alloc] initWithMetalRenderCommand:renderCommand];
[processingModules addObject:depthBlurMetalModule];
}
if (_enhancedNightMode && SCDeviceSupportsMetal()) {
SCMetalModule *nightModeEnhancementModule =
[[SCMetalModule alloc] initWithMetalRenderCommand:[SCNightModeEnhancementMetalRenderCommand new]];
[processingModules addObject:nightModeEnhancementModule];
}
processingPipeline.processingModules = processingModules;
return processingPipeline;
}
@end

View File

@ -0,0 +1,23 @@
//
// SCStillImageDepthBlurFilter.h
// Snapchat
//
// Created by Brian Ng on 10/11/17.
//
#import "SCProcessingModule.h"
#import <Foundation/Foundation.h>
/*
@class SCStillImageDepthBlurFilter
This module uses the CIDepthBlurEffect CIFilter, which combines RGB and depth information to produce an
image with the portrait mode effect (background blurred, foreground sharp).
*/
@interface SCStillImageDepthBlurFilter : NSObject
// Applies the CIDepthBlurEffect filter to a still image capture photo. If an error occurs, the original
// photoData is returned.
- (NSData *)renderWithPhotoData:(NSData *)photoData renderData:(RenderData)renderData NS_AVAILABLE_IOS(11_0);
@end
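// Illustrative usage (hypothetical call site): apply the depth blur to a captured photo,
// falling back to the original JPEG data if the filter cannot run.
//
//   SCStillImageDepthBlurFilter *filter = [[SCStillImageDepthBlurFilter alloc] init];
//   RenderData renderData = {
//       .sampleBuffer = sampleBuffer,
//       .depthDataMap = depthDataMap,
//   };
//   NSData *jpegData = [filter renderWithPhotoData:photoData renderData:renderData];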

View File

@ -0,0 +1,68 @@
//
// SCStillImageDepthBlurFilter.m
// Snapchat
//
// Created by Brian Ng on 10/11/17.
//
#import "SCStillImageDepthBlurFilter.h"
#import "SCCameraTweaks.h"
#import "SCProcessingModuleUtils.h"
@import CoreMedia;
@implementation SCStillImageDepthBlurFilter {
CIContext *_context;
CIFilter *_filter;
CVPixelBufferPoolRef _bufferPool;
}
- (instancetype)init
{
if (self = [super init]) {
_context = [CIContext contextWithOptions:@{ kCIContextWorkingFormat : @(kCIFormatRGBAh) }];
_filter = [CIFilter filterWithName:@"CIDepthBlurEffect"];
}
return self;
}
- (void)dealloc
{
CVPixelBufferPoolFlush(_bufferPool, kCVPixelBufferPoolFlushExcessBuffers);
CVPixelBufferPoolRelease(_bufferPool);
}
- (NSData *)renderWithPhotoData:(NSData *)photoData renderData:(RenderData)renderData NS_AVAILABLE_IOS(11_0)
{
CIImage *mainImage = [CIImage imageWithData:photoData];
CVPixelBufferRef disparityImagePixelBuffer = renderData.depthDataMap;
CIImage *disparityImage = [CIImage imageWithCVPixelBuffer:disparityImagePixelBuffer];
if (!disparityImage) {
return photoData;
}
[_filter setValue:mainImage forKey:kCIInputImageKey];
[_filter setValue:disparityImage forKey:kCIInputDisparityImageKey];
if (renderData.depthBlurPointOfInterest && SCCameraTweaksEnableFilterInputFocusRect()) {
CGPoint pointOfInterest = *renderData.depthBlurPointOfInterest;
[_filter setValue:[CIVector vectorWithX:pointOfInterest.x Y:pointOfInterest.y Z:1 W:1]
forKey:@"inputFocusRect"];
}
CIImage *result = [_filter outputImage];
if (!result) {
return photoData;
}
CGColorSpaceRef deviceRGBColorSpace = CGColorSpaceCreateDeviceRGB();
NSData *processedPhotoData = [_context JPEGRepresentationOfImage:result colorSpace:deviceRGBColorSpace options:@{}];
CGColorSpaceRelease(deviceRGBColorSpace);
if (!processedPhotoData) {
return photoData;
}
renderData.sampleBuffer = [SCProcessingModuleUtils sampleBufferFromImage:result
oldSampleBuffer:renderData.sampleBuffer
bufferPool:_bufferPool
context:_context];
return processedPhotoData;
}
@end