Add files via upload
This commit is contained in: parent db9d4071ce, commit 402429fa18
103 ManagedCapturer/SCCaptureBaseState.h Normal file
@@ -0,0 +1,103 @@
//
//  SCCaptureBaseState.h
//  Snapchat
//
//  Created by Lin Jia on 10/19/17.
//
//

#import "SCCaptureCommon.h"
#import "SCCaptureStateDelegate.h"
#import "SCCaptureStateMachineBookKeeper.h"
#import "SCCaptureStateUtil.h"
#import "SCCaptureWorker.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerState.h"
#import "SCStateTransitionPayload.h"

#import <Foundation/Foundation.h>

@class SCCaptureResource;
@class SCCapturerToken;
@class SCAudioConfiguration;
@class SCQueuePerformer;

/*
 Every state machine state needs to inherit from SCCaptureBaseState to have the APIs. A state will in general
 implement only the APIs which are legal for itself. If an illegal API is invoked, SCCaptureBaseState handles it.
 The intended behavior:
 1) crash using SCAssert in debug builds;
 2) ignore the API call, and log the call, for alpha/master/production;
 3) in the future, we will introduce a dangerous-API-call concept, and restart the camera in such cases, to avoid
    bad state.

 Every state machine state is built to follow functional programming as much as possible. The shared resources
 between states are passed into each API via SCCaptureResource.
 */

@interface SCCaptureBaseState : NSObject

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;

/* The following API is invoked at the moment the state machine context promotes the state to be the current state.
 * The state uses this chance to do work, such as starting recording for the recording state.
 */
- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context;

- (SCCaptureStateMachineStateId)stateId;

- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
                                   resource:(SCCaptureResource *)resource
                          completionHandler:(dispatch_block_t)completionHandler
                                    context:(NSString *)context;

- (void)startRunningWithCapturerToken:(SCCapturerToken *)token
                             resource:(SCCaptureResource *)resource
                    completionHandler:(dispatch_block_t)completionHandler
                              context:(NSString *)context;

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                            resource:(SCCaptureResource *)resource
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context;

- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource
                     audioConfiguration:(SCAudioConfiguration *)configuration
                                context:(NSString *)context;

- (void)startRecordingWithResource:(SCCaptureResource *)resource
                audioConfiguration:(SCAudioConfiguration *)configuration
                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                       maxDuration:(NSTimeInterval)maxDuration
                           fileURL:(NSURL *)fileURL
                  captureSessionID:(NSString *)captureSessionID
                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                           context:(NSString *)context;

- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context;

- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context;

- (void)captureStillImageWithResource:(SCCaptureResource *)resource
                          aspectRatio:(CGFloat)aspectRatio
                     captureSessionID:(NSString *)captureSessionID
                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                              context:(NSString *)context;

- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration
                              resource:(SCCaptureResource *)resource
                               context:(NSString *)context;

- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler
                             resource:(SCCaptureResource *)resource
                              context:(NSString *)context;

@property (nonatomic, strong, readonly) SCCaptureStateMachineBookKeeper *bookKeeper;

@end
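To make the subclassing contract above concrete, here is a minimal sketch of a custom state; SCCaptureExampleState and its state id are hypothetical and not part of this commit. A concrete state overrides only the APIs legal for it; every other call falls through to SCCaptureBaseState's illegal-API handling:

// Hypothetical example; illustrates the intended subclassing pattern only.
@interface SCCaptureExampleState : SCCaptureBaseState
@end

@implementation SCCaptureExampleState

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureExampleStateId; // assumed addition to the state id enum
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    // Legal for this state: do real work here instead of invoking the base
    // class's illegal-API handling. As a stand-in, record the call with the
    // book keeper exposed by the base class.
    [self.bookKeeper logAPICalled:@"didBecomeCurrentState" context:context];
}

@end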
169 ManagedCapturer/SCCaptureBaseState.m Normal file
@@ -0,0 +1,169 @@
//
//  SCCaptureBaseState.m
//  Snapchat
//
//  Created by Lin Jia on 10/19/17.
//
//

#import "SCCaptureBaseState.h"

#import "SCCaptureStateMachineBookKeeper.h"
#import "SCCapturerToken.h"
#import "SCManagedCapturerV1_Private.h"

#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>

@implementation SCCaptureBaseState {
    SCCaptureStateMachineBookKeeper *_bookKeeper;
    SCQueuePerformer *_performer;
    __weak id<SCCaptureStateDelegate> _delegate;
}

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super init];
    if (self) {
        SCAssert(performer, @"");
        SCAssert(bookKeeper, @"");
        _bookKeeper = bookKeeper;
        _performer = performer;
        _delegate = delegate;
    }
    return self;
}

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureBaseStateId;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"didBecomeCurrentState" context:context];
}

- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
                                   resource:(SCCaptureResource *)resource
                          completionHandler:(dispatch_block_t)completionHandler
                                    context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"initializeCaptureWithDevicePosition" context:context];
}

- (void)startRunningWithCapturerToken:(SCCapturerToken *)token
                             resource:(SCCaptureResource *)resource
                    completionHandler:(dispatch_block_t)completionHandler
                              context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"startRunningWithCapturerToken" context:context];
}

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                            resource:(SCCaptureResource *)resource
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCAssertPerformer(_performer);
    BOOL actuallyStopped = [[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token
                                                                           completionHandler:completionHandler
                                                                                     context:context];
    // TODO: Fix CCAM-14450
    // This is a temporary solution for https://jira.sc-corp.net/browse/CCAM-14450
    // It is caused by switching from the scanning state to the stop-running state while the view is disappearing in
    // the scanning state, which can be reproduced by triggering scanning and then switching to the maps page.
    // We removed the SCAssert to ignore the crashes in the master branch and will find a solution for the illegal
    // state machine call later.

    if (self.stateId != SCCaptureScanningStateId) {
        SCAssert(!actuallyStopped, @"actuallyStopped in state: %@ with context: %@", SCCaptureStateName([self stateId]),
                 context);
    } else {
        SCLogCaptureStateMachineInfo(@"actuallyStopped:%d in state: %@ with context: %@", actuallyStopped,
                                     SCCaptureStateName([self stateId]), context);
    }

    if (actuallyStopped) {
        [_delegate currentState:self
            requestToTransferToNewState:SCCaptureInitializedStateId
                                payload:nil
                                context:context];
    }
}

- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource
                     audioConfiguration:(SCAudioConfiguration *)configuration
                                context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"prepareForRecordingWithResource" context:context];
}

- (void)startRecordingWithResource:(SCCaptureResource *)resource
                audioConfiguration:(SCAudioConfiguration *)configuration
                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                       maxDuration:(NSTimeInterval)maxDuration
                           fileURL:(NSURL *)fileURL
                  captureSessionID:(NSString *)captureSessionID
                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                           context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"startRecordingWithResource" context:context];
}

- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"stopRecordingWithResource" context:context];
}

- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"cancelRecordingWithResource" context:context];
}

- (void)captureStillImageWithResource:(SCCaptureResource *)resource
                          aspectRatio:(CGFloat)aspectRatio
                     captureSessionID:(NSString *)captureSessionID
                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                              context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"captureStillImageWithResource" context:context];
}

- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration
                              resource:(SCCaptureResource *)resource
                               context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"startScanWithScanConfiguration" context:context];
}

- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler
                             resource:(SCCaptureResource *)resource
                              context:(NSString *)context
{
    // Temporary solution until IDT-12520 is resolved.
    [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource];
    //[self _handleBaseStateBehavior:@"stopScanWithCompletionHandler"];
}

- (void)_handleBaseStateBehavior:(NSString *)illegalAPIName context:(NSString *)context
{
    [_bookKeeper state:[self stateId]
        illegalAPIcalled:illegalAPIName
               callStack:[NSThread callStackSymbols]
                 context:context];
    if (SCIsDebugBuild()) {
        SCAssertFail(@"illegal API invoked on capture state machine");
    }
}

- (SCCaptureStateMachineBookKeeper *)bookKeeper
{
    return _bookKeeper;
}
@end
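For reference, a sketch of what the illegal-API path above looks like from a call site; the state, arguments, and context string are illustrative:

// Suppose the current state does not override stopRecordingWithResource:context:.
[currentState stopRecordingWithResource:resource context:@"example-context"];
// Debug builds: SCAssertFail fires inside _handleBaseStateBehavior:context:.
// Alpha/master/production: the call is ignored, and the book keeper logs the
// state name, the API name, and a truncated call stack.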
30 ManagedCapturer/SCCaptureStateDelegate.h Normal file
@@ -0,0 +1,30 @@
//
//  SCCaptureStateDelegate.h
//  Snapchat
//
//  Created by Lin Jia on 10/27/17.
//
//

#import "SCCaptureStateUtil.h"

#import <Foundation/Foundation.h>

@class SCCaptureBaseState;
@class SCStateTransitionPayload;

/*
 The state machine state delegate is used by state machine states to hint to the system that "I am done, now transfer
 to another state".

 Currently, SCCaptureStateMachineContext is the central piece that glues all states together, and it is the delegate
 for those states.
 */

@protocol SCCaptureStateDelegate <NSObject>

- (void)currentState:(SCCaptureBaseState *)state
    requestToTransferToNewState:(SCCaptureStateMachineStateId)newState
                        payload:(SCStateTransitionPayload *)payload
                        context:(NSString *)context;

@end
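A state requests a transition by calling back through this delegate once its work is done; the call below mirrors the one in SCCaptureBaseState's stopRunningWithCapturerToken:... implementation:

// From inside a concrete state (_delegate is the state's stored
// id<SCCaptureStateDelegate>, i.e. the state machine context):
[_delegate currentState:self
    requestToTransferToNewState:SCCaptureInitializedStateId
                        payload:nil
                        context:context];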
29 ManagedCapturer/SCCaptureStateMachineBookKeeper.h Normal file
@@ -0,0 +1,29 @@
//
//  SCCaptureStateMachineBookKeeper.h
//  Snapchat
//
//  Created by Lin Jia on 10/27/17.
//
//

#import "SCCaptureStateUtil.h"

#import <Foundation/Foundation.h>

/*
 The book keeper records every state transition and every illegal API call.
 */

@interface SCCaptureStateMachineBookKeeper : NSObject

- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId
                         to:(SCCaptureStateMachineStateId)toId
                    context:(NSString *)context;

- (void)state:(SCCaptureStateMachineStateId)captureState
    illegalAPIcalled:(NSString *)illegalAPIName
           callStack:(NSArray<NSString *> *)callStack
             context:(NSString *)context;

- (void)logAPICalled:(NSString *)apiName context:(NSString *)context;
@end
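States report illegal calls through the signature above; this usage mirrors the call site in SCCaptureBaseState.m:

[_bookKeeper state:[self stateId]
    illegalAPIcalled:@"startRecordingWithResource"
           callStack:[NSThread callStackSymbols]
             context:context];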
63 ManagedCapturer/SCCaptureStateMachineBookKeeper.m Normal file
@@ -0,0 +1,63 @@
//
//  SCCaptureStateMachineBookKeeper.m
//  Snapchat
//
//  Created by Lin Jia on 10/27/17.
//
//

#import "SCCaptureStateMachineBookKeeper.h"

#import "SCCaptureStateUtil.h"
#import "SCLogger+Camera.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCLogger/SCCameraMetrics.h>

@interface SCCaptureStateMachineBookKeeper () {
    NSDate *_lastStateStartTime;
}
@end

@implementation SCCaptureStateMachineBookKeeper

- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId
                         to:(SCCaptureStateMachineStateId)toId
                    context:(NSString *)context
{
    NSDate *date = [NSDate date];
    SCLogCaptureStateMachineInfo(@"State %@ life span: %f seconds, transition to: %@, in context:%@, at: %@ \n",
                                 SCCaptureStateName(fromId), [date timeIntervalSinceDate:_lastStateStartTime],
                                 SCCaptureStateName(toId), context, date);
    _lastStateStartTime = date;
}

- (void)state:(SCCaptureStateMachineStateId)captureState
    illegalAPIcalled:(NSString *)illegalAPIName
           callStack:(NSArray<NSString *> *)callStack
             context:(NSString *)context
{
    SCAssert(callStack, @"call stack empty");
    SCAssert(illegalAPIName, @"");
    SCAssert(context, @"Context is empty");
    SCLogCaptureStateMachineError(@"State: %@, illegal API invoke: %@, at: %@, callstack: %@ \n",
                                  SCCaptureStateName(captureState), illegalAPIName, [NSDate date], callStack);
    NSArray<NSString *> *reportedArray =
        [callStack count] > 15 ? [callStack subarrayWithRange:NSMakeRange(0, 15)] : callStack;
    [[SCLogger sharedInstance] logEvent:kSCCameraStateMachineIllegalAPICall
                             parameters:@{
                                 @"state" : SCCaptureStateName(captureState),
                                 @"API" : illegalAPIName,
                                 @"call_stack" : reportedArray,
                                 @"context" : context
                             }];
}

- (void)logAPICalled:(NSString *)apiName context:(NSString *)context
{
    SCAssert(apiName, @"API name is empty");
    SCAssert(context, @"Context is empty");
    SCLogCaptureStateMachineInfo(@"api: %@ context: %@", apiName, context);
}
@end
76 ManagedCapturer/SCCaptureStateMachineContext.h Normal file
@@ -0,0 +1,76 @@
//
//  SCCaptureStateMachineContext.h
//  Snapchat
//
//  Created by Lin Jia on 10/18/17.
//
//

#import "SCCaptureCommon.h"
#import "SCManagedCaptureDevice.h"

#import <SCAudio/SCAudioConfiguration.h>

#import <Foundation/Foundation.h>

/*
 SCCaptureStateMachineContext is the central piece that glues all states together.

 It passes API calls through to the current state.

 This is the classic state machine design pattern:
 https://en.wikipedia.org/wiki/State_pattern

 It is also the delegate for the states it manages, so that those states can tell the state machine context to
 transition to the next state.
 */

@class SCCaptureResource;
@class SCCapturerToken;

@interface SCCaptureStateMachineContext : NSObject

- (instancetype)initWithResource:(SCCaptureResource *)resource;

- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                                        completionHandler:(dispatch_block_t)completionHandler
                                                  context:(NSString *)context;

- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler;

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context;

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                               after:(NSTimeInterval)delay
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context;

- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration
                                                        context:(NSString *)context;

- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                      audioConfiguration:(SCAudioConfiguration *)configuration
                             maxDuration:(NSTimeInterval)maxDuration
                                 fileURL:(NSURL *)fileURL
                        captureSessionID:(NSString *)captureSessionID
                       completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                                 context:(NSString *)context;

- (void)stopRecordingWithContext:(NSString *)context;

- (void)cancelRecordingWithContext:(NSString *)context;

- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                               context:(NSString *)context;

#pragma mark - Scanning
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context;
- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context;

@end
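A minimal sketch of driving this API from client code; the resource, the device-position value, and the context strings are assumptions for illustration:

// Hypothetical call site; `resource` and `devicePosition` are assumed to exist.
SCCaptureStateMachineContext *stateMachine =
    [[SCCaptureStateMachineContext alloc] initWithResource:resource];
[stateMachine initializeCaptureWithDevicePositionAsynchronously:devicePosition
                                               completionHandler:^{
                                                   // Capture is initialized.
                                               }
                                                         context:@"example"];
SCCapturerToken *token = [stateMachine startRunningWithContext:@"example"
                                              completionHandler:^{
                                              }];
// Keep the token: stopping the session later requires it.
[stateMachine stopRunningWithCapturerToken:token completionHandler:nil context:@"example"];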
301 ManagedCapturer/SCCaptureStateMachineContext.m Normal file
@@ -0,0 +1,301 @@
//
//  SCCaptureStateMachineContext.m
//  Snapchat
//
//  Created by Lin Jia on 10/18/17.
//
//

#import "SCCaptureStateMachineContext.h"

#import "SCCaptureBaseState.h"
#import "SCCaptureImageState.h"
#import "SCCaptureImageWhileRecordingState.h"
#import "SCCaptureInitializedState.h"
#import "SCCaptureRecordingState.h"
#import "SCCaptureResource.h"
#import "SCCaptureRunningState.h"
#import "SCCaptureScanningState.h"
#import "SCCaptureStateMachineBookKeeper.h"
#import "SCCaptureStateUtil.h"
#import "SCCaptureUninitializedState.h"
#import "SCCaptureWorker.h"
#import "SCCapturerToken.h"
#import "SCStateTransitionPayload.h"

#import <SCAudio/SCAudioConfiguration.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger+Performance.h>

@interface SCCaptureStateMachineContext () <SCCaptureStateDelegate> {
    SCQueuePerformer *_queuePerformer;

    // Cache all the states.
    NSMutableDictionary<SCCaptureStateKey *, SCCaptureBaseState *> *_states;
    SCCaptureBaseState *_currentState;
    SCCaptureStateMachineBookKeeper *_bookKeeper;
    SCCaptureResource *_captureResource;
}
@end

@implementation SCCaptureStateMachineContext

- (instancetype)initWithResource:(SCCaptureResource *)resource
{
    self = [super init];
    if (self) {
        SCAssert(resource, @"");
        SCAssert(resource.queuePerformer, @"");
        _captureResource = resource;
        _queuePerformer = resource.queuePerformer;
        _states = [[NSMutableDictionary<SCCaptureStateKey *, SCCaptureBaseState *> alloc] init];
        _bookKeeper = [[SCCaptureStateMachineBookKeeper alloc] init];
        [self _setCurrentState:SCCaptureUninitializedStateId payload:nil context:SCCapturerContext];
    }
    return self;
}

- (void)_setCurrentState:(SCCaptureStateMachineStateId)stateId
                 payload:(SCStateTransitionPayload *)payload
                 context:(NSString *)context
{
    switch (stateId) {
    case SCCaptureUninitializedStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureUninitializedState *uninitializedState =
                [[SCCaptureUninitializedState alloc] initWithPerformer:_queuePerformer
                                                            bookKeeper:_bookKeeper
                                                              delegate:self];
            [_states setObject:uninitializedState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureInitializedStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureInitializedState *initializedState =
                [[SCCaptureInitializedState alloc] initWithPerformer:_queuePerformer
                                                          bookKeeper:_bookKeeper
                                                            delegate:self];
            [_states setObject:initializedState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureRunningStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureRunningState *runningState =
                [[SCCaptureRunningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];
            [_states setObject:runningState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureImageStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureImageState *captureImageState =
                [[SCCaptureImageState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];
            [_states setObject:captureImageState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureImageWhileRecordingStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureImageWhileRecordingState *captureImageWhileRecordingState =
                [[SCCaptureImageWhileRecordingState alloc] initWithPerformer:_queuePerformer
                                                                  bookKeeper:_bookKeeper
                                                                    delegate:self];
            [_states setObject:captureImageWhileRecordingState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureScanningStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureScanningState *scanningState =
                [[SCCaptureScanningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];
            [_states setObject:scanningState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureRecordingStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureRecordingState *recordingState = [[SCCaptureRecordingState alloc] initWithPerformer:_queuePerformer
                                                                                              bookKeeper:_bookKeeper
                                                                                                delegate:self];
            [_states setObject:recordingState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    default:
        SCAssert(NO, @"illegal state id");
        break;
    }
    [_currentState didBecomeCurrentState:payload resource:_captureResource context:context];
}

- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                                        completionHandler:(dispatch_block_t)completionHandler
                                                  context:(NSString *)context
{
    [SCCaptureWorker setupCapturePreviewLayerController];

    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState initializeCaptureWithDevicePosition:devicePosition
                                                  resource:_captureResource
                                         completionHandler:completionHandler
                                                   context:context];
    }];
}

- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler
{
    [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@""];

    SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context];
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState startRunningWithCapturerToken:token
                                            resource:_captureResource
                                   completionHandler:completionHandler
                                             context:context];
    }];

    return token;
}

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState stopRunningWithCapturerToken:token
                                           resource:_captureResource
                                  completionHandler:completionHandler
                                            context:context];
    }];
}

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                               after:(NSTimeInterval)delay
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState stopRunningWithCapturerToken:token
                                           resource:_captureResource
                                  completionHandler:completionHandler
                                            context:context];
    }
                       after:delay];
}

- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration
                                                        context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState prepareForRecordingWithResource:_captureResource
                                    audioConfiguration:configuration
                                               context:context];
    }];
}

- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                      audioConfiguration:(SCAudioConfiguration *)configuration
                             maxDuration:(NSTimeInterval)maxDuration
                                 fileURL:(NSURL *)fileURL
                        captureSessionID:(NSString *)captureSessionID
                       completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                                 context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState startRecordingWithResource:_captureResource
                               audioConfiguration:configuration
                                   outputSettings:outputSettings
                                      maxDuration:maxDuration
                                          fileURL:fileURL
                                 captureSessionID:captureSessionID
                                completionHandler:completionHandler
                                          context:context];
    }];
}

- (void)stopRecordingWithContext:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState stopRecordingWithResource:_captureResource context:context];
    }];
}

- (void)cancelRecordingWithContext:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState cancelRecordingWithResource:_captureResource context:context];
    }];
}

- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                               context:(NSString *)context
{
    [_queuePerformer perform:^() {
        [_currentState captureStillImageWithResource:_captureResource
                                         aspectRatio:aspectRatio
                                    captureSessionID:captureSessionID
                                   completionHandler:completionHandler
                                             context:context];
    }];
}

- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context
{
    [_queuePerformer perform:^() {
        [_currentState startScanWithScanConfiguration:configuration resource:_captureResource context:context];
    }];
}

- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context
{
    [_queuePerformer perform:^() {
        [_currentState stopScanWithCompletionHandler:completionHandler resource:_captureResource context:context];
    }];
}

- (void)currentState:(SCCaptureBaseState *)state
    requestToTransferToNewState:(SCCaptureStateMachineStateId)newState
                        payload:(SCStateTransitionPayload *)payload
                        context:(NSString *)context
{
    SCAssertPerformer(_queuePerformer);
    SCAssert(_currentState == state, @"state: %@ newState: %@ context:%@", SCCaptureStateName([state stateId]),
             SCCaptureStateName(newState), context);
    if (payload) {
        SCAssert(payload.fromState == [state stateId], @"From state id check");
        SCAssert(payload.toState == newState, @"To state id check");
    }

    if (_currentState != state) {
        return;
    }

    [_bookKeeper stateTransitionFrom:[state stateId] to:newState context:context];
    [self _setCurrentState:newState payload:payload context:context];
}

@end
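The switch in _setCurrentState:payload:context: repeats the same lazy-create-then-cache step for every state. A possible refactoring (a sketch only, not part of this commit) maps state ids to classes and collapses the duplication; it assumes every concrete state shares the initWithPerformer:bookKeeper:delegate: initializer, as they do above:

// Sketch of a table-driven replacement for the per-state switch.
- (SCCaptureBaseState *)_stateForId:(SCCaptureStateMachineStateId)stateId
{
    static NSDictionary<SCCaptureStateKey *, Class> *classMap;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        classMap = @{
            @(SCCaptureUninitializedStateId) : [SCCaptureUninitializedState class],
            @(SCCaptureInitializedStateId) : [SCCaptureInitializedState class],
            @(SCCaptureRunningStateId) : [SCCaptureRunningState class],
            @(SCCaptureImageStateId) : [SCCaptureImageState class],
            @(SCCaptureImageWhileRecordingStateId) : [SCCaptureImageWhileRecordingState class],
            @(SCCaptureScanningStateId) : [SCCaptureScanningState class],
            @(SCCaptureRecordingStateId) : [SCCaptureRecordingState class],
        };
    });
    SCCaptureBaseState *state = _states[@(stateId)];
    if (!state) {
        Class stateClass = classMap[@(stateId)];
        SCAssert(stateClass, @"illegal state id");
        // All concrete states share the designated initializer, so one
        // allocation path covers every case.
        state = [[stateClass alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];
        _states[@(stateId)] = state;
    }
    return state;
}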
37 ManagedCapturer/SCCaptureStateUtil.h Normal file
@@ -0,0 +1,37 @@
//
//  SCCaptureStateUtil.h
//  Snapchat
//
//  Created by Lin Jia on 10/27/17.
//
//

#import "SCLogger+Camera.h"

#import <SCBase/SCMacros.h>
#import <SCFoundation/SCLog.h>

#import <Foundation/Foundation.h>

#define SCLogCaptureStateMachineInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__)
#define SCLogCaptureStateMachineError(fmt, ...) SCLogCoreCameraError(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__)

typedef NSNumber SCCaptureStateKey;

typedef NS_ENUM(NSUInteger, SCCaptureStateMachineStateId) {
    SCCaptureBaseStateId = 0,
    SCCaptureUninitializedStateId,
    SCCaptureInitializedStateId,
    SCCaptureImageStateId,
    SCCaptureImageWhileRecordingStateId,
    SCCaptureRunningStateId,
    SCCaptureRecordingStateId,
    SCCaptureScanningStateId,
    SCCaptureStateMachineStateIdCount
};

SC_EXTERN_C_BEGIN

NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId);

SC_EXTERN_C_END
38 ManagedCapturer/SCCaptureStateUtil.m Normal file
@@ -0,0 +1,38 @@
//
//  SCCaptureStateUtil.m
//  Snapchat
//
//  Created by Lin Jia on 10/27/17.
//
//

#import "SCCaptureStateUtil.h"

#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCAssertWrapper.h>

NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId)
{
    switch (stateId) {
    case SCCaptureBaseStateId:
        return @"SCCaptureBaseStateId";
    case SCCaptureUninitializedStateId:
        return @"SCCaptureUninitializedStateId";
    case SCCaptureInitializedStateId:
        return @"SCCaptureInitializedStateId";
    case SCCaptureImageStateId:
        return @"SCCaptureImageStateId";
    case SCCaptureImageWhileRecordingStateId:
        return @"SCCaptureImageWhileRecordingStateId";
    case SCCaptureRunningStateId:
        return @"SCCaptureRunningStateId";
    case SCCaptureRecordingStateId:
        return @"SCCaptureRecordingStateId";
    case SCCaptureScanningStateId:
        return @"SCCaptureScanningStateId";
    default:
        SCCAssert(NO, @"illegal state id");
        break;
    }
    return @"SCIllegalStateId";
}
12 ManagedCapturer/SCManagedCapturerLogging.h Normal file
@@ -0,0 +1,12 @@
//
//  SCManagedCapturerLogging.h
//  Snapchat
//
//  Created by Lin Jia on 11/13/17.
//

#import <SCFoundation/SCLog.h>

#define SCLogCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
#define SCLogCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
#define SCLogCapturerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
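These macros rely on compile-time string-literal concatenation to prepend the fixed "[SCManagedCapturer]" tag to the format string; the message below is illustrative:

SCLogCapturerInfo(@"Started running with token: %@", token);
// Preprocesses to:
// SCLogCoreCameraInfo(@"[SCManagedCapturer] Started running with token: %@", token);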
667 ManagedCapturer/SCManagedPhotoCapturer.m Normal file
@ -0,0 +1,667 @@
|
||||
//
|
||||
// SCManagedPhotoCapturer.m
|
||||
// Snapchat
|
||||
//
|
||||
// Created by Chao Pang on 10/5/16.
|
||||
// Copyright © 2016 Snapchat, Inc. All rights reserved.
|
||||
//
|
||||
|
||||
#import "SCManagedPhotoCapturer.h"
|
||||
|
||||
#import "AVCaptureConnection+InputDevice.h"
|
||||
#import "SCCameraTweaks.h"
|
||||
#import "SCLogger+Camera.h"
|
||||
#import "SCManagedCapturer.h"
|
||||
#import "SCManagedFrameHealthChecker.h"
|
||||
#import "SCManagedStillImageCapturer_Protected.h"
|
||||
#import "SCStillImageCaptureVideoInputMethod.h"
|
||||
#import "SCStillImageDepthBlurFilter.h"
|
||||
|
||||
#import <SCCrashLogger/SCCrashLogger.h>
|
||||
#import <SCFoundation/SCAssertWrapper.h>
|
||||
#import <SCFoundation/SCLog.h>
|
||||
#import <SCFoundation/SCPerforming.h>
|
||||
#import <SCFoundation/SCQueuePerformer.h>
|
||||
#import <SCFoundation/SCTrace.h>
|
||||
#import <SCLenses/SCLens.h>
|
||||
#import <SCLogger/SCCameraMetrics.h>
|
||||
#import <SCLogger/SClogger+Performance.h>
|
||||
#import <SCWebP/UIImage+WebP.h>
|
||||
|
||||
@import ImageIO;
|
||||
|
||||
static NSString *const kSCManagedPhotoCapturerErrorDomain = @"kSCManagedPhotoCapturerErrorDomain";
|
||||
|
||||
static NSInteger const kSCManagedPhotoCapturerErrorEncounteredException = 10000;
|
||||
static NSInteger const kSCManagedPhotoCapturerInconsistentStatus = 10001;
|
||||
|
||||
typedef NS_ENUM(NSUInteger, SCManagedPhotoCapturerStatus) {
|
||||
SCManagedPhotoCapturerStatusPrepareToCapture,
|
||||
SCManagedPhotoCapturerStatusWillCapture,
|
||||
SCManagedPhotoCapturerStatusDidFinishProcess,
|
||||
};
|
||||
|
||||
@interface SCManagedPhotoCapturer () <AVCapturePhotoCaptureDelegate>
|
||||
@end
|
||||
|
||||
@implementation SCManagedPhotoCapturer {
|
||||
AVCapturePhotoOutput *_photoOutput;
|
||||
|
||||
BOOL _shouldCapture;
|
||||
BOOL _shouldEnableHRSI;
|
||||
BOOL _portraitModeCaptureEnabled;
|
||||
NSUInteger _retries;
|
||||
|
||||
CGPoint _portraitModePointOfInterest;
|
||||
SCStillImageDepthBlurFilter *_depthBlurFilter;
|
||||
sc_managed_still_image_capturer_capture_still_image_completion_handler_t _callbackBlock;
|
||||
|
||||
SCStillImageCaptureVideoInputMethod *_videoFileMethod;
|
||||
|
||||
SCManagedPhotoCapturerStatus _status;
|
||||
}
|
||||
|
||||
- (instancetype)initWithSession:(AVCaptureSession *)session
|
||||
performer:(id<SCPerforming>)performer
|
||||
lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore
|
||||
delegate:(id<SCManagedStillImageCapturerDelegate>)delegate
|
||||
{
|
||||
SCTraceStart();
|
||||
self = [super initWithSession:session performer:performer lensProcessingCore:lensProcessingCore delegate:delegate];
|
||||
if (self) {
|
||||
[self setupWithSession:session];
|
||||
_portraitModePointOfInterest = CGPointMake(0.5, 0.5);
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)setupWithSession:(AVCaptureSession *)session
|
||||
{
|
||||
SCTraceStart();
|
||||
_photoOutput = [[AVCapturePhotoOutput alloc] init];
|
||||
_photoOutput.highResolutionCaptureEnabled = YES;
|
||||
[self setAsOutput:session];
|
||||
}
|
||||
|
||||
- (void)setAsOutput:(AVCaptureSession *)session
|
||||
{
|
||||
SCTraceStart();
|
||||
if ([session canAddOutput:_photoOutput]) {
|
||||
[session addOutput:_photoOutput];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled
|
||||
{
|
||||
SCTraceStart();
|
||||
SCAssert([_performer isCurrentPerformer], @"");
|
||||
// Here we cannot directly set _photoOutput.highResolutionCaptureEnabled, since it will cause
|
||||
// black frame blink when enabling lenses. Instead, we enable HRSI in AVCapturePhotoSettings.
|
||||
// https://ph.sc-corp.net/T96228
|
||||
_shouldEnableHRSI = highResolutionStillImageOutputEnabled;
|
||||
}
|
||||
|
||||
- (void)enableStillImageStabilization
|
||||
{
|
||||
// The lens stabilization is enabled when configure AVCapturePhotoSettings
|
||||
// instead of AVCapturePhotoOutput
|
||||
SCTraceStart();
|
||||
}
|
||||
|
||||
- (void)setPortraitModeCaptureEnabled:(BOOL)enabled
|
||||
{
|
||||
_portraitModeCaptureEnabled = enabled;
|
||||
if (@available(ios 11.0, *)) {
|
||||
_photoOutput.depthDataDeliveryEnabled = enabled;
|
||||
}
|
||||
if (enabled && _depthBlurFilter == nil) {
|
||||
_depthBlurFilter = [[SCStillImageDepthBlurFilter alloc] init];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
|
||||
{
|
||||
_portraitModePointOfInterest = pointOfInterest;
|
||||
}
|
||||
|
||||
- (void)removeAsOutput:(AVCaptureSession *)session
|
||||
{
|
||||
SCTraceStart();
|
||||
[session removeOutput:_photoOutput];
|
||||
}
|
||||
|
||||
- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
|
||||
atZoomFactor:(float)zoomFactor
|
||||
fieldOfView:(float)fieldOfView
|
||||
state:(SCManagedCapturerState *)state
|
||||
captureSessionID:(NSString *)captureSessionID
|
||||
shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
|
||||
completionHandler:
|
||||
(sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler
|
||||
{
|
||||
SCTraceStart();
|
||||
SCAssert(completionHandler, @"completionHandler shouldn't be nil");
|
||||
SCAssert([_performer isCurrentPerformer], @"");
|
||||
_retries = 6; // AVFoundation Unknown Error usually resolves itself within 0.5 seconds
|
||||
_aspectRatio = aspectRatio;
|
||||
_zoomFactor = zoomFactor;
|
||||
_fieldOfView = fieldOfView;
|
||||
_state = state;
|
||||
_captureSessionID = captureSessionID;
|
||||
_shouldCaptureFromVideo = shouldCaptureFromVideo;
|
||||
SCAssert(!_completionHandler, @"We shouldn't have a _completionHandler at this point otherwise we are destroying "
|
||||
@"current completion handler.");
|
||||
|
||||
// The purpose of these lines is to attach a strong reference to self to the completion handler.
|
||||
// This is because AVCapturePhotoOutput does not hold a strong reference to its delegate, which acts as a completion
|
||||
// handler.
|
||||
// If self is deallocated during the call to _photoOuptut capturePhotoWithSettings:delegate:, which may happen if
|
||||
// any AVFoundationError occurs,
|
||||
// then it's callback method, captureOutput:didFinish..., will not be called, and the completion handler will be
|
||||
// forgotten.
|
||||
// This comes with a risk of a memory leak. If for whatever reason the completion handler field is never used and
|
||||
// then unset,
|
||||
// then we have a permanent retain cycle.
|
||||
_callbackBlock = completionHandler;
|
||||
__typeof(self) strongSelf = self;
|
||||
_completionHandler = ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error) {
|
||||
strongSelf->_callbackBlock(fullScreenImage, metadata, error);
|
||||
strongSelf->_callbackBlock = nil;
|
||||
};
|
||||
[[SCLogger sharedInstance] logCameraExposureAdjustmentDelayStart];
|
||||
|
||||
if (!_adjustingExposureManualDetect) {
|
||||
SCLogCoreCameraInfo(@"Capturing still image now");
|
||||
[self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyNo];
|
||||
_shouldCapture = NO;
|
||||
} else {
|
||||
SCLogCoreCameraInfo(@"Wait adjusting exposure (or after 0.4 seconds) and then capture still image");
|
||||
_shouldCapture = YES;
|
||||
[self _deadlineCapturePhoto];
|
||||
}
|
||||
}
|
||||
|
||||
#pragma mark - SCManagedDeviceCapacityAnalyzerListener
|
||||
|
||||
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
|
||||
didChangeAdjustingExposure:(BOOL)adjustingExposure
|
||||
{
|
||||
SCTraceStart();
|
||||
@weakify(self);
|
||||
[_performer performImmediatelyIfCurrentPerformer:^{
|
||||
@strongify(self);
|
||||
SC_GUARD_ELSE_RETURN(self);
|
||||
// Since this is handled on a different thread, therefore, dispatch back to the queue we operated on.
|
||||
self->_adjustingExposureManualDetect = adjustingExposure;
|
||||
[self _didChangeAdjustingExposure:adjustingExposure
|
||||
withStrategy:kSCCameraExposureAdjustmentStrategyManualDetect];
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
|
||||
didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
|
||||
{
|
||||
SCTraceStart();
|
||||
@weakify(self);
|
||||
[_performer performImmediatelyIfCurrentPerformer:^{
|
||||
@strongify(self);
|
||||
SC_GUARD_ELSE_RETURN(self);
|
||||
self->_lightingConditionType = lightingCondition;
|
||||
}];
|
||||
}
|
||||
|
||||
#pragma mark - SCManagedCapturerListener
|
||||
|
||||
- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
|
||||
{
|
||||
SCTraceStart();
|
||||
@weakify(self);
|
||||
[_performer performImmediatelyIfCurrentPerformer:^{
|
||||
@strongify(self);
|
||||
SC_GUARD_ELSE_RETURN(self);
|
||||
// Since this is handled on a different thread, therefore, dispatch back to the queue we operated on.
|
||||
[self _didChangeAdjustingExposure:state.adjustingExposure withStrategy:kSCCameraExposureAdjustmentStrategyKVO];
|
||||
}];
|
||||
}
|
||||
|
||||
#pragma mark - AVCapturePhotoCaptureDelegate
|
||||
|
||||
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
|
||||
didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer
|
||||
previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
|
||||
resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
|
||||
bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
|
||||
error:(NSError *)error
|
||||
{
|
||||
SCTraceStart();
|
||||
if (photoSampleBuffer) {
|
||||
CFRetain(photoSampleBuffer);
|
||||
}
|
||||
@weakify(self);
|
||||
[_performer performImmediatelyIfCurrentPerformer:^{
|
||||
SCTraceStart();
|
||||
@strongify(self);
|
||||
SC_GUARD_ELSE_RETURN(self);
|
||||
SC_GUARD_ELSE_RUN_AND_RETURN(photoSampleBuffer, [self _photoCaptureDidFailWithError:error]);
|
||||
if (self->_status == SCManagedPhotoCapturerStatusWillCapture) {
|
||||
NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer
|
||||
previewPhotoSampleBuffer:nil];
|
||||
|
||||
[[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
|
||||
uniqueId:@"IMAGE"
|
||||
splitPoint:@"DID_FINISH_PROCESSING"];
|
||||
[self _capturePhotoFinishedWithImageData:imageData
|
||||
sampleBuffer:photoSampleBuffer
|
||||
cameraInfo:cameraInfoForBuffer(photoSampleBuffer)
|
||||
error:error];
|
||||
|
||||
} else {
|
||||
SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@",
|
||||
[self _photoCapturerStatusToString:self->_status]);
|
||||
[self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
|
||||
code:kSCManagedPhotoCapturerInconsistentStatus
|
||||
userInfo:nil]];
|
||||
}
|
||||
CFRelease(photoSampleBuffer);
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)captureOutput:(AVCapturePhotoOutput *)output
|
||||
didFinishProcessingPhoto:(nonnull AVCapturePhoto *)photo
|
||||
error:(nullable NSError *)error NS_AVAILABLE_IOS(11_0)
|
||||
{
|
||||
SCTraceStart();
|
||||
@weakify(self);
|
||||
[_performer performImmediatelyIfCurrentPerformer:^{
|
||||
SCTraceStart();
|
||||
@strongify(self);
|
||||
SC_GUARD_ELSE_RETURN(self);
|
||||
NSData *imageData = [photo fileDataRepresentation];
|
||||
SC_GUARD_ELSE_RUN_AND_RETURN(imageData, [self _photoCaptureDidFailWithError:error]);
|
||||
if (self->_status == SCManagedPhotoCapturerStatusWillCapture) {
|
||||
if (@available(ios 11.0, *)) {
|
||||
if (_portraitModeCaptureEnabled) {
|
||||
RenderData renderData = {
|
||||
.depthDataMap = photo.depthData.depthDataMap,
|
||||
.depthBlurPointOfInterest = &_portraitModePointOfInterest,
|
||||
};
|
||||
imageData = [_depthBlurFilter renderWithPhotoData:imageData renderData:renderData];
|
||||
}
|
||||
}
|
||||
|
||||
[[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
|
||||
uniqueId:@"IMAGE"
|
||||
splitPoint:@"DID_FINISH_PROCESSING"];
|
||||
|
||||
[self _capturePhotoFinishedWithImageData:imageData metadata:photo.metadata error:error];
|
||||
|
||||
} else {
|
||||
SCLogCoreCameraInfo(@"DidFinishProcessingPhoto with unexpected status: %@",
|
||||
[self _photoCapturerStatusToString:self->_status]);
|
||||
[self _photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
|
||||
code:kSCManagedPhotoCapturerInconsistentStatus
|
||||
userInfo:nil]];
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
|
||||
willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
|
||||
{
|
||||
SCTraceStart();
|
||||
@weakify(self);
|
||||
[_performer performImmediatelyIfCurrentPerformer:^{
|
||||
SCTraceStart();
|
||||
@strongify(self);
|
||||
SC_GUARD_ELSE_RETURN(self);
|
||||
if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) {
|
||||
if (self->_status == SCManagedPhotoCapturerStatusPrepareToCapture) {
|
||||
self->_status = SCManagedPhotoCapturerStatusWillCapture;
|
||||
|
||||
[[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
|
||||
uniqueId:@"IMAGE"
|
||||
splitPoint:@"WILL_BEGIN_CAPTURE"];
|
||||
[self->_delegate managedStillImageCapturerWillCapturePhoto:self];
|
||||
} else {
|
||||
SCLogCoreCameraInfo(@"WillBeginCapture with unexpected status: %@",
|
||||
[self _photoCapturerStatusToString:self->_status]);
|
||||
}
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput
|
||||
didCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
|
||||
{
|
||||
SCTraceStart();
|
||||
@weakify(self);
|
||||
[_performer performImmediatelyIfCurrentPerformer:^{
|
||||
SCTraceStart();
|
||||
@strongify(self);
|
||||
SC_GUARD_ELSE_RETURN(self);
|
||||
if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) {
|
||||
if (self->_status == SCManagedPhotoCapturerStatusWillCapture ||
|
||||
self->_status == SCManagedPhotoCapturerStatusDidFinishProcess) {
|
||||
[[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
|
||||
uniqueId:@"IMAGE"
|
||||
splitPoint:@"DID_CAPTURE_PHOTO"];
|
||||
[self->_delegate managedStillImageCapturerDidCapturePhoto:self];
|
||||
} else {
|
||||
SCLogCoreCameraInfo(@"DidCapturePhoto with unexpected status: %@",
|
||||
[self _photoCapturerStatusToString:self->_status]);
|
||||
}
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
#pragma mark - Private methods
|
||||
|
||||
- (void)_didChangeAdjustingExposure:(BOOL)adjustingExposure withStrategy:(NSString *)strategy
|
||||
{
|
||||
if (!adjustingExposure && self->_shouldCapture) {
|
||||
SCLogCoreCameraInfo(@"Capturing after adjusting exposure using strategy: %@", strategy);
|
||||
[self _capturePhotoWithExposureAdjustmentStrategy:strategy];
|
||||
self->_shouldCapture = NO;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData
|
||||
sampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
cameraInfo:(NSDictionary *)cameraInfo
|
||||
error:(NSError *)error
|
||||
{
|
||||
[self _photoCaptureDidSucceedWithImageData:imageData
|
||||
sampleBuffer:sampleBuffer
|
||||
cameraInfo:cameraInfoForBuffer(sampleBuffer)
|
||||
error:error];
|
||||
self->_status = SCManagedPhotoCapturerStatusDidFinishProcess;
|
||||
}
|
||||
|
||||
- (void)_capturePhotoFinishedWithImageData:(NSData *)imageData metadata:(NSDictionary *)metadata error:(NSError *)error
|
||||
{
|
||||
[self _photoCaptureDidSucceedWithImageData:imageData metadata:metadata error:error];
|
||||
self->_status = SCManagedPhotoCapturerStatusDidFinishProcess;
|
||||
}
|
||||
|
||||
- (void)_deadlineCapturePhoto
|
||||
{
|
||||
SCTraceStart();
|
||||
// Use the SCManagedCapturer's private queue.
|
||||
@weakify(self);
|
||||
[_performer perform:^{
|
||||
@strongify(self);
|
||||
SC_GUARD_ELSE_RETURN(self);
|
||||
if (self->_shouldCapture) {
|
||||
[self _capturePhotoWithExposureAdjustmentStrategy:kSCCameraExposureAdjustmentStrategyDeadline];
|
||||
self->_shouldCapture = NO;
|
||||
}
|
||||
}
|
||||
after:SCCameraTweaksExposureDeadline()];
|
||||
}
|
||||
|
||||
- (void)_capturePhotoWithExposureAdjustmentStrategy:(NSString *)strategy
|
||||
{
|
||||
SCTraceStart();
|
||||
[[SCLogger sharedInstance] logCameraExposureAdjustmentDelayEndWithStrategy:strategy];
|
||||
if (_shouldCaptureFromVideo) {
|
||||
[self captureStillImageFromVideoBuffer];
|
||||
return;
|
||||
}
|
||||
SCAssert([_performer isCurrentPerformer], @"");
|
||||
SCAssert(_photoOutput, @"_photoOutput shouldn't be nil");
|
||||
_status = SCManagedPhotoCapturerStatusPrepareToCapture;
|
||||
AVCapturePhotoOutput *photoOutput = _photoOutput;
|
||||
AVCaptureConnection *captureConnection = [self _captureConnectionFromPhotoOutput:photoOutput];
|
||||
SCManagedCapturerState *state = [_state copy];
|
||||
#if !TARGET_IPHONE_SIMULATOR
|
||||
if (!captureConnection) {
|
||||
sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
|
||||
_completionHandler = nil;
|
||||
completionHandler(nil, nil, [NSError errorWithDomain:kSCManagedStillImageCapturerErrorDomain
|
||||
code:kSCManagedStillImageCapturerNoStillImageConnection
|
||||
userInfo:nil]);
|
||||
}
|
||||
#endif
|
||||
AVCapturePhotoSettings *photoSettings =
|
||||
[self _photoSettingsWithPhotoOutput:photoOutput captureConnection:captureConnection captureState:state];
|
||||
// Select appropriate image capture method
|
||||
|
||||
if ([_delegate managedStillImageCapturerShouldProcessFileInput:self]) {
|
||||
if (!_videoFileMethod) {
|
||||
_videoFileMethod = [[SCStillImageCaptureVideoInputMethod alloc] init];
|
||||
}
|
||||
[[SCLogger sharedInstance] logStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"];
|
||||
[[SCCoreCameraLogger sharedInstance]
|
||||
logCameraCreationDelaySplitPointStillImageCaptureApi:@"SCStillImageCaptureVideoFileInput"];
|
||||
[_delegate managedStillImageCapturerWillCapturePhoto:self];
|
||||
[_videoFileMethod captureStillImageWithCapturerState:state
|
||||
successBlock:^(NSData *imageData, NSDictionary *cameraInfo, NSError *error) {
|
||||
[_performer performImmediatelyIfCurrentPerformer:^{
|
||||
[self _photoCaptureDidSucceedWithImageData:imageData
|
||||
sampleBuffer:nil
|
||||
cameraInfo:cameraInfo
|
||||
error:error];
|
||||
}];
|
||||
}
|
||||
failureBlock:^(NSError *error) {
|
||||
[_performer performImmediatelyIfCurrentPerformer:^{
|
||||
[self _photoCaptureDidFailWithError:error];
|
||||
}];
|
||||
}];
|
||||
} else {
|
||||
[[SCLogger sharedInstance] logStillImageCaptureApi:@"AVCapturePhoto"];
|
||||
[[SCCoreCameraLogger sharedInstance] logCameraCreationDelaySplitPointStillImageCaptureApi:@"AVCapturePhoto"];
|
||||
@try {
|
||||
[photoOutput capturePhotoWithSettings:photoSettings delegate:self];
|
||||
} @catch (NSException *e) {
|
||||
[SCCrashLogger logHandledException:e];
|
||||
[self
|
||||
_photoCaptureDidFailWithError:[NSError errorWithDomain:kSCManagedPhotoCapturerErrorDomain
|
||||
code:kSCManagedPhotoCapturerErrorEncounteredException
|
||||
userInfo:@{
|
||||
@"exception" : e
|
||||
}]];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData
|
||||
sampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
cameraInfo:(NSDictionary *)cameraInfo
|
||||
error:(NSError *)error
|
||||
{
|
||||
SCTraceStart();
|
||||
SCAssert([_performer isCurrentPerformer], @"");
|
||||
[[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
|
||||
[[SCCoreCameraLogger sharedInstance]
|
||||
logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
|
||||
|
||||
UIImage *fullScreenImage = [self imageFromData:imageData
|
||||
currentZoomFactor:_zoomFactor
|
||||
targetAspectRatio:_aspectRatio
|
||||
fieldOfView:_fieldOfView
|
||||
state:_state
|
||||
sampleBuffer:sampleBuffer];
|
||||
[[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
|
||||
uniqueId:@"IMAGE"
|
||||
splitPoint:@"WILL_START_COMPLETION_HANDLER"];
|
||||
sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
|
||||
_completionHandler = nil;
|
||||
if (completionHandler) {
|
||||
completionHandler(fullScreenImage, cameraInfo, error);
|
||||
}
|
||||
}
|
||||
|
||||
- (void)_photoCaptureDidSucceedWithImageData:(NSData *)imageData
|
||||
metadata:(NSDictionary *)metadata
|
||||
error:(NSError *)error
|
||||
{
|
||||
SCTraceStart();
|
||||
SCAssert([_performer isCurrentPerformer], @"");
|
||||
[[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
|
||||
[[SCCoreCameraLogger sharedInstance]
|
||||
logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
|
||||
|
||||
UIImage *fullScreenImage = [self imageFromData:imageData
|
||||
currentZoomFactor:_zoomFactor
|
||||
targetAspectRatio:_aspectRatio
|
||||
fieldOfView:_fieldOfView
|
||||
state:_state
|
||||
metadata:metadata];
|
||||
[[SCLogger sharedInstance] updateLogTimedEvent:kSCCameraMetricsRecordingDelay
|
||||
uniqueId:@"IMAGE"
|
||||
splitPoint:@"WILL_START_COMPLETION_HANDLER"];
|
||||
sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
|
||||
_completionHandler = nil;
|
||||
if (completionHandler) {
|
||||
completionHandler(fullScreenImage, metadata, error);
|
||||
}
|
||||
}
|
||||
|
||||
- (void)_photoCaptureDidFailWithError:(NSError *)error
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
    _completionHandler = nil;
    if (completionHandler) {
        completionHandler(nil, nil, error);
    }
}

- (AVCaptureConnection *)_captureConnectionFromPhotoOutput:(AVCapturePhotoOutput *)photoOutput
{
    SCTraceStart();
    SCAssert([_performer isCurrentPerformer], @"");
    NSArray *connections = [photoOutput.connections copy];
    for (AVCaptureConnection *connection in connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                return connection;
            }
        }
    }
    return nil;
}

- (AVCapturePhotoSettings *)_photoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
                                        captureConnection:(AVCaptureConnection *)captureConnection
                                             captureState:(SCManagedCapturerState *)state
{
    SCTraceStart();
    if ([self _shouldUseBracketPhotoSettingsWithCaptureState:state]) {
        return [self _bracketPhotoSettingsWithPhotoOutput:photoOutput
                                        captureConnection:captureConnection
                                             captureState:state];
    } else {
        return [self _defaultPhotoSettingsWithPhotoOutput:photoOutput captureState:state];
    }
}

- (BOOL)_shouldUseBracketPhotoSettingsWithCaptureState:(SCManagedCapturerState *)state
{
    // According to Apple documentation, AVCapturePhotoBracketSettings does not support flashMode,
    // autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.
    // Besides, we only use AVCapturePhotoBracketSettings if capture settings need to be set manually.
    return !state.flashActive && !_portraitModeCaptureEnabled &&
        (([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) ||
         [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]);
}

- (AVCapturePhotoSettings *)_defaultPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
                                                    captureState:(SCManagedCapturerState *)state
{
    SCTraceStart();
    // Specify the output file format
    AVCapturePhotoSettings *photoSettings =
        [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecJPEG}];

    // Enable HRSI (high-resolution still image) capture if necessary
    if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {
        photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;
    }

    // Turn on flash if active and supported by the device
    if (state.flashActive && state.flashSupported) {
        photoSettings.flashMode = AVCaptureFlashModeOn;
    }

    // Turn on auto still image stabilization if the output supports it.
    // Setting autoStillImageStabilizationEnabled doesn't seem to take effect during a video
    // capture session, but we enable it anyway since it is harmless.
    if (photoOutput.isStillImageStabilizationSupported) {
        photoSettings.autoStillImageStabilizationEnabled = YES;
    }

    if (_portraitModeCaptureEnabled) {
        if (@available(iOS 11.0, *)) {
            photoSettings.depthDataDeliveryEnabled = YES;
        }
    }

    return photoSettings;
}

- (AVCapturePhotoSettings *)_bracketPhotoSettingsWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput
                                               captureConnection:(AVCaptureConnection *)captureConnection
                                                    captureState:(SCManagedCapturerState *)state
{
    SCTraceStart();
    OSType rawPixelFormatType = [photoOutput.availableRawPhotoPixelFormatTypes.firstObject unsignedIntValue];
    NSArray<AVCaptureBracketedStillImageSettings *> *bracketedSettings =
        [self _bracketSettingsArray:captureConnection withCaptureState:state];
    SCAssert(bracketedSettings.count <= photoOutput.maxBracketedCapturePhotoCount,
             @"Bracket photo count cannot exceed maximum count");
    // Specify the output file format and raw pixel format
    AVCapturePhotoBracketSettings *photoSettings =
        [AVCapturePhotoBracketSettings photoBracketSettingsWithRawPixelFormatType:rawPixelFormatType
                                                                  processedFormat:@{
                                                                      AVVideoCodecKey : AVVideoCodecJPEG
                                                                  }
                                                                bracketedSettings:bracketedSettings];

    // Enable HRSI (high-resolution still image) capture if necessary
    if (photoSettings.isHighResolutionPhotoEnabled != _shouldEnableHRSI) {
        photoSettings.highResolutionPhotoEnabled = _shouldEnableHRSI;
    }

    // If lens stabilization is supported, enable it while the device is moving
    if (photoOutput.isLensStabilizationDuringBracketedCaptureSupported && !photoSettings.isLensStabilizationEnabled &&
        [_delegate managedStillImageCapturerIsUnderDeviceMotion:self]) {
        photoSettings.lensStabilizationEnabled = YES;
    }
    return photoSettings;
}

- (NSArray *)_bracketSettingsArray:(AVCaptureConnection *)stillImageConnection
                  withCaptureState:(SCManagedCapturerState *)state
{
    NSInteger const stillCount = 1;
    NSMutableArray *bracketSettingsArray = [NSMutableArray arrayWithCapacity:stillCount];
    AVCaptureDevice *device = [stillImageConnection inputDevice];
    CMTime exposureDuration = device.exposureDuration;
    if ([SCManagedCaptureDevice isEnhancedNightModeSupported] && state.isNightModeActive) {
        exposureDuration = [self adjustedExposureDurationForNightModeWithCurrentExposureDuration:exposureDuration];
    }
    AVCaptureBracketedStillImageSettings *settings = [AVCaptureManualExposureBracketedStillImageSettings
        manualExposureSettingsWithExposureDuration:exposureDuration
                                               ISO:AVCaptureISOCurrent];
    for (NSInteger i = 0; i < stillCount; i++) {
        [bracketSettingsArray addObject:settings];
    }
    return [bracketSettingsArray copy];
}

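// Note on the night-mode branch above (illustrative, not part of the original commit):
// adjustedExposureDurationForNightModeWithCurrentExposureDuration: (defined on
// SCManagedStillImageCapturer) scales the device's current exposure duration by 1.5x in
// dark lighting and 2.5x in extremely dark lighting. For example, a current exposure of
// 1/30 s becomes 1/20 s (dark) or 1/12 s (extreme dark) before it is handed to
// AVCaptureManualExposureBracketedStillImageSettings.
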
- (NSString *)_photoCapturerStatusToString:(SCManagedPhotoCapturerStatus)status
{
    switch (status) {
        case SCManagedPhotoCapturerStatusPrepareToCapture:
            return @"PhotoCapturerStatusPrepareToCapture";
        case SCManagedPhotoCapturerStatusWillCapture:
            return @"PhotoCapturerStatusWillCapture";
        case SCManagedPhotoCapturerStatusDidFinishProcess:
            return @"PhotoCapturerStatusDidFinishProcess";
    }
}

@end
36
ManagedCapturer/SCManagedRecordedVideo.h
Normal file
@@ -0,0 +1,36 @@
// ed265cb0c346ae35dce70d3fc12a0bd8deae0802
// Generated by the value-object.rb DO NOT EDIT!!

#import <AvailabilityMacros.h>

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

@protocol SCManagedRecordedVideo <NSObject, NSCoding, NSCopying>

@property (nonatomic, copy, readonly) NSURL *videoURL;

@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL;

@property (nonatomic, copy, readonly) UIImage *placeholderImage;

@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera;

@end

@interface SCManagedRecordedVideo : NSObject <SCManagedRecordedVideo>

@property (nonatomic, copy, readonly) NSURL *videoURL;

@property (nonatomic, copy, readonly) NSURL *rawVideoDataFileURL;

@property (nonatomic, copy, readonly) UIImage *placeholderImage;

@property (nonatomic, assign, readonly) BOOL isFrontFacingCamera;

- (instancetype)initWithVideoURL:(NSURL *)videoURL
             rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL
                placeholderImage:(UIImage *)placeholderImage
             isFrontFacingCamera:(BOOL)isFrontFacingCamera;

@end
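
// Usage sketch (illustrative only; the local variable names are assumptions, not part of
// this commit). The designated initializer copies each object property:
//
//   SCManagedRecordedVideo *recordedVideo =
//       [[SCManagedRecordedVideo alloc] initWithVideoURL:videoURL
//                                     rawVideoDataFileURL:rawVideoDataFileURL
//                                        placeholderImage:placeholderImage
//                                     isFrontFacingCamera:YES];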
180
ManagedCapturer/SCManagedRecordedVideo.m
Normal file
@@ -0,0 +1,180 @@
// ed265cb0c346ae35dce70d3fc12a0bd8deae0802
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedRecordedVideo.h"

#import <FastCoding/FastCoder.h>

@implementation SCManagedRecordedVideo

- (instancetype)initWithVideoURL:(NSURL *)videoURL
             rawVideoDataFileURL:(NSURL *)rawVideoDataFileURL
                placeholderImage:(UIImage *)placeholderImage
             isFrontFacingCamera:(BOOL)isFrontFacingCamera
{
    self = [super init];
    if (self) {
        _videoURL = [(NSObject *)videoURL copy];
        _rawVideoDataFileURL = [(NSObject *)rawVideoDataFileURL copy];
        _placeholderImage = [(NSObject *)placeholderImage copy];
        _isFrontFacingCamera = isFrontFacingCamera;
    }
    return self;
}

#pragma mark - NSCopying

- (instancetype)copyWithZone:(NSZone *)zone
{
    // Immutable object, bypass copy
    return self;
}

#pragma mark - NSCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder
{
    self = [super init];
    if (self) {
        _videoURL = [aDecoder decodeObjectForKey:@"videoURL"];
        _rawVideoDataFileURL = [aDecoder decodeObjectForKey:@"rawVideoDataFileURL"];
        _placeholderImage = [aDecoder decodeObjectForKey:@"placeholderImage"];
        _isFrontFacingCamera = [aDecoder decodeBoolForKey:@"isFrontFacingCamera"];
    }
    return self;
}

- (void)encodeWithCoder:(NSCoder *)aCoder
{
    [aCoder encodeObject:_videoURL forKey:@"videoURL"];
    [aCoder encodeObject:_rawVideoDataFileURL forKey:@"rawVideoDataFileURL"];
    [aCoder encodeObject:_placeholderImage forKey:@"placeholderImage"];
    [aCoder encodeBool:_isFrontFacingCamera forKey:@"isFrontFacingCamera"];
}

#pragma mark - FasterCoding

- (BOOL)preferFasterCoding
{
    return YES;
}

- (void)encodeWithFasterCoder:(id<FCFasterCoder>)fasterCoder
{
    [fasterCoder encodeBool:_isFrontFacingCamera];
    [fasterCoder encodeObject:_placeholderImage];
    [fasterCoder encodeObject:_rawVideoDataFileURL];
    [fasterCoder encodeObject:_videoURL];
}

- (void)decodeWithFasterDecoder:(id<FCFasterDecoder>)fasterDecoder
{
    _isFrontFacingCamera = (BOOL)[fasterDecoder decodeBool];
    _placeholderImage = (UIImage *)[fasterDecoder decodeObject];
    _rawVideoDataFileURL = (NSURL *)[fasterDecoder decodeObject];
    _videoURL = (NSURL *)[fasterDecoder decodeObject];
}

- (void)setObject:(id)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 50783861721184594ULL:
            _placeholderImage = (UIImage *)val;
            break;
        case 13152167848358790ULL:
            _rawVideoDataFileURL = (NSURL *)val;
            break;
        case 48945309622713334ULL:
            _videoURL = (NSURL *)val;
            break;
    }
}

- (void)setBool:(BOOL)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 11924284868025312ULL:
            _isFrontFacingCamera = (BOOL)val;
            break;
    }
}

+ (uint64_t)fasterCodingVersion
{
    return 17435789727352013688ULL;
}

+ (uint64_t *)fasterCodingKeys
{
    static uint64_t keys[] = {
        4 /* Total */,
        FC_ENCODE_KEY_TYPE(11924284868025312, FCEncodeTypeBool),
        FC_ENCODE_KEY_TYPE(50783861721184594, FCEncodeTypeObject),
        FC_ENCODE_KEY_TYPE(13152167848358790, FCEncodeTypeObject),
        FC_ENCODE_KEY_TYPE(48945309622713334, FCEncodeTypeObject),
    };
    return keys;
}

#pragma mark - isEqual

- (BOOL)isEqual:(id)object
{
    if (self == object) {
        return YES;
    }
    if (![object isMemberOfClass:[self class]]) {
        return NO;
    }
    SCManagedRecordedVideo *other = (SCManagedRecordedVideo *)object;
    // The pointer comparison short-circuits the common cases (same object, or both nil)
    // before falling back to -isEqual:.
    if (other.videoURL != _videoURL && ![(NSObject *)other.videoURL isEqual:_videoURL]) {
        return NO;
    }
    if (other.rawVideoDataFileURL != _rawVideoDataFileURL &&
        ![(NSObject *)other.rawVideoDataFileURL isEqual:_rawVideoDataFileURL]) {
        return NO;
    }
    if (other.placeholderImage != _placeholderImage &&
        ![(NSObject *)other.placeholderImage isEqual:_placeholderImage]) {
        return NO;
    }
    if (other.isFrontFacingCamera != _isFrontFacingCamera) {
        return NO;
    }
    return YES;
}

- (NSUInteger)hash
{
    NSUInteger subhashes[] = {[_videoURL hash], [_rawVideoDataFileURL hash], [_placeholderImage hash],
                              (NSUInteger)_isFrontFacingCamera};
    NSUInteger result = subhashes[0];
    for (int i = 1; i < 4; i++) {
        // Fold each subhash into the accumulator with a 64-bit shift/xor/multiply
        // bit-mixing step to spread the combined bits.
        unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]);
        base = (~base) + (base << 18);
        base ^= (base >> 31);
        base *= 21;
        base ^= (base >> 11);
        base += (base << 6);
        base ^= (base >> 22);
        result = (NSUInteger)base;
    }
    return result;
}

#pragma mark - Print description in console: lldb> po #{variable name}

- (NSString *)description
{
    NSMutableString *desc = [NSMutableString string];
    [desc appendString:@"{\n"];
    [desc appendFormat:@"\tvideoURL:%@\n", [_videoURL description]];
    [desc appendFormat:@"\trawVideoDataFileURL:%@\n", [_rawVideoDataFileURL description]];
    [desc appendFormat:@"\tplaceholderImage:%@\n", [_placeholderImage description]];
    [desc appendFormat:@"\tisFrontFacingCamera:%@\n", [@(_isFrontFacingCamera) description]];
    [desc appendString:@"}\n"];

    return [desc copy];
}

@end
6
ManagedCapturer/SCManagedRecordedVideo.value
Normal file
@@ -0,0 +1,6 @@
interface SCManagedRecordedVideo
NSURL *videoURL;
NSURL *rawVideoDataFileURL;
UIImage *placeholderImage;
BOOL isFrontFacingCamera;
end
92
ManagedCapturer/SCManagedStillImageCapturer.h
Normal file
@@ -0,0 +1,92 @@
//
//  SCManagedStillImageCapturer.h
//  Snapchat
//
//  Created by Liu Liu on 4/30/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCCoreCameraLogger.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerListener.h"
#import "SCManagedCapturerState.h"
#import "SCManagedDeviceCapacityAnalyzerListener.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>
#import <SCLogger/SCCameraMetrics+ExposureAdjustment.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

SC_EXTERN_C_BEGIN

extern BOOL SCPhotoCapturerIsEnabled(void);

SC_EXTERN_C_END

@protocol SCPerforming;
@protocol SCManagedStillImageCapturerDelegate;
@class SCCaptureResource;

typedef void (^sc_managed_still_image_capturer_capture_still_image_completion_handler_t)(UIImage *fullScreenImage,
                                                                                          NSDictionary *metadata,
                                                                                          NSError *error);

@interface SCManagedStillImageCapturer
    : NSObject <SCManagedDeviceCapacityAnalyzerListener, SCManagedCapturerListener, SCManagedVideoDataSourceListener> {
    SCManagedCapturerState *_state;
    BOOL _shouldCaptureFromVideo;
    BOOL _captureImageFromVideoImmediately;
    CGFloat _aspectRatio;
    float _zoomFactor;
    float _fieldOfView;
    BOOL _adjustingExposureManualDetect;
    sc_managed_still_image_capturer_capture_still_image_completion_handler_t _completionHandler;
}

+ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource;

SC_INIT_AND_NEW_UNAVAILABLE;

@property (nonatomic, weak) id<SCManagedStillImageCapturerDelegate> delegate;

- (void)setupWithSession:(AVCaptureSession *)session;

- (void)setAsOutput:(AVCaptureSession *)session;

- (void)removeAsOutput:(AVCaptureSession *)session;

- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled;

- (void)setPortraitModeCaptureEnabled:(BOOL)enabled;

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest;

- (void)enableStillImageStabilization;

- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler;

- (void)captureStillImageFromVideoBuffer;

@end

@protocol SCManagedStillImageCapturerDelegate <NSObject>

- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer;

- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer;

@optional

- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer;

- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer;

@end
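
// Usage sketch (illustrative only; the locals and argument values are assumptions, not part
// of this commit). A caller obtains the concrete capturer through the factory method and
// receives the result through the completion handler:
//
//   SCManagedStillImageCapturer *capturer =
//       [SCManagedStillImageCapturer capturerWithCaptureResource:captureResource];
//   [capturer captureStillImageWithAspectRatio:aspectRatio
//                                 atZoomFactor:1
//                                  fieldOfView:fieldOfView
//                                        state:state
//                             captureSessionID:captureSessionID
//                       shouldCaptureFromVideo:NO
//                            completionHandler:^(UIImage *image, NSDictionary *metadata, NSError *error) {
//                                // image is already resized/cropped to the requested aspect ratio
//                            }];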
399
ManagedCapturer/SCManagedStillImageCapturer.mm
Normal file
@@ -0,0 +1,399 @@
//
//  SCManagedStillImageCapturer.mm
//  Snapchat
//
//  Created by Liu Liu on 4/30/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedStillImageCapturer.h"

#import "SCCameraSettingUtils.h"
#import "SCCameraTweaks.h"
#import "SCCaptureResource.h"
#import "SCLogger+Camera.h"
#import "SCManagedCaptureSession.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLensAPI.h"
#import "SCManagedFrameHealthChecker.h"
#import "SCManagedLegacyStillImageCapturer.h"
#import "SCManagedPhotoCapturer.h"
#import "SCManagedStillImageCapturerHandler.h"
#import "SCManagedStillImageCapturer_Protected.h"

#import <SCFoundation/NSException+Exceptions.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCPerforming.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCFoundation/UIImage+CVPixelBufferRef.h>
#import <SCLenses/SCLens.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCWebP/UIImage+WebP.h>

#import <ImageIO/ImageIO.h>

NSString *const kSCManagedStillImageCapturerErrorDomain = @"kSCManagedStillImageCapturerErrorDomain";

NSInteger const kSCCameraShutterSoundID = 1108;

#if !TARGET_IPHONE_SIMULATOR
NSInteger const kSCManagedStillImageCapturerNoStillImageConnection = 1101;
#endif
NSInteger const kSCManagedStillImageCapturerApplicationStateBackground = 1102;

// We will do the image capture after 0.4 seconds regardless of whether camera adjustment is still in progress.
NSTimeInterval const kSCManagedStillImageCapturerDeadline = 0.4;
NSTimeInterval const kSCCameraRetryInterval = 0.1;

BOOL SCPhotoCapturerIsEnabled(void)
{
    // Due to the native crash in https://jira.sc-corp.net/browse/CCAM-4904, we guard it >= 10.2
    return SC_AT_LEAST_IOS_10_2;
}

NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer)
{
    CFDictionaryRef exifAttachments =
        (CFDictionaryRef)CMGetAttachment(imageDataSampleBuffer, kCGImagePropertyExifDictionary, NULL);
    float brightness = [retrieveBrightnessFromEXIFAttachments(exifAttachments) floatValue];
    NSInteger ISOSpeedRating = [retrieveISOSpeedRatingFromEXIFAttachments(exifAttachments) integerValue];
    return @{
        (__bridge NSString *)kCGImagePropertyExifISOSpeedRatings : @(ISOSpeedRating),
        (__bridge NSString *)kCGImagePropertyExifBrightnessValue : @(brightness)
    };
}

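// Usage sketch for cameraInfoForBuffer (illustrative only; the locals are assumptions, not
// part of this commit). Given a sample buffer from a capture callback:
//
//   NSDictionary *cameraInfo = cameraInfoForBuffer(imageDataSampleBuffer);
//   NSInteger iso = [cameraInfo[(__bridge NSString *)kCGImagePropertyExifISOSpeedRatings] integerValue];
//   float brightness = [cameraInfo[(__bridge NSString *)kCGImagePropertyExifBrightnessValue] floatValue];
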
@implementation SCManagedStillImageCapturer

+ (instancetype)capturerWithCaptureResource:(SCCaptureResource *)captureResource
{
    if (SCPhotoCapturerIsEnabled()) {
        return [[SCManagedPhotoCapturer alloc] initWithSession:captureResource.managedSession.avSession
                                                     performer:captureResource.queuePerformer
                                            lensProcessingCore:captureResource.lensProcessingCore
                                                      delegate:captureResource.stillImageCapturerHandler];
    } else {
        return [[SCManagedLegacyStillImageCapturer alloc] initWithSession:captureResource.managedSession.avSession
                                                                performer:captureResource.queuePerformer
                                                       lensProcessingCore:captureResource.lensProcessingCore
                                                                 delegate:captureResource.stillImageCapturerHandler];
    }
}

- (instancetype)initWithSession:(AVCaptureSession *)session
                      performer:(id<SCPerforming>)performer
             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensAPI
                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate
{
    self = [super init];
    if (self) {
        _session = session;
        _performer = performer;
        _lensAPI = lensAPI;
        _delegate = delegate;
    }
    return self;
}

- (void)setupWithSession:(AVCaptureSession *)session
{
    UNIMPLEMENTED_METHOD;
}

- (void)setAsOutput:(AVCaptureSession *)session
{
    UNIMPLEMENTED_METHOD;
}

- (void)setHighResolutionStillImageOutputEnabled:(BOOL)highResolutionStillImageOutputEnabled
{
    UNIMPLEMENTED_METHOD;
}

- (void)enableStillImageStabilization
{
    UNIMPLEMENTED_METHOD;
}

- (void)removeAsOutput:(AVCaptureSession *)session
{
    UNIMPLEMENTED_METHOD;
}

- (void)setPortraitModeCaptureEnabled:(BOOL)enabled
{
    UNIMPLEMENTED_METHOD;
}

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
    UNIMPLEMENTED_METHOD;
}

- (void)captureStillImageWithAspectRatio:(CGFloat)aspectRatio
                            atZoomFactor:(float)zoomFactor
                             fieldOfView:(float)fieldOfView
                                   state:(SCManagedCapturerState *)state
                        captureSessionID:(NSString *)captureSessionID
                  shouldCaptureFromVideo:(BOOL)shouldCaptureFromVideo
                       completionHandler:
                           (sc_managed_still_image_capturer_capture_still_image_completion_handler_t)completionHandler
{
    UNIMPLEMENTED_METHOD;
}

#pragma mark - SCManagedDeviceCapacityAnalyzerListener

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeAdjustingExposure:(BOOL)adjustingExposure
{
    UNIMPLEMENTED_METHOD;
}

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
           didChangeLightingCondition:(SCCapturerLightingConditionType)lightingCondition
{
    UNIMPLEMENTED_METHOD;
}

#pragma mark - SCManagedCapturerListener

- (void)managedCapturer:(id<SCCapturer>)managedCapturer didChangeAdjustingExposure:(SCManagedCapturerState *)state
{
    UNIMPLEMENTED_METHOD;
}

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
              sampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data]
                                currentZoomFactor:currentZoomFactor
                                targetAspectRatio:targetAspectRatio
                                      fieldOfView:fieldOfView
                                            state:state];
    // Check capture frame health before showing preview
    NSDictionary *metadata =
        [[SCManagedFrameHealthChecker sharedInstance] metadataForSampleBuffer:sampleBuffer
                                                         photoCapturerEnabled:SCPhotoCapturerIsEnabled()
                                                                  lensEnabled:state.lensesActive
                                                                       lensID:[_lensAPI activeLensId]];
    [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage
                                                                       captureSettings:metadata
                                                                      captureSessionID:_captureSessionID];
    _captureSessionID = nil;
    return capturedImage;
}

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
                  metadata:(NSDictionary *)metadata
{
    UIImage *capturedImage = [self imageFromImage:[UIImage sc_imageWithData:data]
                                currentZoomFactor:currentZoomFactor
                                targetAspectRatio:targetAspectRatio
                                      fieldOfView:fieldOfView
                                            state:state];
    // Check capture frame health before showing preview
    NSDictionary *newMetadata =
        [[SCManagedFrameHealthChecker sharedInstance] metadataForMetadata:metadata
                                                     photoCapturerEnabled:SCPhotoCapturerIsEnabled()
                                                              lensEnabled:state.lensesActive
                                                                   lensID:[_lensAPI activeLensId]];
    [[SCManagedFrameHealthChecker sharedInstance] checkImageHealthForCaptureFrameImage:capturedImage
                                                                       captureSettings:newMetadata
                                                                      captureSessionID:_captureSessionID];
    _captureSessionID = nil;
    return capturedImage;
}

- (UIImage *)imageFromImage:(UIImage *)image
          currentZoomFactor:(float)currentZoomFactor
          targetAspectRatio:(CGFloat)targetAspectRatio
                fieldOfView:(float)fieldOfView
                      state:(SCManagedCapturerState *)state
{
    UIImage *fullScreenImage = image;
    if (state.lensesActive && _lensAPI.isLensApplied) {
        fullScreenImage = [_lensAPI processImage:fullScreenImage
                                    maxPixelSize:[_lensAPI maxPixelSize]
                                  devicePosition:state.devicePosition
                                     fieldOfView:fieldOfView];
    }
    // Resize and crop
    return [self resizeImage:fullScreenImage currentZoomFactor:currentZoomFactor targetAspectRatio:targetAspectRatio];
}

- (UIImage *)resizeImage:(UIImage *)image
       currentZoomFactor:(float)currentZoomFactor
       targetAspectRatio:(CGFloat)targetAspectRatio
{
    SCTraceStart();
    if (currentZoomFactor == 1) {
        return SCCropImageToTargetAspectRatio(image, targetAspectRatio);
    } else {
        @autoreleasepool {
            return [self resizeImageUsingCG:image
                          currentZoomFactor:currentZoomFactor
                          targetAspectRatio:targetAspectRatio
                               maxPixelSize:[_lensAPI maxPixelSize]];
        }
    }
}

- (UIImage *)resizeImageUsingCG:(UIImage *)inputImage
              currentZoomFactor:(float)currentZoomFactor
              targetAspectRatio:(CGFloat)targetAspectRatio
                   maxPixelSize:(CGFloat)maxPixelSize
{
    size_t imageWidth = CGImageGetWidth(inputImage.CGImage);
    size_t imageHeight = CGImageGetHeight(inputImage.CGImage);
    SCLogGeneralInfo(@"Captured still image at %dx%d", (int)imageWidth, (int)imageHeight);
    size_t targetWidth, targetHeight;
    float zoomFactor = currentZoomFactor;
    if (imageWidth > imageHeight) {
        targetWidth = maxPixelSize;
        // Adding imageWidth / 2 before dividing rounds to the nearest integer instead of truncating
        targetHeight = (maxPixelSize * imageHeight + imageWidth / 2) / imageWidth;
        // Update zoom factor here
        zoomFactor *= (float)maxPixelSize / imageWidth;
    } else {
        targetHeight = maxPixelSize;
        targetWidth = (maxPixelSize * imageWidth + imageHeight / 2) / imageHeight;
        zoomFactor *= (float)maxPixelSize / imageHeight;
    }
    if (targetAspectRatio != kSCManagedCapturerAspectRatioUnspecified) {
        SCCropImageSizeToAspectRatio(targetWidth, targetHeight, inputImage.imageOrientation, targetAspectRatio,
                                     &targetWidth, &targetHeight);
    }
    CGContextRef context =
        CGBitmapContextCreate(NULL, targetWidth, targetHeight, CGImageGetBitsPerComponent(inputImage.CGImage),
                              CGImageGetBitsPerPixel(inputImage.CGImage) * targetWidth / 8,
                              CGImageGetColorSpace(inputImage.CGImage), CGImageGetBitmapInfo(inputImage.CGImage));
    CGContextSetInterpolationQuality(context, kCGInterpolationHigh);
    CGContextDrawImage(context, CGRectMake(targetWidth * 0.5 - imageWidth * 0.5 * zoomFactor,
                                           targetHeight * 0.5 - imageHeight * 0.5 * zoomFactor, imageWidth * zoomFactor,
                                           imageHeight * zoomFactor),
                       inputImage.CGImage);
    CGImageRef thumbnail = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    UIImage *image =
        [UIImage imageWithCGImage:thumbnail scale:inputImage.scale orientation:inputImage.imageOrientation];
    CGImageRelease(thumbnail);
    return image;
}

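// Worked example of the scaling above (illustrative, not part of the original commit):
// with maxPixelSize = 2048 and a 3264x2448 input, imageWidth > imageHeight, so
// targetWidth = 2048 and targetHeight = (2048 * 2448 + 3264 / 2) / 3264 = 1536, which
// preserves the 4:3 aspect ratio with round-to-nearest integer arithmetic.
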
- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration
{
    CMTime adjustedExposureDuration = exposureDuration;
    if (_lightingConditionType == SCCapturerLightingConditionTypeDark) {
        adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 1.5);
    } else if (_lightingConditionType == SCCapturerLightingConditionTypeExtremeDark) {
        adjustedExposureDuration = CMTimeMultiplyByFloat64(exposureDuration, 2.5);
    }
    return adjustedExposureDuration;
}

#pragma mark - SCManagedVideoDataSourceListener

- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceStart();
    SC_GUARD_ELSE_RETURN(_captureImageFromVideoImmediately);
    _captureImageFromVideoImmediately = NO;
    @weakify(self);
    CFRetain(sampleBuffer);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        [self _didCapturePhotoFromVideoBuffer];
        UIImageOrientation orientation = devicePosition == SCManagedCaptureDevicePositionBack
                                             ? UIImageOrientationRight
                                             : UIImageOrientationLeftMirrored;
        UIImage *videoImage = [UIImage imageWithPixelBufferRef:CMSampleBufferGetImageBuffer(sampleBuffer)
                                                   backingType:UIImageBackingTypeCGImage
                                                   orientation:orientation
                                                       context:[CIContext contextWithOptions:nil]];
        UIImage *fullScreenImage = [self imageFromImage:videoImage
                                      currentZoomFactor:_zoomFactor
                                      targetAspectRatio:_aspectRatio
                                            fieldOfView:_fieldOfView
                                                  state:_state];
        NSMutableDictionary *cameraInfo = [cameraInfoForBuffer(sampleBuffer) mutableCopy];
        cameraInfo[@"capture_image_from_video_buffer"] = @"enabled";
        [self _didFinishProcessingFromVideoBufferWithImage:fullScreenImage cameraInfo:cameraInfo];
        CFRelease(sampleBuffer);
    }];
}

- (void)_willBeginCapturePhotoFromVideoBuffer
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerWillCapturePhoto:)]) {
            [self->_delegate managedStillImageCapturerWillCapturePhoto:self];
        }
    }];
}

- (void)_didCapturePhotoFromVideoBuffer
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        if ([self->_delegate respondsToSelector:@selector(managedStillImageCapturerDidCapturePhoto:)]) {
            [self->_delegate managedStillImageCapturerDidCapturePhoto:self];
        }
    }];
}

- (void)_didFinishProcessingFromVideoBufferWithImage:(UIImage *)image cameraInfo:(NSDictionary *)cameraInfo
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        [[SCLogger sharedInstance] logPreCaptureOperationFinishedAt:CACurrentMediaTime()];
        [[SCCoreCameraLogger sharedInstance]
            logCameraCreationDelaySplitPointPreCaptureOperationFinishedAt:CACurrentMediaTime()];
        sc_managed_still_image_capturer_capture_still_image_completion_handler_t completionHandler = _completionHandler;
        _completionHandler = nil;
        if (completionHandler) {
            completionHandler(image, cameraInfo, nil);
        }
    }];
}

- (void)captureStillImageFromVideoBuffer
{
    SCTraceStart();
    @weakify(self);
    [_performer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        @strongify(self);
        SC_GUARD_ELSE_RETURN(self);
        AudioServicesPlaySystemSoundWithCompletion(kSCCameraShutterSoundID, nil);
        [self _willBeginCapturePhotoFromVideoBuffer];
        self->_captureImageFromVideoImmediately = YES;
    }];
}

@end
21
ManagedCapturer/SCManagedStillImageCapturerHandler.h
Normal file
@@ -0,0 +1,21 @@
//
//  SCManagedStillImageCapturerHandler.h
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedStillImageCapturer.h"

#import <Foundation/Foundation.h>

@class SCCaptureResource;
@protocol SCDeviceMotionProvider, SCFileInputDecider;

@interface SCManagedStillImageCapturerHandler : NSObject <SCManagedStillImageCapturerDelegate>

SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;

@end
85
ManagedCapturer/SCManagedStillImageCapturerHandler.m
Normal file
@@ -0,0 +1,85 @@
//
//  SCManagedStillImageCapturerHandler.m
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedStillImageCapturerHandler.h"

#import "SCCaptureResource.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerSampleMetadata.h"
#import "SCManagedCapturerState.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTraceODPCompatible.h>

@interface SCManagedStillImageCapturerHandler () {
    __weak SCCaptureResource *_captureResource;
}

@end

@implementation SCManagedStillImageCapturerHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"");
        _captureResource = captureResource;
    }
    return self;
}

- (void)managedStillImageCapturerWillCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Will capture photo. stillImageCapturer:%@", _captureResource.stillImageCapturer);
    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        if (_captureResource.stillImageCapturer) {
            SCManagedCapturerState *state = [_captureResource.state copy];
            SCManagedCapturerSampleMetadata *sampleMetadata = [[SCManagedCapturerSampleMetadata alloc]
                initWithPresentationTimestamp:kCMTimeZero
                                  fieldOfView:_captureResource.device.fieldOfView];
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                           willCapturePhoto:state
                                             sampleMetadata:sampleMetadata];
            });
        }
    }];
}

- (void)managedStillImageCapturerDidCapturePhoto:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did capture photo. stillImageCapturer:%@", _captureResource.stillImageCapturer);
    [_captureResource.queuePerformer performImmediatelyIfCurrentPerformer:^{
        SCTraceStart();
        if (_captureResource.stillImageCapturer) {
            SCManagedCapturerState *state = [_captureResource.state copy];
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance] didCapturePhoto:state];
            });
        }
    }];
}

- (BOOL)managedStillImageCapturerIsUnderDeviceMotion:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    return _captureResource.deviceMotionProvider.isUnderDeviceMotion;
}

- (BOOL)managedStillImageCapturerShouldProcessFileInput:(SCManagedStillImageCapturer *)managedStillImageCapturer
{
    return _captureResource.fileInputDecider.shouldProcessFileInput;
}

@end
63
ManagedCapturer/SCManagedStillImageCapturer_Protected.h
Normal file
@@ -0,0 +1,63 @@
//
//  SCManagedStillImageCapturer_Protected.h
//  Snapchat
//
//  Created by Chao Pang on 10/4/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

SC_EXTERN_C_BEGIN
extern NSDictionary *cameraInfoForBuffer(CMSampleBufferRef imageDataSampleBuffer);
SC_EXTERN_C_END

extern NSString *const kSCManagedStillImageCapturerErrorDomain;

#if !TARGET_IPHONE_SIMULATOR
extern NSInteger const kSCManagedStillImageCapturerNoStillImageConnection;
#endif
extern NSInteger const kSCManagedStillImageCapturerApplicationStateBackground;

// We will do the image capture after 0.4 seconds regardless of whether camera adjustment is still in progress.
extern NSTimeInterval const kSCManagedStillImageCapturerDeadline;
extern NSTimeInterval const kSCCameraRetryInterval;

@protocol SCManagedCapturerLensAPI;

@interface SCManagedStillImageCapturer () {
  @protected
    id<SCManagedCapturerLensAPI> _lensAPI;
    id<SCPerforming> _performer;
    AVCaptureSession *_session;
    id<SCManagedStillImageCapturerDelegate> __weak _delegate;
    NSString *_captureSessionID;
    SCCapturerLightingConditionType _lightingConditionType;
}

- (instancetype)initWithSession:(AVCaptureSession *)session
                      performer:(id<SCPerforming>)performer
             lensProcessingCore:(id<SCManagedCapturerLensAPI>)lensProcessingCore
                       delegate:(id<SCManagedStillImageCapturerDelegate>)delegate;

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
              sampleBuffer:(CMSampleBufferRef)sampleBuffer;

- (UIImage *)imageFromData:(NSData *)data
         currentZoomFactor:(float)currentZoomFactor
         targetAspectRatio:(CGFloat)targetAspectRatio
               fieldOfView:(float)fieldOfView
                     state:(SCManagedCapturerState *)state
                  metadata:(NSDictionary *)metadata;

- (UIImage *)imageFromImage:(UIImage *)image
          currentZoomFactor:(float)currentZoomFactor
          targetAspectRatio:(CGFloat)targetAspectRatio
                fieldOfView:(float)fieldOfView
                      state:(SCManagedCapturerState *)state;

- (CMTime)adjustedExposureDurationForNightModeWithCurrentExposureDuration:(CMTime)exposureDuration;

@end
24
ManagedCapturer/SCManagedVideoARDataSource.h
Normal file
@@ -0,0 +1,24 @@
//
//  SCManagedVideoARDataSource.h
//  Snapchat
//
//  Created by Eyal Segal on 20/10/2017.
//

#import "SCCapturerDefines.h"

#import <SCCameraFoundation/SCManagedVideoDataSource.h>

#import <ARKit/ARKit.h>

@protocol SCManagedVideoARDataSource <SCManagedVideoDataSource>

@property (atomic, strong) ARFrame *currentFrame NS_AVAILABLE_IOS(11_0);

#ifdef SC_USE_ARKIT_FACE
@property (atomic, strong) AVDepthData *lastDepthData NS_AVAILABLE_IOS(11_0);
#endif

@property (atomic, assign) float fieldOfView NS_AVAILABLE_IOS(11_0);

@end
102
ManagedCapturer/SCManagedVideoCapturer.h
Normal file
@@ -0,0 +1,102 @@
//
//  SCManagedVideoCapturer.h
//  Snapchat
//
//  Created by Liu Liu on 5/1/15.
//  Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedRecordedVideo.h"
#import "SCManagedVideoCapturerOutputSettings.h"
#import "SCVideoCaptureSessionInfo.h"

#import <SCCameraFoundation/SCManagedAudioDataSource.h>
#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>
#import <SCFoundation/SCFuture.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

typedef void (^sc_managed_video_capturer_recording_completion_handler_t)(NSURL *fileURL, NSError *error);

@class SCManagedVideoCapturer, SCTimedTask;

@protocol SCManagedVideoCapturerDelegate <NSObject>

// All these callbacks are invoked on a private queue for video recording channels

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
      didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
      didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    willStopWithRecordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)videoProviderFuture
                          videoSize:(CGSize)videoSize
                   placeholderImage:(UIImage *)placeholderImage
                            session:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo
                        session:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
            didFailWithError:(NSError *)error
                     session:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
     didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
                 didGetError:(NSError *)error
                     forType:(SCManagedVideoCapturerInfoType)type
                     session:(SCVideoCaptureSessionInfo)sessionInfo;

- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer;

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
       presentationTimestamp:(CMTime)presentationTimestamp;

@end

/**
 * AVFoundation backed class that writes frames to an output file. SCManagedVideoCapturer
 * uses SCManagedVideoCapturerOutputSettings to determine output settings. If no output
 * settings are passed in (nil) SCManagedVideoCapturer will fall back on default settings.
 */
@interface SCManagedVideoCapturer : NSObject <SCManagedVideoDataSourceListener, SCManagedAudioDataSource>

/**
 * Returns the output URL that was passed into the beginRecordingToURL method
 */
@property (nonatomic, copy, readonly) NSURL *outputURL;

@property (nonatomic, weak) id<SCManagedVideoCapturerDelegate> delegate;
@property (nonatomic, readonly) SCVideoCaptureSessionInfo activeSession;
@property (nonatomic, assign, readonly) CMTime firstWrittenAudioBufferDelay;
@property (nonatomic, assign, readonly) BOOL audioQueueStarted;

- (instancetype)initWithQueuePerformer:(SCQueuePerformer *)queuePerformer;

- (void)prepareForRecordingWithAudioConfiguration:(SCAudioConfiguration *)configuration;
- (SCVideoCaptureSessionInfo)startRecordingAsynchronouslyWithOutputSettings:
                                 (SCManagedVideoCapturerOutputSettings *)outputSettings
                                                          audioConfiguration:(SCAudioConfiguration *)audioConfiguration
                                                                 maxDuration:(NSTimeInterval)maxDuration
                                                                       toURL:(NSURL *)URL
                                                                deviceFormat:(AVCaptureDeviceFormat *)deviceFormat
                                                                 orientation:(AVCaptureVideoOrientation)videoOrientation
                                                            captureSessionID:(NSString *)captureSessionID;

- (void)stopRecordingAsynchronously;
- (void)cancelRecordingAsynchronously;

// Schedule a task to run, it is thread safe.
- (void)addTimedTask:(SCTimedTask *)task;

// Clear all tasks, it is thread safe.
- (void)clearTimedTasks;

@end
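
// Recording lifecycle sketch (illustrative only; the locals and argument values are
// assumptions, not part of this commit). A typical sequence, with results delivered
// through the SCManagedVideoCapturerDelegate callbacks declared above:
//
//   SCManagedVideoCapturer *capturer = [[SCManagedVideoCapturer alloc] initWithQueuePerformer:queuePerformer];
//   capturer.delegate = self;
//   [capturer prepareForRecordingWithAudioConfiguration:audioConfiguration];
//   SCVideoCaptureSessionInfo session =
//       [capturer startRecordingAsynchronouslyWithOutputSettings:nil // nil falls back on default settings
//                                              audioConfiguration:audioConfiguration
//                                                     maxDuration:10.0
//                                                           toURL:outputURL
//                                                    deviceFormat:deviceFormat
//                                                     orientation:AVCaptureVideoOrientationPortrait
//                                                captureSessionID:captureSessionID];
//   // ... later: [capturer stopRecordingAsynchronously] or [capturer cancelRecordingAsynchronously]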
1107
ManagedCapturer/SCManagedVideoCapturer.m
Normal file
File diff suppressed because it is too large
20
ManagedCapturer/SCManagedVideoCapturerHandler.h
Normal file
@@ -0,0 +1,20 @@
//
//  SCManagedVideoCapturerHandler.h
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedVideoCapturer.h"

#import <Foundation/Foundation.h>

@class SCCaptureResource;

@interface SCManagedVideoCapturerHandler : NSObject <SCManagedVideoCapturerDelegate>

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource;

@end
252
ManagedCapturer/SCManagedVideoCapturerHandler.m
Normal file
@@ -0,0 +1,252 @@
//
//  SCManagedVideoCapturerHandler.m
//  Snapchat
//
//  Created by Jingtian Yang on 11/12/2017.
//

#import "SCManagedVideoCapturerHandler.h"

#import "SCCaptureResource.h"
#import "SCManagedCaptureDevice+SCManagedCapturer.h"
#import "SCManagedCapturer.h"
#import "SCManagedCapturerLensAPI.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerSampleMetadata.h"
#import "SCManagedCapturerState.h"
#import "SCManagedDeviceCapacityAnalyzer.h"
#import "SCManagedFrontFlashController.h"
#import "SCManagedVideoFileStreamer.h"
#import "SCManagedVideoFrameSampler.h"
#import "SCManagedVideoStreamer.h"

#import <SCCameraFoundation/SCManagedDataSource.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCTraceODPCompatible.h>

@interface SCManagedVideoCapturerHandler () {
    __weak SCCaptureResource *_captureResource;
}
@end

@implementation SCManagedVideoCapturerHandler

- (instancetype)initWithCaptureResource:(SCCaptureResource *)captureResource
{
    self = [super init];
    if (self) {
        SCAssert(captureResource, @"");
        _captureResource = captureResource;
    }
    return self;
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
      didBeginVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did begin video recording. sessionId:%u", sessionInfo.sessionId);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                 didBeginVideoRecording:state
                                                session:sessionInfo];
        });
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
      didBeginAudioRecording:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did begin audio recording. sessionId:%u", sessionInfo.sessionId);
    [_captureResource.queuePerformer perform:^{
        if ([_captureResource.fileInputDecider shouldProcessFileInput]) {
            [_captureResource.videoDataSource startStreaming];
        }
        SCTraceStart();
        SCManagedCapturerState *state = [_captureResource.state copy];
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                 didBeginAudioRecording:state
                                                session:sessionInfo];
        });
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    willStopWithRecordedVideoFuture:(SCFuture<id<SCManagedRecordedVideo>> *)recordedVideoFuture
                          videoSize:(CGSize)videoSize
                   placeholderImage:(UIImage *)placeholderImage
                            session:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Will stop recording. sessionId:%u placeHolderImage:%@ videoSize:(%f, %f)",
                      sessionInfo.sessionId, placeholderImage, videoSize.width, videoSize.height);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.videoRecording) {
            SCManagedCapturerState *state = [_captureResource.state copy];
            // Then, sync back to main thread to notify will finish recording
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                        willFinishRecording:state
                                                    session:sessionInfo
                                        recordedVideoFuture:recordedVideoFuture
                                                  videoSize:videoSize
                                           placeholderImage:placeholderImage];
            });
        }
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
    didSucceedWithRecordedVideo:(SCManagedRecordedVideo *)recordedVideo
                        session:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did succeed recording. sessionId:%u recordedVideo:%@", sessionInfo.sessionId, recordedVideo);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.videoRecording) {
            [self _videoRecordingCleanup];
            SCManagedCapturerState *state = [_captureResource.state copy];
            // Then, sync back to main thread to notify the finish recording
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                         didFinishRecording:state
                                                    session:sessionInfo
                                              recordedVideo:recordedVideo];
            });
        }
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
            didFailWithError:(NSError *)error
                     session:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did fail recording. sessionId:%u", sessionInfo.sessionId);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.videoRecording) {
            [self _videoRecordingCleanup];
            SCManagedCapturerState *state = [_captureResource.state copy];
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                           didFailRecording:state
                                                    session:sessionInfo
                                                      error:error];
            });
        }
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
     didCancelVideoRecording:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did cancel recording. sessionId:%u", sessionInfo.sessionId);
    [_captureResource.queuePerformer perform:^{
        SCTraceStart();
        if (_captureResource.videoRecording) {
            [self _videoRecordingCleanup];
            SCManagedCapturerState *state = [_captureResource.state copy];
            runOnMainThreadAsynchronously(^{
                [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                         didCancelRecording:state
                                                    session:sessionInfo];
            });
        }
    }];
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
                 didGetError:(NSError *)error
                     forType:(SCManagedVideoCapturerInfoType)type
                     session:(SCVideoCaptureSessionInfo)sessionInfo
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Did get error. sessionId:%u errorType:%lu, error:%@", sessionInfo.sessionId, (long)type, error);
    [_captureResource.queuePerformer perform:^{
        runOnMainThreadAsynchronously(^{
            [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                                            didGetError:error
                                                forType:type
                                                session:sessionInfo];
        });
    }];
}

- (NSDictionary *)managedVideoCapturerGetExtraFrameHealthInfo:(SCManagedVideoCapturer *)managedVideoCapturer
{
    SCTraceODPCompatibleStart(2);
    if (_captureResource.state.lensesActive) {
        return @{
            @"lens_active" : @(YES),
            @"lens_id" : ([_captureResource.lensProcessingCore activeLensId] ?: [NSNull null])
        };
    }
    return nil;
}

- (void)managedVideoCapturer:(SCManagedVideoCapturer *)managedVideoCapturer
  didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
       presentationTimestamp:(CMTime)presentationTimestamp
{
    CFRetain(sampleBuffer);
    [_captureResource.queuePerformer perform:^{
        SCManagedCapturerSampleMetadata *sampleMetadata =
            [[SCManagedCapturerSampleMetadata alloc] initWithPresentationTimestamp:presentationTimestamp
                                                                       fieldOfView:_captureResource.device.fieldOfView];
        [_captureResource.announcer managedCapturer:[SCManagedCapturer sharedInstance]
                         didAppendVideoSampleBuffer:sampleBuffer
                                     sampleMetadata:sampleMetadata];
        CFRelease(sampleBuffer);
    }];
}

- (void)_videoRecordingCleanup
{
    SCTraceODPCompatibleStart(2);
    SCAssert(_captureResource.videoRecording, @"cleanup function can only be called while the "
                                              @"video recording is still in progress.");
    SCAssert([_captureResource.queuePerformer isCurrentPerformer], @"");
    SCLogCapturerInfo(@"Video recording cleanup. previous state:%@", _captureResource.state);
    [_captureResource.videoDataSource removeListener:_captureResource.videoCapturer];
    if (_captureResource.videoFrameSampler) {
        SCManagedVideoFrameSampler *sampler = _captureResource.videoFrameSampler;
        _captureResource.videoFrameSampler = nil;
        [_captureResource.announcer removeListener:sampler];
    }
    // Add back other listeners to video streamer
    [_captureResource.videoDataSource addListener:_captureResource.deviceCapacityAnalyzer];
    if (!_captureResource.state.torchActive) {
        // We should turn off torch for the device that we specifically turned on
        // for recording
        [_captureResource.device setTorchActive:NO];
        if (_captureResource.state.devicePosition == SCManagedCaptureDevicePositionFront) {
            _captureResource.frontFlashController.torchActive = NO;
        }
    }

    // Unlock focus on both front and back camera if they were locked.
    // Even if ARKit was being used during recording, it'll be shut down by the time we get here
    // So DON'T match the ARKit check we use around [_ setRecording:YES]
    SCManagedCaptureDevice *front = [SCManagedCaptureDevice front];
    SCManagedCaptureDevice *back = [SCManagedCaptureDevice back];
    [front setRecording:NO];
    [back setRecording:NO];
    _captureResource.videoRecording = NO;
    if (_captureResource.state.lensesActive) {
        BOOL modifySource = _captureResource.videoRecording || _captureResource.state.liveVideoStreaming;
        [_captureResource.lensProcessingCore setModifySource:modifySource];
    }
}

@end
27
ManagedCapturer/SCManagedVideoCapturerLogger.h
Normal file
@ -0,0 +1,27 @@
//
//  SCManagedVideoCapturerLogger.h
//  Snapchat
//
//  Created by Pinlin on 12/04/2017.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <Foundation/Foundation.h>

static NSString *const kSCCapturerStartingStepAudioSession = @"audio_session";
static NSString *const kSCCapturerStartingStepTranscodeingVideoBitrate = @"transcoding_video_bitrate";
static NSString *const kSCCapturerStartingStepOutputSettings = @"output_settings";
static NSString *const kSCCapturerStartingStepVideoFrameRawData = @"video_frame_raw_data";
static NSString *const kSCCapturerStartingStepAudioRecording = @"audio_recording";
static NSString *const kSCCapturerStartingStepAssetWriterConfiguration = @"asset_writer_config";
static NSString *const kSCCapturerStartingStepStartingWriting = @"start_writing";
static NSString *const kCapturerStartingTotalDelay = @"total_delay";

@interface SCManagedVideoCapturerLogger : NSObject

- (void)prepareForStartingLog;
- (void)logStartingStep:(NSString *)stepName;
- (void)endLoggingForStarting;
- (void)logEventIfStartingTooSlow;

@end
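Note: a minimal usage sketch of the call sequence implied by the API above (illustrative only, not part of this commit; the call site and step ordering are assumptions):

// Hypothetical call site inside a video capturer's start path.
SCManagedVideoCapturerLogger *logger = [[SCManagedVideoCapturerLogger alloc] init];
[logger prepareForStartingLog];                               // marks t0 and the first checkpoint
// ... configure the audio session ...
[logger logStartingStep:kSCCapturerStartingStepAudioSession]; // records the delta since the last checkpoint
// ... configure output settings, asset writer, etc., logging each step the same way ...
[logger endLoggingForStarting];                               // records start_writing and total_delay
[logger logEventIfStartingTooSlow];                           // emits the metric once, then resets its state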
77
ManagedCapturer/SCManagedVideoCapturerLogger.m
Normal file
@ -0,0 +1,77 @@
//
//  SCManagedVideoCapturerLogger.m
//  Snapchat
//
//  Created by Pinlin on 12/04/2017.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedVideoCapturerLogger.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger.h>

@import QuartzCore;

@interface SCManagedVideoCapturerLogger () {
    // For time-profiling metrics during start of recording
    NSMutableDictionary *_startingStepsDelayTime;
    NSTimeInterval _beginStartTime;
    NSTimeInterval _lastCheckpointTime;
    NSTimeInterval _startedTime;
}

@end

@implementation SCManagedVideoCapturerLogger

- (instancetype)init
{
    self = [super init];
    if (self) {
        _startingStepsDelayTime = [NSMutableDictionary dictionary];
    }
    return self;
}

- (void)prepareForStartingLog
{
    _beginStartTime = CACurrentMediaTime();
    _lastCheckpointTime = _beginStartTime;
    [_startingStepsDelayTime removeAllObjects];
}

- (void)logStartingStep:(NSString *)stepName
{
    SCAssert(_beginStartTime > 0, @"logger is not ready yet, call prepareForStartingLog first");
    NSTimeInterval currentCheckpointTime = CACurrentMediaTime();
    _startingStepsDelayTime[stepName] = @(currentCheckpointTime - _lastCheckpointTime);
    _lastCheckpointTime = currentCheckpointTime;
}

- (void)endLoggingForStarting
{
    SCAssert(_beginStartTime > 0, @"logger is not ready yet, call prepareForStartingLog first");
    _startedTime = CACurrentMediaTime();
    [self logStartingStep:kSCCapturerStartingStepStartingWriting];
    _startingStepsDelayTime[kCapturerStartingTotalDelay] = @(CACurrentMediaTime() - _beginStartTime);
}

- (void)logEventIfStartingTooSlow
{
    if (_beginStartTime > 0) {
        if (_startingStepsDelayTime.count == 0) {
            // Should not get here; we only need to log once.
            return;
        }
        SCLogGeneralWarning(@"Capturer starting delay (in seconds):%f", _startedTime - _beginStartTime);
        [[SCLogger sharedInstance] logEvent:kSCCameraMetricsVideoCapturerStartDelay parameters:_startingStepsDelayTime];
        // Clear all delay times after logging
        [_startingStepsDelayTime removeAllObjects];
        _beginStartTime = 0;
    }
}

@end
48
ManagedCapturer/SCManagedVideoCapturerOutputSettings.h
Normal file
@ -0,0 +1,48 @@
// 42f6113daff3eebf06d809a073c99651867c42ea
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedVideoCapturerOutputType.h"

#import <AvailabilityMacros.h>

#import <CoreGraphics/CoreGraphics.h>
#import <Foundation/Foundation.h>

@protocol SCManagedVideoCapturerOutputSettings <NSObject, NSCoding, NSCopying>

@property (nonatomic, assign, readonly) CGFloat width;

@property (nonatomic, assign, readonly) CGFloat height;

@property (nonatomic, assign, readonly) CGFloat videoBitRate;

@property (nonatomic, assign, readonly) CGFloat audioBitRate;

@property (nonatomic, assign, readonly) NSUInteger keyFrameInterval;

@property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType;

@end

@interface SCManagedVideoCapturerOutputSettings : NSObject <SCManagedVideoCapturerOutputSettings>

@property (nonatomic, assign, readonly) CGFloat width;

@property (nonatomic, assign, readonly) CGFloat height;

@property (nonatomic, assign, readonly) CGFloat videoBitRate;

@property (nonatomic, assign, readonly) CGFloat audioBitRate;

@property (nonatomic, assign, readonly) NSUInteger keyFrameInterval;

@property (nonatomic, assign, readonly) SCManagedVideoCapturerOutputType outputType;

- (instancetype)initWithWidth:(CGFloat)width
                       height:(CGFloat)height
                 videoBitRate:(CGFloat)videoBitRate
                 audioBitRate:(CGFloat)audioBitRate
             keyFrameInterval:(NSUInteger)keyFrameInterval
                   outputType:(SCManagedVideoCapturerOutputType)outputType;

@end
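Note: a minimal construction sketch for this immutable value object (all numeric values below are illustrative examples, not values taken from this commit):

// Illustrative only: settings for a portrait 720x1280 recording.
SCManagedVideoCapturerOutputSettings *settings =
    [[SCManagedVideoCapturerOutputSettings alloc] initWithWidth:720
                                                         height:1280
                                                   videoBitRate:5000000   // 5 Mbps, example value
                                                   audioBitRate:128000    // 128 kbps, example value
                                               keyFrameInterval:30
                                                     outputType:SCManagedVideoCapturerOutputTypeVideoSnap];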
221
ManagedCapturer/SCManagedVideoCapturerOutputSettings.m
Normal file
@ -0,0 +1,221 @@
// 42f6113daff3eebf06d809a073c99651867c42ea
// Generated by the value-object.rb DO NOT EDIT!!

#import "SCManagedVideoCapturerOutputSettings.h"

#import <SCFoundation/SCValueObjectHelpers.h>

#import <FastCoding/FastCoder.h>

@implementation SCManagedVideoCapturerOutputSettings

static ptrdiff_t sSCManagedVideoCapturerOutputSettingsOffsets[0];
static BOOL sSCManagedVideoCapturerOutputSettingsHasOffsets;

- (instancetype)initWithWidth:(CGFloat)width
                       height:(CGFloat)height
                 videoBitRate:(CGFloat)videoBitRate
                 audioBitRate:(CGFloat)audioBitRate
             keyFrameInterval:(NSUInteger)keyFrameInterval
                   outputType:(SCManagedVideoCapturerOutputType)outputType
{
    self = [super init];
    if (self) {
        _width = width;
        _height = height;
        _videoBitRate = videoBitRate;
        _audioBitRate = audioBitRate;
        _keyFrameInterval = keyFrameInterval;
        _outputType = outputType;
    }
    return self;
}

#pragma mark - NSCopying

- (instancetype)copyWithZone:(NSZone *)zone
{
    // Immutable object, bypass copy
    return self;
}

#pragma mark - NSCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder
{
    self = [super init];
    if (self) {
        _width = [aDecoder decodeFloatForKey:@"width"];
        _height = [aDecoder decodeFloatForKey:@"height"];
        _videoBitRate = [aDecoder decodeFloatForKey:@"videoBitRate"];
        _audioBitRate = [aDecoder decodeFloatForKey:@"audioBitRate"];
        _keyFrameInterval = [[aDecoder decodeObjectForKey:@"keyFrameInterval"] unsignedIntegerValue];
        _outputType = (SCManagedVideoCapturerOutputType)[aDecoder decodeIntegerForKey:@"outputType"];
    }
    return self;
}

- (void)encodeWithCoder:(NSCoder *)aCoder
{
    [aCoder encodeFloat:_width forKey:@"width"];
    [aCoder encodeFloat:_height forKey:@"height"];
    [aCoder encodeFloat:_videoBitRate forKey:@"videoBitRate"];
    [aCoder encodeFloat:_audioBitRate forKey:@"audioBitRate"];
    [aCoder encodeObject:@(_keyFrameInterval) forKey:@"keyFrameInterval"];
    [aCoder encodeInteger:(NSInteger)_outputType forKey:@"outputType"];
}

#pragma mark - FasterCoding

- (BOOL)preferFasterCoding
{
    return YES;
}

- (void)encodeWithFasterCoder:(id<FCFasterCoder>)fasterCoder
{
    [fasterCoder encodeFloat64:_audioBitRate];
    [fasterCoder encodeFloat64:_height];
    [fasterCoder encodeUInt64:_keyFrameInterval];
    [fasterCoder encodeSInt32:_outputType];
    [fasterCoder encodeFloat64:_videoBitRate];
    [fasterCoder encodeFloat64:_width];
}

- (void)decodeWithFasterDecoder:(id<FCFasterDecoder>)fasterDecoder
{
    _audioBitRate = (CGFloat)[fasterDecoder decodeFloat64];
    _height = (CGFloat)[fasterDecoder decodeFloat64];
    _keyFrameInterval = (NSUInteger)[fasterDecoder decodeUInt64];
    _outputType = (SCManagedVideoCapturerOutputType)[fasterDecoder decodeSInt32];
    _videoBitRate = (CGFloat)[fasterDecoder decodeFloat64];
    _width = (CGFloat)[fasterDecoder decodeFloat64];
}

- (void)setSInt32:(int32_t)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 54425104364133881ULL:
            _outputType = (SCManagedVideoCapturerOutputType)val;
            break;
    }
}

- (void)setUInt64:(uint64_t)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 47327990652274883ULL:
            _keyFrameInterval = (NSUInteger)val;
            break;
    }
}

- (void)setFloat64:(double)val forUInt64Key:(uint64_t)key
{
    switch (key) {
        case 50995534680662654ULL:
            _audioBitRate = (CGFloat)val;
            break;
        case 11656660716170763ULL:
            _height = (CGFloat)val;
            break;
        case 29034524155663716ULL:
            _videoBitRate = (CGFloat)val;
            break;
        case 30689178641753681ULL:
            _width = (CGFloat)val;
            break;
    }
}

+ (uint64_t)fasterCodingVersion
{
    return 14709152111692666517ULL;
}

+ (uint64_t *)fasterCodingKeys
{
    static uint64_t keys[] = {
        6 /* Total */,
        FC_ENCODE_KEY_TYPE(50995534680662654, FCEncodeTypeFloat64),
        FC_ENCODE_KEY_TYPE(11656660716170763, FCEncodeTypeFloat64),
        FC_ENCODE_KEY_TYPE(47327990652274883, FCEncodeTypeUInt64),
        FC_ENCODE_KEY_TYPE(54425104364133881, FCEncodeTypeSInt32),
        FC_ENCODE_KEY_TYPE(29034524155663716, FCEncodeTypeFloat64),
        FC_ENCODE_KEY_TYPE(30689178641753681, FCEncodeTypeFloat64),
    };
    return keys;
}

#pragma mark - isEqual

- (BOOL)isEqual:(id)object
{
    if (!SCObjectsIsEqual(self, object, &sSCManagedVideoCapturerOutputSettingsHasOffsets,
                          sSCManagedVideoCapturerOutputSettingsOffsets, 6, 0)) {
        return NO;
    }
    SCManagedVideoCapturerOutputSettings *other = (SCManagedVideoCapturerOutputSettings *)object;
    if (other->_width != _width) {
        return NO;
    }

    if (other->_height != _height) {
        return NO;
    }

    if (other->_videoBitRate != _videoBitRate) {
        return NO;
    }

    if (other->_audioBitRate != _audioBitRate) {
        return NO;
    }

    if (other->_keyFrameInterval != _keyFrameInterval) {
        return NO;
    }

    if (other->_outputType != _outputType) {
        return NO;
    }

    return YES;
}

- (NSUInteger)hash
{
    NSUInteger subhashes[] = {(NSUInteger)_width, (NSUInteger)_height, (NSUInteger)_videoBitRate,
                              (NSUInteger)_audioBitRate, (NSUInteger)_keyFrameInterval, (NSUInteger)_outputType};
    NSUInteger result = subhashes[0];
    for (int i = 1; i < 6; i++) {
        unsigned long long base = (((unsigned long long)result) << 32 | subhashes[i]);
        base = (~base) + (base << 18);
        base ^= (base >> 31);
        base *= 21;
        base ^= (base >> 11);
        base += (base << 6);
        base ^= (base >> 22);
        result = (NSUInteger)base;
    }
    return result;
}

#pragma mark - Print description in console: lldb> po #{variable name}

- (NSString *)description
{
    NSMutableString *desc = [NSMutableString string];
    [desc appendString:@"{\n"];
    [desc appendFormat:@"\twidth:%@\n", [@(_width) description]];
    [desc appendFormat:@"\theight:%@\n", [@(_height) description]];
    [desc appendFormat:@"\tvideoBitRate:%@\n", [@(_videoBitRate) description]];
    [desc appendFormat:@"\taudioBitRate:%@\n", [@(_audioBitRate) description]];
    [desc appendFormat:@"\tkeyFrameInterval:%@\n", [@(_keyFrameInterval) description]];
    [desc appendFormat:@"\toutputType:%@\n", [@(_outputType) description]];
    [desc appendString:@"}\n"];

    return [desc copy];
}

@end
10
ManagedCapturer/SCManagedVideoCapturerOutputSettings.value
Normal file
@ -0,0 +1,10 @@
#import "SCManagedVideoCapturerOutputType.h"

interface SCManagedVideoCapturerOutputSettings
 CGFloat width
 CGFloat height
 CGFloat videoBitRate
 CGFloat audioBitRate
 NSUInteger keyFrameInterval
 enum SCManagedVideoCapturerOutputType outputType
end
14
ManagedCapturer/SCManagedVideoCapturerOutputType.h
Normal file
@ -0,0 +1,14 @@
//
//  SCManagedVideoCapturerOutputType.h
//  Snapchat
//
//  Created by Chao Pang on 8/8/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import <Foundation/Foundation.h>

typedef NS_ENUM(NSInteger, SCManagedVideoCapturerOutputType) {
    SCManagedVideoCapturerOutputTypeVideoSnap = 0,
    SCManagedVideoCapturerOutputTypeVideoNote,
};
25
ManagedCapturer/SCManagedVideoCapturerTimeObserver.h
Normal file
@ -0,0 +1,25 @@
//
//  SCManagedVideoCapturerTimeObserver.h
//  Snapchat
//
//  Created by Michel Loenngren on 4/3/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>

@class SCTimedTask;

/*
 Class keeping track of SCTimedTasks and firing them on the main thread
 when needed.
 */
@interface SCManagedVideoCapturerTimeObserver : NSObject

- (void)addTimedTask:(SCTimedTask *_Nonnull)task;

- (void)processTime:(CMTime)relativePresentationTime
    sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond;

@end
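Note: a minimal usage sketch, assuming SCTimedTask exposes a settable targetTime and a task block matching the (CMTime, CGFloat) signature used by the implementation below; the SCTimedTask construction shown is an assumption, not taken from this commit:

// Illustrative only: fire a block once recording reaches the 2-second mark.
SCManagedVideoCapturerTimeObserver *observer = [[SCManagedVideoCapturerTimeObserver alloc] init];
SCTimedTask *task = [[SCTimedTask alloc] init]; // assumed constructor
task.targetTime = CMTimeMake(2, 1);             // assumed settable property
task.task = ^(CMTime presentationTime, CGFloat sessionStartDelay) {
    NSLog(@"Reached %f s (session start delay %f)", CMTimeGetSeconds(presentationTime), sessionStartDelay);
};
[observer addTimedTask:task];  // must happen before processing starts
// The recording pipeline then calls processTime:... for every appended frame.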
61
ManagedCapturer/SCManagedVideoCapturerTimeObserver.m
Normal file
@ -0,0 +1,61 @@
//
//  SCManagedVideoCapturerTimeObserver.m
//  Snapchat
//
//  Created by Michel Loenngren on 4/3/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedVideoCapturerTimeObserver.h"

#import "SCTimedTask.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCThreadHelpers.h>

@implementation SCManagedVideoCapturerTimeObserver {
    NSMutableArray<SCTimedTask *> *_tasks;
    BOOL _isProcessing;
}

- (instancetype)init
{
    if (self = [super init]) {
        _tasks = [NSMutableArray new];
        _isProcessing = NO;
    }
    return self;
}

- (void)addTimedTask:(SCTimedTask *_Nonnull)task
{
    SCAssert(!_isProcessing,
             @"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask after streaming started.");
    SCAssert(CMTIME_IS_VALID(task.targetTime),
             @"[SCManagedVideoCapturerTimeObserver] Trying to add an SCTimedTask with invalid time.");
    [_tasks addObject:task];
    // Keep tasks sorted by descending target time, so the next task to fire is always the last object.
    [_tasks sortUsingComparator:^NSComparisonResult(SCTimedTask *_Nonnull obj1, SCTimedTask *_Nonnull obj2) {
        return (NSComparisonResult)CMTimeCompare(obj2.targetTime, obj1.targetTime);
    }];
    SCLogGeneralInfo(@"[SCManagedVideoCapturerTimeObserver] Adding task: %@, task count: %lu", task,
                     (unsigned long)_tasks.count);
}

- (void)processTime:(CMTime)relativePresentationTime
    sessionStartTimeDelayInSecond:(CGFloat)sessionStartTimeDelayInSecond
{
    _isProcessing = YES;
    SCTimedTask *last = _tasks.lastObject;
    while (last && last.task && CMTimeCompare(relativePresentationTime, last.targetTime) >= 0) {
        [_tasks removeLastObject];
        void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelay) = last.task;
        last.task = nil;
        runOnMainThreadAsynchronously(^{
            task(relativePresentationTime, sessionStartTimeDelayInSecond);
        });
        last = _tasks.lastObject;
    }
}

@end
26
ManagedCapturer/SCManagedVideoFileStreamer.h
Normal file
@ -0,0 +1,26 @@
//
//  SCManagedVideoFileStreamer.h
//  Snapchat
//
//  Created by Alexander Grytsiuk on 3/4/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import <SCCameraFoundation/SCManagedVideoDataSource.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

typedef void (^sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)(CVPixelBufferRef pixelBuffer);

/**
 * SCManagedVideoFileStreamer reads a video file from the provided NSURL to create
 * and publish video output frames. SCManagedVideoFileStreamer also conforms
 * to SCManagedVideoDataSource, allowing chained consumption of video frames.
 */
@interface SCManagedVideoFileStreamer : NSObject <SCManagedVideoDataSource>

- (instancetype)initWithPlaybackForURL:(NSURL *)URL;
- (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion;

@end
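Note: a minimal usage sketch (the asset name and the listener variable are hypothetical placeholders; addListener: comes from the SCManagedVideoDataSource protocol):

// Illustrative only: stream frames from a bundled video file into a listener.
NSURL *url = [[NSBundle mainBundle] URLForResource:@"sample" withExtension:@"mp4"]; // hypothetical asset
SCManagedVideoFileStreamer *streamer = [[SCManagedVideoFileStreamer alloc] initWithPlaybackForURL:url];
[streamer addListener:myVideoDataSourceListener]; // an id<SCManagedVideoDataSourceListener> you own
[streamer startStreaming];
// Or grab a single frame:
[streamer getNextPixelBufferWithCompletion:^(CVPixelBufferRef pixelBuffer) {
    // Use pixelBuffer here; it is released by the streamer after this block returns.
}];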
299
ManagedCapturer/SCManagedVideoFileStreamer.m
Normal file
@ -0,0 +1,299 @@
//
//  SCManagedVideoFileStreamer.m
//  Snapchat
//
//  Created by Alexander Grytsiuk on 3/4/16.
//  Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCManagedVideoFileStreamer.h"

#import "SCManagedCapturePreviewLayerController.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListenerAnnouncer.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCPlayer.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>

@import AVFoundation;
@import CoreMedia;

static char *const kSCManagedVideoFileStreamerQueueLabel = "com.snapchat.managed-video-file-streamer";

@interface SCManagedVideoFileStreamer () <AVPlayerItemOutputPullDelegate>
@end

@implementation SCManagedVideoFileStreamer {
    SCManagedVideoDataSourceListenerAnnouncer *_announcer;
    SCManagedCaptureDevicePosition _devicePosition;
    sc_managed_video_file_streamer_pixel_buffer_completion_handler_t _nextPixelBufferHandler;

    id _notificationToken;
    id<SCPerforming> _performer;
    dispatch_semaphore_t _semaphore;

    CADisplayLink *_displayLink;
    AVPlayerItemVideoOutput *_videoOutput;
    AVPlayer *_player;

    BOOL _sampleBufferDisplayEnabled;
    id<SCManagedSampleBufferDisplayController> _sampleBufferDisplayController;
}

@synthesize isStreaming = _isStreaming;
@synthesize performer = _performer;
@synthesize videoOrientation = _videoOrientation;

- (instancetype)initWithPlaybackForURL:(NSURL *)URL
{
    SCTraceStart();
    self = [super init];
    if (self) {
        _videoOrientation = AVCaptureVideoOrientationLandscapeRight;
        _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init];
        _semaphore = dispatch_semaphore_create(1);
        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoFileStreamerQueueLabel
                                            qualityOfService:QOS_CLASS_UNSPECIFIED
                                                   queueType:DISPATCH_QUEUE_SERIAL
                                                     context:SCQueuePerformerContextStories];

        // Set up a CADisplayLink which calls back displayLinkCallback: at every vsync.
        _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
        [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];
        [_displayLink setPaused:YES];

        // Prepare player
        _player = [[SCPlayer alloc] initWithPlayerDomain:SCPlayerDomainCameraFileStreamer URL:URL];
#if TARGET_IPHONE_SIMULATOR
        _player.volume = 0.0;
#endif
        // Configure output
        [self configureOutput];
    }
    return self;
}

- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
{
    _sampleBufferDisplayController = sampleBufferDisplayController;
}

- (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled
{
    _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled;
    SCLogGeneralInfo(@"[SCManagedVideoFileStreamer] sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled);
}

- (void)setKeepLateFrames:(BOOL)keepLateFrames
{
    // Do nothing
}

- (BOOL)getKeepLateFrames
{
    // Return the default NO value
    return NO;
}

- (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler
{
    SCAssert(queue, @"callback queue must be provided");
    SCAssert(completionHandler, @"completion handler must be provided");
    dispatch_async(queue, completionHandler);
}

- (void)startStreaming
{
    SCTraceStart();
    if (!_isStreaming) {
        _isStreaming = YES;
        [self addDidPlayToEndTimeNotificationForPlayerItem:_player.currentItem];
        [_player play];
    }
}

- (void)stopStreaming
{
    SCTraceStart();
    if (_isStreaming) {
        _isStreaming = NO;
        [_player pause];
        [self removePlayerObservers];
    }
}

- (void)pauseStreaming
{
    [self stopStreaming];
}

- (void)addListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceStart();
    [_announcer addListener:listener];
}

- (void)removeListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceStart();
    [_announcer removeListener:listener];
}

- (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    _devicePosition = devicePosition;
}

- (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    _devicePosition = devicePosition;
}

- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation
{
    _videoOrientation = videoOrientation;
}

- (void)removeAsOutput:(AVCaptureSession *)session
{
    // Ignored
}

- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported
{
    // Ignored
}

- (void)beginConfiguration
{
    // Ignored
}

- (void)commitConfiguration
{
    // Ignored
}

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
    // Ignored
}

#pragma mark - AVPlayerItemOutputPullDelegate

- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
    if (![_videoOutput hasNewPixelBufferForItemTime:CMTimeMake(1, 10)]) {
        [self configureOutput];
    }
    [_displayLink setPaused:NO];
}

#pragma mark - Internal

- (void)displayLinkCallback:(CADisplayLink *)sender
{
    CFTimeInterval nextVSync = [sender timestamp] + [sender duration];

    CMTime time = [_videoOutput itemTimeForHostTime:nextVSync];
    if (dispatch_semaphore_wait(_semaphore, DISPATCH_TIME_NOW) == 0) {
        [_performer perform:^{
            if ([_videoOutput hasNewPixelBufferForItemTime:time]) {
                CVPixelBufferRef pixelBuffer = [_videoOutput copyPixelBufferForItemTime:time itemTimeForDisplay:NULL];
                if (pixelBuffer != NULL) {
                    if (_nextPixelBufferHandler) {
                        _nextPixelBufferHandler(pixelBuffer);
                        _nextPixelBufferHandler = nil;
                    } else {
                        CMSampleBufferRef sampleBuffer =
                            [self createSampleBufferFromPixelBuffer:pixelBuffer
                                                   presentationTime:CMTimeMake(CACurrentMediaTime() * 1000, 1000)];
                        if (sampleBuffer) {
                            if (_sampleBufferDisplayEnabled) {
                                [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer];
                            }
                            [_announcer managedVideoDataSource:self
                                         didOutputSampleBuffer:sampleBuffer
                                                devicePosition:_devicePosition];
                            CFRelease(sampleBuffer);
                        }
                    }
                    CVBufferRelease(pixelBuffer);
                }
            }
            dispatch_semaphore_signal(_semaphore);
        }];
    }
}

- (CMSampleBufferRef)createSampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer presentationTime:(CMTime)time
{
    CMSampleBufferRef sampleBuffer = NULL;
    CMVideoFormatDescriptionRef formatDesc = NULL;

    OSStatus err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc);
    if (err != noErr) {
        return NULL;
    }

    CMSampleTimingInfo sampleTimingInfo = {kCMTimeInvalid, time, kCMTimeInvalid};
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, formatDesc,
                                       &sampleTimingInfo, &sampleBuffer);

    CFRelease(formatDesc);

    return sampleBuffer;
}

- (void)configureOutput
{
    // Remove the old output
    if (_videoOutput) {
        [[_player currentItem] removeOutput:_videoOutput];
    }

    // Set up AVPlayerItemVideoOutput with the required pixel buffer attributes.
    _videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{
        (id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    }];
    _videoOutput.suppressesPlayerRendering = YES;
    [_videoOutput setDelegate:self queue:_performer.queue];

    // Add the new output
    [[_player currentItem] addOutput:_videoOutput];
    [_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:1.0 / 30.0];
}

- (void)getNextPixelBufferWithCompletion:(sc_managed_video_file_streamer_pixel_buffer_completion_handler_t)completion
{
    _nextPixelBufferHandler = completion;
}

- (void)addDidPlayToEndTimeNotificationForPlayerItem:(AVPlayerItem *)item
{
    if (_notificationToken) {
        _notificationToken = nil;
    }

    _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
    _notificationToken =
        [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerItemDidPlayToEndTimeNotification
                                                          object:item
                                                           queue:[NSOperationQueue mainQueue]
                                                      usingBlock:^(NSNotification *note) {
                                                          [[_player currentItem] seekToTime:kCMTimeZero];
                                                      }];
}

- (void)removePlayerObservers
{
    if (_notificationToken) {
        [[NSNotificationCenter defaultCenter] removeObserver:_notificationToken
                                                        name:AVPlayerItemDidPlayToEndTimeNotification
                                                      object:_player.currentItem];
        _notificationToken = nil;
    }
}

@end
22
ManagedCapturer/SCManagedVideoFrameSampler.h
Normal file
@ -0,0 +1,22 @@
//
//  SCManagedVideoFrameSampler.h
//  Snapchat
//
//  Created by Michel Loenngren on 3/10/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCapturerListener.h"

#import <Foundation/Foundation.h>

/**
 Allows a consumer to register a block to sample the next CMSampleBufferRef and
 automatically leverages Core Image to convert the pixel buffer to a UIImage.
 The returned image will be a copy.
 */
@interface SCManagedVideoFrameSampler : NSObject <SCManagedCapturerListener>

- (void)sampleNextFrame:(void (^)(UIImage *frame, CMTime presentationTime))completeBlock;

@end
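Note: a minimal usage sketch; the sampler only receives frames once registered as a listener on the capturer (the registration call itself is omitted here since it depends on the capturer API in use):

// Illustrative only: grab the next recorded frame as a UIImage.
SCManagedVideoFrameSampler *sampler = [[SCManagedVideoFrameSampler alloc] init];
[sampler sampleNextFrame:^(UIImage *frame, CMTime presentationTime) {
    // Called back on the main thread; frame is nil when the buffer carried no image data.
    NSLog(@"Sampled frame at %f s: %@", CMTimeGetSeconds(presentationTime), frame);
}];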
65
ManagedCapturer/SCManagedVideoFrameSampler.m
Normal file
@ -0,0 +1,65 @@
//
//  SCManagedVideoFrameSampler.m
//  Snapchat
//
//  Created by Michel Loenngren on 3/10/17.
//  Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedVideoFrameSampler.h"

#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/UIImage+CVPixelBufferRef.h>

@import CoreImage;
@import ImageIO;

@interface SCManagedVideoFrameSampler ()

@property (nonatomic, copy) void (^frameSampleBlock)(UIImage *, CMTime);
@property (nonatomic, strong) CIContext *ciContext;

@end

@implementation SCManagedVideoFrameSampler

- (void)sampleNextFrame:(void (^)(UIImage *, CMTime))completeBlock
{
    _frameSampleBlock = completeBlock;
}

#pragma mark - SCManagedCapturerListener

- (void)managedCapturer:(id<SCCapturer>)managedCapturer
    didAppendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
                sampleMetadata:(SCManagedCapturerSampleMetadata *)sampleMetadata
{
    void (^block)(UIImage *, CMTime) = _frameSampleBlock;
    _frameSampleBlock = nil;

    if (!block) {
        return;
    }

    CVImageBufferRef cvImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    UIImage *image;
    if (cvImageBuffer) {
        CGImageRef cgImage = SCCreateCGImageFromPixelBufferRef(cvImageBuffer);
        image = [[UIImage alloc] initWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationRight];
        CGImageRelease(cgImage);
    }
    runOnMainThreadAsynchronously(^{
        block(image, presentationTime);
    });
}

- (CIContext *)ciContext
{
    if (!_ciContext) {
        _ciContext = [CIContext context];
    }
    return _ciContext;
}

@end
44
ManagedCapturer/SCManagedVideoNoSoundLogger.h
Normal file
@ -0,0 +1,44 @@
//
//  SCManagedVideoNoSoundLogger.h
//  Snapchat
//
//  Created by Pinlin Chen on 15/07/2017.
//
//

#import <SCBase/SCMacros.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

@protocol SCManiphestTicketCreator;

@interface SCManagedVideoNoSoundLogger : NSObject

@property (nonatomic, strong) NSError *audioSessionError;
@property (nonatomic, strong) NSError *audioQueueError;
@property (nonatomic, strong) NSError *assetWriterError;
@property (nonatomic, assign) BOOL retryAudioQueueSuccess;
@property (nonatomic, assign) BOOL retryAudioQueueSuccessSetDataSource;
@property (nonatomic, strong) NSString *brokenMicCodeType;
@property (nonatomic, assign) BOOL lenseActiveWhileRecording;
@property (nonatomic, strong) NSString *activeLensId;
@property (nonatomic, assign) CMTime firstWrittenAudioBufferDelay;
@property (nonatomic, assign) BOOL audioQueueStarted;

SC_INIT_AND_NEW_UNAVAILABLE
- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator;

/* Used to count how many no-sound issues we have fixed */
// Call at the place where we previously fixed the AVPlayer leak
+ (void)startCountingVideoNoSoundHaveBeenFixed;

/* Used to report the details of new no-sound issues */
// Reset all the recording-error properties
- (void)resetAll;
// Log if the audio track is empty
- (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL;
// Called by AVCameraViewController when lenses resume audio
- (void)managedLensesProcessorDidCallResumeAllSounds;

@end
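Note: a minimal lifecycle sketch around one recording session (ticketCreator and recordedFileURL are hypothetical placeholders you would own at the call site):

// Illustrative only.
SCManagedVideoNoSoundLogger *noSoundLogger =
    [[SCManagedVideoNoSoundLogger alloc] initWithTicketCreator:ticketCreator];
[noSoundLogger resetAll]; // clear error state before recording starts
// ... record video; assign audioSessionError / audioQueueError / assetWriterError as failures occur ...
[noSoundLogger checkVideoFileAndLogIfNeeded:recordedFileURL]; // logs a metric if the audio track is missing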
283
ManagedCapturer/SCManagedVideoNoSoundLogger.m
Normal file
@ -0,0 +1,283 @@
//
//  SCManagedVideoNoSoundLogger.m
//  Snapchat
//
//  Created by Pinlin Chen on 15/07/2017.
//
//

#import "SCManagedVideoNoSoundLogger.h"

#import "SCManagedCapturer.h"
#import "SCManiphestTicketCreator.h"

#import <SCAudio/SCAudioSession+Debug.h>
#import <SCAudio/SCAudioSession.h>
#import <SCFoundation/NSString+Helpers.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCLogHelper.h>
#import <SCFoundation/SCThreadHelpers.h>
#import <SCFoundation/SCUUID.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger.h>

@import AVFoundation;

static BOOL s_startCountingVideoNoSoundFixed;
// Count the number of no-sound errors for an app session
static NSUInteger s_noSoundCaseCount = 0;

@interface SCManagedVideoNoSoundLogger () {
    BOOL _isAudioSessionDeactivated;
    int _lenseResumeCount;
}

@property (nonatomic) id<SCManiphestTicketCreator> ticketCreator;

@end

@implementation SCManagedVideoNoSoundLogger

- (instancetype)initWithTicketCreator:(id<SCManiphestTicketCreator>)ticketCreator
{
    if (self = [super init]) {
        _ticketCreator = ticketCreator;
    }
    return self;
}

+ (NSUInteger)noSoundCount
{
    return s_noSoundCaseCount;
}

+ (void)increaseNoSoundCount
{
    s_noSoundCaseCount += 1;
}

+ (void)startCountingVideoNoSoundHaveBeenFixed
{
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        s_startCountingVideoNoSoundFixed = YES;
        SCLogGeneralInfo(@"start counting video no sound have been fixed");
    });
}

+ (NSString *)appSessionIdForNoSound
{
    static dispatch_once_t onceToken;
    static NSString *s_AppSessionIdForNoSound = @"SCDefaultSession";
    dispatch_once(&onceToken, ^{
        s_AppSessionIdForNoSound = SCUUID();
    });
    return s_AppSessionIdForNoSound;
}

+ (void)logVideoNoSoundHaveBeenFixedIfNeeded
{
    if (s_startCountingVideoNoSoundFixed) {
        [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                          parameters:@{
                                              @"have_been_fixed" : @"true",
                                              @"fixed_type" : @"player_leak",
                                              @"asset_writer_success" : @"true",
                                              @"audio_session_success" : @"true",
                                              @"audio_queue_success" : @"true",
                                          }
                                    secretParameters:nil
                                             metrics:nil];
    }
}

+ (void)logAudioSessionCategoryHaveBeenFixed
{
    [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                      parameters:@{
                                          @"have_been_fixed" : @"true",
                                          @"fixed_type" : @"audio_session_category_mismatch",
                                          @"asset_writer_success" : @"true",
                                          @"audio_session_success" : @"true",
                                          @"audio_queue_success" : @"true",
                                      }
                                secretParameters:nil
                                         metrics:nil];
}

+ (void)logAudioSessionBrokenMicHaveBeenFixed:(NSString *)type
{
    [[SCLogger sharedInstance]
        logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
               parameters:@{
                   @"have_been_fixed" : @"true",
                   @"fixed_type" : @"broken_microphone",
                   @"asset_writer_success" : @"true",
                   @"audio_session_success" : @"true",
                   @"audio_queue_success" : @"true",
                   @"mic_broken_type" : SC_NULL_STRING_IF_NIL(type),
                   @"audio_session_debug_info" :
                       [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)",
               }
         secretParameters:nil
                  metrics:nil];
}

- (instancetype)init
{
    if (self = [super init]) {
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_audioSessionWillDeactivate)
                                                     name:SCAudioSessionWillDeactivateNotification
                                                   object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_audioSessionDidActivate)
                                                     name:SCAudioSessionActivatedNotification
                                                   object:nil];
        _firstWrittenAudioBufferDelay = kCMTimeInvalid;
    }
    return self;
}

- (void)resetAll
{
    _audioQueueError = nil;
    _audioSessionError = nil;
    _assetWriterError = nil;
    _retryAudioQueueSuccess = NO;
    _retryAudioQueueSuccessSetDataSource = NO;
    _brokenMicCodeType = nil;
    _lenseActiveWhileRecording = NO;
    _lenseResumeCount = 0;
    _activeLensId = nil;
    self.firstWrittenAudioBufferDelay = kCMTimeInvalid;
}

- (void)checkVideoFileAndLogIfNeeded:(NSURL *)videoURL
{
    AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL];

    __block BOOL hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0);

    dispatch_block_t block = ^{

        // Log no-audio issues that have been fixed
        if (hasAudioTrack) {
            if (_retryAudioQueueSuccess) {
                [SCManagedVideoNoSoundLogger logAudioSessionCategoryHaveBeenFixed];
            } else if (_retryAudioQueueSuccessSetDataSource) {
                [SCManagedVideoNoSoundLogger logAudioSessionBrokenMicHaveBeenFixed:_brokenMicCodeType];
            } else {
                [SCManagedVideoNoSoundLogger logVideoNoSoundHaveBeenFixedIfNeeded];
            }
        } else {
            // Log no-audio issues caused by missing permission under "wont_fix_type"; these won't show in Grafana
            BOOL isPermissionGranted =
                [[SCAudioSession sharedInstance] recordPermission] == AVAudioSessionRecordPermissionGranted;
            if (!isPermissionGranted) {
                [SCManagedVideoNoSoundLogger increaseNoSoundCount];
                [[SCLogger sharedInstance]
                    logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                           parameters:@{
                               @"wont_fix_type" : @"no_permission",
                               @"no_sound_count" :
                                   [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)",
                               @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)"
                           }
                     secretParameters:nil
                              metrics:nil];

            }
            // Log no-audio issues caused by an occupied microphone (for example a phone call) under
            // "wont_fix_type"; these won't show in Grafana
            // TODO: maybe we should prompt the user about these errors in the future
            else if (_audioSessionError.code == AVAudioSessionErrorInsufficientPriority ||
                     _audioQueueError.code == AVAudioSessionErrorInsufficientPriority) {
                NSDictionary *parameters = @{
                    @"wont_fix_type" : @"microphone_in_use",
                    @"asset_writer_error" : _assetWriterError ? [_assetWriterError description] : @"(null)",
                    @"audio_session_error" : _audioSessionError.userInfo ?: @"(null)",
                    @"audio_queue_error" : _audioQueueError.userInfo ?: @"(null)",
                    @"audio_session_deactivated" : _isAudioSessionDeactivated ? @"true" : @"false",
                    @"audio_session_debug_info" :
                        [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)",
                    @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)",
                    @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)"
                };

                [SCManagedVideoNoSoundLogger increaseNoSoundCount];
                [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                                  parameters:parameters
                                            secretParameters:nil
                                                     metrics:nil];
                [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)];
            } else {
                // Log other new no-audio issues; use "have_been_fixed=false" so they show in Grafana
                NSDictionary *parameters = @{
                    @"have_been_fixed" : @"false",
                    @"asset_writer_error" : _assetWriterError ? [_assetWriterError description] : @"(null)",
                    @"audio_session_error" : _audioSessionError.userInfo ?: @"(null)",
                    @"audio_queue_error" : _audioQueueError.userInfo ?: @"(null)",
                    @"asset_writer_success" : [NSString stringWithBool:_assetWriterError == nil],
                    @"audio_session_success" : [NSString stringWithBool:_audioSessionError == nil],
                    @"audio_queue_success" : [NSString stringWithBool:_audioQueueError == nil],
                    @"audio_session_deactivated" : _isAudioSessionDeactivated ? @"true" : @"false",
                    @"video_duration" : [NSString sc_stringWithFormat:@"%f", CMTimeGetSeconds(asset.duration)],
                    @"is_audio_session_nil" :
                        [[SCAudioSession sharedInstance] noSoundCheckAudioSessionIsNil] ? @"true" : @"false",
                    @"lenses_active" : [NSString stringWithBool:self.lenseActiveWhileRecording],
                    @"active_lense_id" : self.activeLensId ?: @"(null)",
                    @"lense_audio_resume_count" : @(_lenseResumeCount),
                    @"first_audio_buffer_delay" :
                        [NSString sc_stringWithFormat:@"%f", CMTimeGetSeconds(self.firstWrittenAudioBufferDelay)],
                    @"audio_session_debug_info" :
                        [SCAudioSession sharedInstance].lastRecordingRequestDebugInfo ?: @"(null)",
                    @"audio_queue_started" : [NSString stringWithBool:_audioQueueStarted],
                    @"no_sound_count" : [@([SCManagedVideoNoSoundLogger noSoundCount]) stringValue] ?: @"(null)",
                    @"session_id" : [SCManagedVideoNoSoundLogger appSessionIdForNoSound] ?: @"(null)"
                };
                [SCManagedVideoNoSoundLogger increaseNoSoundCount];
                [[SCLogger sharedInstance] logUnsampledEvent:kSCCameraMetricsVideoNoSoundError
                                                  parameters:parameters
                                            secretParameters:nil
                                                     metrics:nil];
                [_ticketCreator createAndFileBetaReport:JSONStringSerializeObjectForLogging(parameters)];
            }
        }
    };
    if (hasAudioTrack) {
        block();
    } else {
        // Wait for all tracks to be loaded, so the metric isn't counted by mistake on a load error
        [asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ]
                             completionHandler:^{
                                 // Return when the tracks couldn't be loaded
                                 NSError *error = nil;
                                 if ([asset statusOfValueForKey:@"tracks" error:&error] != AVKeyValueStatusLoaded ||
                                     error != nil) {
                                     return;
                                 }

                                 // Check the audio track again
                                 hasAudioTrack = ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0);
                                 runOnMainThreadAsynchronously(block);
                             }];
    }
}

- (void)_audioSessionWillDeactivate
{
    _isAudioSessionDeactivated = YES;
}

- (void)_audioSessionDidActivate
{
    _isAudioSessionDeactivated = NO;
}

- (void)managedLensesProcessorDidCallResumeAllSounds
{
    _lenseResumeCount += 1;
}

@end
35
ManagedCapturer/SCManagedVideoScanner.h
Normal file
@ -0,0 +1,35 @@
//
//  SCManagedVideoScanner.h
//  Snapchat
//
//  Created by Liu Liu on 5/5/15.
//  Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCapturer.h"
#import "SCManagedDeviceCapacityAnalyzerListener.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>

#import <Foundation/Foundation.h>

@class SCScanConfiguration;

@interface SCManagedVideoScanner : NSObject <SCManagedVideoDataSourceListener, SCManagedDeviceCapacityAnalyzerListener>

/**
 * Call this method to start scanning; scanning stops automatically when a snapcode is detected
 */
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration;

/**
 * Call this method to stop scanning immediately (it is still possible for a successful scan to complete after this
 * is called)
 */
- (void)stopScanAsynchronously;

- (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration
                        maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration
                                      restCycle:(float)restCycle;

@end
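Note: a minimal usage sketch; the timing values are illustrative examples, and the SCScanConfiguration construction is an assumption (this commit only shows that it carries a scanResultsHandler and a userSession):

// Illustrative only: allow up to 0.1 s of scanning per frame with ~15% rest time.
SCManagedVideoScanner *scanner =
    [[SCManagedVideoScanner alloc] initWithMaxFrameDefaultDuration:0.1
                                           maxFramePassiveDuration:0.5
                                                         restCycle:0.15];
// Register the scanner as a video data source listener so it receives frames, then:
SCScanConfiguration *configuration = [SCScanConfiguration new]; // assumed default-constructible
[scanner startScanAsynchronouslyWithScanConfiguration:configuration];
// ... later, for example when the camera page is dismissed:
[scanner stopScanAsynchronously];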
299
ManagedCapturer/SCManagedVideoScanner.m
Normal file
@ -0,0 +1,299 @@
|
||||
//
|
||||
// SCManagedVideoScanner.m
|
||||
// Snapchat
|
||||
//
|
||||
// Created by Liu Liu on 5/5/15.
|
||||
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
|
||||
//
|
||||
|
||||
#import "SCManagedVideoScanner.h"
|
||||
|
||||
#import "SCScanConfiguration.h"
|
||||
|
||||
#import <SCFeatureSettings/SCFeatureSettingsManager+Property.h>
|
||||
#import <SCFoundation/NSData+Base64.h>
|
||||
#import <SCFoundation/NSString+SCFormat.h>
|
||||
#import <SCFoundation/SCAssertWrapper.h>
|
||||
#import <SCFoundation/SCLog.h>
|
||||
#import <SCFoundation/SCQueuePerformer.h>
|
||||
#import <SCFoundation/SCThreadHelpers.h>
|
||||
#import <SCFoundation/SCTrace.h>
|
||||
#import <SCFoundation/UIDevice+Filter.h>
|
||||
#import <SCLogger/SCLogger.h>
|
||||
#import <SCScanTweaks/SCScanTweaks.h>
|
||||
#import <SCScanner/SCMachineReadableCodeResult.h>
|
||||
#import <SCScanner/SCSnapScanner.h>
|
||||
#import <SCVisualProductSearchTweaks/SCVisualProductSearchTweaks.h>
|
||||
|
||||
// In seconds
|
||||
static NSTimeInterval const kDefaultScanTimeout = 60;
|
||||
|
||||
static const char *kSCManagedVideoScannerQueueLabel = "com.snapchat.scvideoscanningcapturechannel.video.snapcode-scan";
|
||||
|
||||
@interface SCManagedVideoScanner ()
|
||||
|
||||
@end
|
||||
|
||||
@implementation SCManagedVideoScanner {
|
||||
SCSnapScanner *_snapScanner;
|
||||
dispatch_semaphore_t _activeSemaphore;
|
||||
NSTimeInterval _maxFrameDuration; // Used to restrict how many frames the scanner processes
|
||||
NSTimeInterval _maxFrameDefaultDuration;
|
||||
NSTimeInterval _maxFramePassiveDuration;
|
||||
float _restCycleOfBusyCycle;
|
||||
NSTimeInterval _scanStartTime;
|
||||
BOOL _active;
|
||||
BOOL _shouldEmitEvent;
|
||||
dispatch_block_t _completionHandler;
|
||||
NSTimeInterval _scanTimeout;
|
||||
SCManagedCaptureDevicePosition _devicePosition;
|
||||
SCQueuePerformer *_performer;
|
||||
BOOL _adjustingFocus;
|
||||
NSArray *_codeTypes;
|
||||
NSArray *_codeTypesOld;
|
||||
sc_managed_capturer_scan_results_handler_t _scanResultsHandler;
|
||||
|
||||
SCUserSession *_userSession;
|
||||
}
|
||||
|
||||
- (instancetype)initWithMaxFrameDefaultDuration:(NSTimeInterval)maxFrameDefaultDuration
|
||||
maxFramePassiveDuration:(NSTimeInterval)maxFramePassiveDuration
|
||||
restCycle:(float)restCycle
|
||||
{
|
||||
SCTraceStart();
|
||||
self = [super init];
|
||||
if (self) {
|
||||
_snapScanner = [SCSnapScanner sharedInstance];
|
||||
_performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoScannerQueueLabel
|
||||
qualityOfService:QOS_CLASS_UNSPECIFIED
|
||||
queueType:DISPATCH_QUEUE_SERIAL
|
||||
context:SCQueuePerformerContextCamera];
|
||||
_activeSemaphore = dispatch_semaphore_create(0);
|
||||
SCAssert(restCycle >= 0 && restCycle < 1, @"rest cycle should be between 0 to 1");
|
||||
_maxFrameDefaultDuration = maxFrameDefaultDuration;
|
||||
_maxFramePassiveDuration = maxFramePassiveDuration;
|
||||
_restCycleOfBusyCycle = restCycle / (1 - restCycle); // Give CPU time to rest
|
||||
}
|
||||
return self;
|
||||
}
|
||||
#pragma mark - Public methods
|
||||
|
||||
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration
|
||||
{
|
||||
SCTraceStart();
|
||||
[_performer perform:^{
|
||||
_shouldEmitEvent = YES;
|
||||
_completionHandler = nil;
|
||||
_scanResultsHandler = configuration.scanResultsHandler;
|
||||
_userSession = configuration.userSession;
|
||||
_scanTimeout = kDefaultScanTimeout;
|
||||
_maxFrameDuration = _maxFrameDefaultDuration;
|
||||
_codeTypes = [self _scanCodeTypes];
|
||||
_codeTypesOld = @[ @(SCCodeTypeSnapcode18x18Old), @(SCCodeTypeQRCode) ];
|
||||
|
||||
SCTraceStart();
|
||||
// Set the scan start time properly, if we call startScan multiple times while it is active,
|
||||
// This makes sure we can scan long enough.
|
||||
_scanStartTime = CACurrentMediaTime();
|
||||
// we are not active, need to send the semaphore to start the scan
|
||||
if (!_active) {
|
||||
_active = YES;
|
||||
|
||||
// Signal the semaphore that we can start scan!
|
||||
dispatch_semaphore_signal(_activeSemaphore);
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)stopScanAsynchronously
|
||||
{
|
||||
SCTraceStart();
|
||||
[_performer perform:^{
|
||||
SCTraceStart();
|
||||
if (_active) {
|
||||
SCLogScanDebug(@"VideoScanner:stopScanAsynchronously turn off from active");
|
||||
_active = NO;
|
||||
_scanStartTime = 0;
|
||||
_scanResultsHandler = nil;
|
||||
_userSession = nil;
|
||||
} else {
|
||||
SCLogScanDebug(@"VideoScanner:stopScanAsynchronously off already");
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
#pragma mark - Private Methods
|
||||
|
||||
- (void)_handleSnapScanResult:(SCSnapScannedData *)scannedData
|
||||
{
|
||||
if (scannedData.hasScannedData) {
|
||||
if (scannedData.codeType == SCCodeTypeSnapcode18x18 || scannedData.codeType == SCCodeTypeSnapcodeBitmoji ||
|
||||
scannedData.codeType == SCCodeTypeSnapcode18x18Old) {
|
||||
NSString *data = [scannedData.rawData base64EncodedString];
|
||||
NSString *version = [NSString sc_stringWithFormat:@"%i", scannedData.codeTypeMeta];
|
||||
[[SCLogger sharedInstance] logEvent:@"SNAPCODE_18x18_SCANNED_FROM_CAMERA"
|
||||
parameters:@{
|
||||
@"version" : version
|
||||
}
|
||||
secretParameters:@{
|
||||
@"data" : data
|
||||
}];
|
||||
|
||||
if (_completionHandler != nil) {
|
||||
runOnMainThreadAsynchronously(_completionHandler);
|
||||
_completionHandler = nil;
|
||||
}
|
||||
} else if (scannedData.codeType == SCCodeTypeBarcode) {
|
||||
if (!_userSession || !_userSession.featureSettingsManager.barCodeScanEnabled) {
|
||||
return;
|
||||
}
|
||||
NSString *data = scannedData.data;
|
||||
NSString *type = [SCSnapScannedData stringFromBarcodeType:scannedData.codeTypeMeta];
|
||||
[[SCLogger sharedInstance] logEvent:@"BARCODE_SCANNED_FROM_CAMERA"
|
||||
parameters:@{
|
||||
@"type" : type
|
||||
}
|
||||
secretParameters:@{
|
||||
@"data" : data
|
||||
}];
|
||||
} else if (scannedData.codeType == SCCodeTypeQRCode) {
|
||||
if (!_userSession || !_userSession.featureSettingsManager.qrCodeScanEnabled) {
|
||||
return;
|
||||
}
|
||||
NSURL *url = [NSURL URLWithString:scannedData.data];
|
||||
[[SCLogger sharedInstance] logEvent:@"QR_CODE_SCANNED_FROM_CAMERA"
|
||||
parameters:@{
|
||||
@"type" : (url) ? @"url" : @"other"
|
||||
}
|
||||
secretParameters:@{}];
|
||||
}
|
||||
|
||||
if (_shouldEmitEvent) {
|
||||
sc_managed_capturer_scan_results_handler_t scanResultsHandler = _scanResultsHandler;
|
||||
runOnMainThreadAsynchronously(^{
|
||||
if (scanResultsHandler != nil && scannedData) {
|
||||
SCMachineReadableCodeResult *result =
|
||||
[SCMachineReadableCodeResult machineReadableCodeResultWithScannedData:scannedData];
|
||||
scanResultsHandler(result);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
- (NSArray *)_scanCodeTypes
|
||||
{
|
||||
// Scan types are defined by codetypes. SnapScan will scan the frame based on codetype.
|
||||
NSMutableArray *codeTypes = [[NSMutableArray alloc]
|
||||
initWithObjects:@(SCCodeTypeSnapcode18x18), @(SCCodeTypeQRCode), @(SCCodeTypeSnapcodeBitmoji), nil];
|
||||
if (SCSearchEnableBarcodeProductSearch()) {
|
||||
[codeTypes addObject:@(SCCodeTypeBarcode)];
|
||||
}
|
||||
return [codeTypes copy];
|
||||
}
|
||||
|
||||
#pragma mark - SCManagedVideoDataSourceListener
|
||||
|
||||
- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
|
||||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
devicePosition:(SCManagedCaptureDevicePosition)devicePosition
|
||||
{
|
||||
SCTraceStart();
|
||||
_devicePosition = devicePosition;
|
||||
|
||||
if (!_active) {
|
||||
SCLogScanDebug(@"VideoScanner: Scanner is not active");
|
||||
return;
|
||||
}
|
||||
SCLogScanDebug(@"VideoScanner: Scanner is active");
|
||||
|
||||
// If we have the semaphore now, enqueue a new buffer, otherwise drop the buffer
|
||||
if (dispatch_semaphore_wait(_activeSemaphore, DISPATCH_TIME_NOW) == 0) {
        CFRetain(sampleBuffer);
        NSTimeInterval startTime = CACurrentMediaTime();
        [_performer perform:^{
            SCTraceStart();
            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            SCLogScanInfo(@"VideoScanner: Scanner will scan a frame");
            SCSnapScannedData *scannedData;

            SCLogScanInfo(@"VideoScanner:Use new scanner without false alarm check");
            scannedData = [_snapScanner scanPixelBuffer:pixelBuffer forCodeTypes:_codeTypes];

            if ([UIDevice shouldLogPerfEvents]) {
                NSInteger loadingMs = (CACurrentMediaTime() - startTime) * 1000;
                // Since there are too many unsuccessful scans, we will only log 1/10 of them for now.
                if (scannedData.hasScannedData || (!scannedData.hasScannedData && arc4random() % 10 == 0)) {
                    [[SCLogger sharedInstance] logEvent:@"SCAN_SINGLE_FRAME"
                                             parameters:@{
                                                 @"time_span" : @(loadingMs),
                                                 @"has_scanned_data" : @(scannedData.hasScannedData),
                                             }];
                }
            }

            [self _handleSnapScanResult:scannedData];
            // If it is not turned off, we will continue to scan if a result is not present
            if (_active) {
                _active = !scannedData.hasScannedData;
            }

            // Clean up if a result is reported for the scan
            if (!_active) {
                _scanResultsHandler = nil;
                _completionHandler = nil;
            }

            CFRelease(sampleBuffer);

            NSTimeInterval currentTime = CACurrentMediaTime();
            SCLogScanInfo(@"VideoScanner:Scan time %f maxFrameDuration:%f timeout:%f", currentTime - startTime,
                          _maxFrameDuration, _scanTimeout);
            // Haven't found the scanned data yet, haven't reached maximum scan timeout yet, haven't turned this off
            // yet, ready for the next frame
            if (_active && currentTime < _scanStartTime + _scanTimeout) {
                // We've finished processing the current sample buffer and are ready for the next one, but before
                // that, we need to rest a bit (if possible)
                if (currentTime - startTime >= _maxFrameDuration && _restCycleOfBusyCycle < FLT_MIN) {
                    // If we already reached the deadline (used too much time) and don't want to rest the CPU, give
                    // the signal now to grab the next frame
                    SCLogScanInfo(@"VideoScanner:Signal to get next frame for snapcode scanner");
                    dispatch_semaphore_signal(_activeSemaphore);
                } else {
                    NSTimeInterval afterTime = MAX((currentTime - startTime) * _restCycleOfBusyCycle,
                                                   _maxFrameDuration - (currentTime - startTime));
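                    // Worked example with illustrative numbers (not from this code): if the scan took 0.02s,
                    // _restCycleOfBusyCycle is 2.0 and _maxFrameDuration is 1/15s, afterTime = MAX(0.04, ~0.047),
                    // so the performer rests ~0.047s before signaling for the next frame.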
                    // If we need to wait more than 0 seconds, then do that, otherwise grab the next frame immediately
                    if (afterTime > 0) {
                        [_performer perform:^{
                            SCLogScanInfo(
                                @"VideoScanner:Waited and now signaling to get next frame for snapcode scanner");
                            dispatch_semaphore_signal(_activeSemaphore);
                        }
                                      after:afterTime];
                    } else {
                        SCLogScanInfo(@"VideoScanner:Now signaling to get next frame for snapcode scanner");
                        dispatch_semaphore_signal(_activeSemaphore);
                    }
                }
            } else {
                // We are not active, and not going to be active any more.
                SCLogScanInfo(@"VideoScanner:not active anymore");
                _active = NO;
                _scanResultsHandler = nil;
                _completionHandler = nil;
            }
        }];
    }
}

#pragma mark - SCManagedDeviceCapacityAnalyzerListener

- (void)managedDeviceCapacityAnalyzer:(SCManagedDeviceCapacityAnalyzer *)managedDeviceCapacityAnalyzer
              didChangeAdjustingFocus:(BOOL)adjustingFocus
{
    [_performer perform:^{
        _adjustingFocus = adjustingFocus;
    }];
}

@end
15
ManagedCapturer/SCManagedVideoStreamReporter.h
Normal file
15
ManagedCapturer/SCManagedVideoStreamReporter.h
Normal file
@ -0,0 +1,15 @@
|
||||
//
// SCManagedVideoStreamReporter.h
// Snapchat
//
// Created by Liu Liu on 5/16/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>

#import <Foundation/Foundation.h>

@interface SCManagedVideoStreamReporter : NSObject <SCManagedVideoDataSourceListener>

@end
58
ManagedCapturer/SCManagedVideoStreamReporter.m
Normal file
58
ManagedCapturer/SCManagedVideoStreamReporter.m
Normal file
@ -0,0 +1,58 @@
|
||||
//
// SCManagedVideoStreamReporter.m
// Snapchat
//
// Created by Liu Liu on 5/16/15.
// Copyright (c) 2015 Snapchat, Inc. All rights reserved.
//

#import "SCManagedVideoStreamReporter.h"

#import <SCFoundation/SCLog.h>
#import <SCLogger/SCLogger.h>

static NSTimeInterval const SCManagedVideoStreamReporterInterval = 10;

@implementation SCManagedVideoStreamReporter {
    NSUInteger _droppedSampleBuffers;
    NSUInteger _outputSampleBuffers;
    NSTimeInterval _lastReportTime;
}

- (instancetype)init
{
    self = [super init];
    if (self) {
        _lastReportTime = CACurrentMediaTime();
    }
    return self;
}

- (void)_reportIfNeeded
{
    NSTimeInterval currentTime = CACurrentMediaTime();
    if (currentTime - _lastReportTime > SCManagedVideoStreamReporterInterval) {
        SCLogGeneralInfo(@"Time: (%.3f - %.3f], Video Streamer Dropped %tu, Output %tu", _lastReportTime, currentTime,
                         _droppedSampleBuffers, _outputSampleBuffers);
        _droppedSampleBuffers = _outputSampleBuffers = 0;
        _lastReportTime = currentTime;
    }
}

- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    ++_outputSampleBuffers;
    [self _reportIfNeeded];
}

- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
           didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    ++_droppedSampleBuffers;
    [self _reportIfNeeded];
}

@end
36
ManagedCapturer/SCManagedVideoStreamer.h
Normal file
36
ManagedCapturer/SCManagedVideoStreamer.h
Normal file
@ -0,0 +1,36 @@
|
||||
//
// SCManagedVideoStreamer.h
// Snapchat
//
// Created by Liu Liu on 4/30/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedVideoARDataSource.h"

#import <SCCameraFoundation/SCManagedVideoDataSource.h>

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

@class ARSession;

/**
 * SCManagedVideoStreamer uses the current AVCaptureSession to create
 * and publish video output frames. SCManagedVideoStreamer also conforms
 * to SCManagedVideoDataSource allowing chained consumption of video frames.
 */
@interface SCManagedVideoStreamer : NSObject <SCManagedVideoDataSource, SCManagedVideoARDataSource>

- (instancetype)initWithSession:(AVCaptureSession *)session
                 devicePosition:(SCManagedCaptureDevicePosition)devicePosition;

- (instancetype)initWithSession:(AVCaptureSession *)session
                      arSession:(ARSession *)arSession
                 devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0);

- (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition;

- (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0);

@end
823
ManagedCapturer/SCManagedVideoStreamer.m
Normal file
823
ManagedCapturer/SCManagedVideoStreamer.m
Normal file
@ -0,0 +1,823 @@
|
||||
//
// SCManagedVideoStreamer.m
// Snapchat
//
// Created by Liu Liu on 4/30/15.
// Copyright (c) 2015 Liu Liu. All rights reserved.
//

#import "SCManagedVideoStreamer.h"

#import "ARConfiguration+SCConfiguration.h"
#import "SCCameraTweaks.h"
#import "SCCapturerDefines.h"
#import "SCLogger+Camera.h"
#import "SCManagedCapturePreviewLayerController.h"
#import "SCMetalUtils.h"
#import "SCProcessingPipeline.h"
#import "SCProcessingPipelineBuilder.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListenerAnnouncer.h>
#import <SCFoundation/NSString+SCFormat.h>
#import <SCFoundation/SCLog.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCLogger/SCCameraMetrics.h>

#import <Looksery/Looksery.h>

#import <libkern/OSAtomic.h>
#import <stdatomic.h>

@import ARKit;
@import AVFoundation;

#define SCLogVideoStreamerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__)
#define SCLogVideoStreamerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__)
#define SCLogVideoStreamerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedVideoStreamer] " fmt, ##__VA_ARGS__)

static NSInteger const kSCCaptureFrameRate = 30;
static CGFloat const kSCLogInterval = 3.0;
static char *const kSCManagedVideoStreamerQueueLabel = "com.snapchat.managed-video-streamer";
static char *const kSCManagedVideoStreamerCallbackQueueLabel = "com.snapchat.managed-video-streamer.dequeue";
static NSTimeInterval const kSCManagedVideoStreamerMaxAllowedLatency = 1; // Drop the frame if it is 1 second late.

static NSTimeInterval const kSCManagedVideoStreamerStalledDisplay =
    5; // If the frame is not updated for 5 seconds, it is considered to be stalled.

static NSTimeInterval const kSCManagedVideoStreamerARSessionFramerateCap =
    1.0 / (kSCCaptureFrameRate + 1); // Restrict ARSession to 30fps
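// Using (kSCCaptureFrameRate + 1) in the divisor presumably leaves a little headroom so that frames arriving at
// exactly 30fps still pass the >= elapsed-time check in session:didUpdateFrame: below.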
static int32_t const kSCManagedVideoStreamerMaxProcessingBuffers = 15;

@interface SCManagedVideoStreamer () <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureDepthDataOutputDelegate,
                                      AVCaptureDataOutputSynchronizerDelegate, ARSessionDelegate>

@property (nonatomic, strong) AVCaptureSession *captureSession;

@end

@implementation SCManagedVideoStreamer {
    AVCaptureVideoDataOutput *_videoDataOutput;
    AVCaptureDepthDataOutput *_depthDataOutput NS_AVAILABLE_IOS(11_0);
    AVCaptureDataOutputSynchronizer *_dataOutputSynchronizer NS_AVAILABLE_IOS(11_0);
    BOOL _performingConfigurations;
    SCManagedCaptureDevicePosition _devicePosition;
    BOOL _videoStabilizationEnabledIfSupported;
    SCManagedVideoDataSourceListenerAnnouncer *_announcer;

    BOOL _sampleBufferDisplayEnabled;
    id<SCManagedSampleBufferDisplayController> _sampleBufferDisplayController;
    dispatch_block_t _flushOutdatedPreviewBlock;
    NSMutableArray<NSArray *> *_waitUntilSampleBufferDisplayedBlocks;
    SCProcessingPipeline *_processingPipeline;

    NSTimeInterval _lastDisplayedFrameTimestamp;
#ifdef SC_USE_ARKIT_FACE
    NSTimeInterval _lastDisplayedDepthFrameTimestamp;
#endif

    BOOL _depthCaptureEnabled;
    CGPoint _portraitModePointOfInterest;

    // For sticky video tweaks
    BOOL _keepLateFrames;
    SCQueuePerformer *_callbackPerformer;
    atomic_int _processingBuffersCount;
}

@synthesize isStreaming = _isStreaming;
@synthesize performer = _performer;
@synthesize currentFrame = _currentFrame;
@synthesize fieldOfView = _fieldOfView;
#ifdef SC_USE_ARKIT_FACE
@synthesize lastDepthData = _lastDepthData;
#endif
@synthesize videoOrientation = _videoOrientation;

- (instancetype)initWithSession:(AVCaptureSession *)session
                 devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceStart();
    self = [super init];
    if (self) {
        _sampleBufferDisplayEnabled = YES;
        _announcer = [[SCManagedVideoDataSourceListenerAnnouncer alloc] init];
        // We discard frames to support lenses in real time
        _keepLateFrames = NO;
        _performer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerQueueLabel
                                            qualityOfService:QOS_CLASS_USER_INTERACTIVE
                                                   queueType:DISPATCH_QUEUE_SERIAL
                                                     context:SCQueuePerformerContextCamera];

        _videoOrientation = AVCaptureVideoOrientationLandscapeRight;

        [self setupWithSession:session devicePosition:devicePosition];
        SCLogVideoStreamerInfo(@"init with position:%lu", (unsigned long)devicePosition);
    }
    return self;
}

- (instancetype)initWithSession:(AVCaptureSession *)session
                      arSession:(ARSession *)arSession
                 devicePosition:(SCManagedCaptureDevicePosition)devicePosition NS_AVAILABLE_IOS(11_0)
{
    self = [self initWithSession:session devicePosition:devicePosition];
    if (self) {
        [self setupWithARSession:arSession];
        self.currentFrame = nil;
#ifdef SC_USE_ARKIT_FACE
        self.lastDepthData = nil;
#endif
    }
    return self;
}

- (AVCaptureVideoDataOutput *)_newVideoDataOutput
{
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    // All inbound frames are going to be in the native format of the camera to avoid
    // any need for transcoding.
    output.videoSettings =
        @{(NSString *) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
    return output;
}
- (void)setupWithSession:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    [self stopStreaming];
    self.captureSession = session;
    _devicePosition = devicePosition;

    _videoDataOutput = [self _newVideoDataOutput];
    if (SCDeviceSupportsMetal()) {
        // We default to start streaming if Metal is supported at startup time.
        _isStreaming = YES;
        // Set the sample buffer delegate before starting it.
        [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue];
    }

    if ([session canAddOutput:_videoDataOutput]) {
        [session addOutput:_videoDataOutput];
        [self _enableVideoMirrorForDevicePosition:devicePosition];
    }

    if (SCCameraTweaksEnablePortraitModeButton()) {
        if (@available(iOS 11.0, *)) {
            _depthDataOutput = [[AVCaptureDepthDataOutput alloc] init];
            [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO];
            if ([session canAddOutput:_depthDataOutput]) {
                [session addOutput:_depthDataOutput];
                [_depthDataOutput setDelegate:self callbackQueue:_performer.queue];
            }
            _depthCaptureEnabled = NO;
        }
        _portraitModePointOfInterest = CGPointMake(0.5, 0.5);
    }

    [self setVideoStabilizationEnabledIfSupported:YES];
}

- (void)setupWithARSession:(ARSession *)arSession NS_AVAILABLE_IOS(11_0)
{
    arSession.delegateQueue = _performer.queue;
    arSession.delegate = self;
}

- (void)addSampleBufferDisplayController:(id<SCManagedSampleBufferDisplayController>)sampleBufferDisplayController
{
    [_performer perform:^{
        _sampleBufferDisplayController = sampleBufferDisplayController;
        SCLogVideoStreamerInfo(@"add sampleBufferDisplayController:%@", _sampleBufferDisplayController);
    }];
}

- (void)setSampleBufferDisplayEnabled:(BOOL)sampleBufferDisplayEnabled
{
    [_performer perform:^{
        _sampleBufferDisplayEnabled = sampleBufferDisplayEnabled;
        SCLogVideoStreamerInfo(@"sampleBufferDisplayEnabled set to:%d", _sampleBufferDisplayEnabled);
    }];
}

- (void)waitUntilSampleBufferDisplayed:(dispatch_queue_t)queue completionHandler:(dispatch_block_t)completionHandler
{
    SCAssert(queue, @"callback queue must be provided");
    SCAssert(completionHandler, @"completion handler must be provided");
    SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed queue:%@ completionHandler:%p isStreaming:%d", queue,
                           completionHandler, _isStreaming);
    if (_isStreaming) {
        [_performer perform:^{
            if (!_waitUntilSampleBufferDisplayedBlocks) {
                _waitUntilSampleBufferDisplayedBlocks = [NSMutableArray array];
            }
            [_waitUntilSampleBufferDisplayedBlocks addObject:@[ queue, completionHandler ]];
            SCLogVideoStreamerInfo(@"waitUntilSampleBufferDisplayed add block:%p", completionHandler);
        }];
    } else {
        dispatch_async(queue, completionHandler);
    }
}

- (void)startStreaming
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"start streaming. _isStreaming:%d", _isStreaming);
    if (!_isStreaming) {
        _isStreaming = YES;
        [self _cancelFlushOutdatedPreview];
        if (@available(iOS 11.0, *)) {
            if (_depthCaptureEnabled) {
                [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:YES];
            }
        }
        [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue];
    }
}
- (void)setAsOutput:(AVCaptureSession *)session devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCTraceStart();
    if ([session canAddOutput:_videoDataOutput]) {
        SCLogVideoStreamerInfo(@"add videoDataOutput:%@", _videoDataOutput);
        [session addOutput:_videoDataOutput];
        [self _enableVideoMirrorForDevicePosition:devicePosition];
    } else {
        SCLogVideoStreamerError(@"cannot add videoDataOutput:%@ to session:%@", _videoDataOutput, session);
    }
    [self _enableVideoStabilizationIfSupported];
}

- (void)removeAsOutput:(AVCaptureSession *)session
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"remove videoDataOutput:%@ from session:%@", _videoDataOutput, session);
    [session removeOutput:_videoDataOutput];
}

- (void)_cancelFlushOutdatedPreview
{
    SCLogVideoStreamerInfo(@"cancel flush outdated preview:%p", _flushOutdatedPreviewBlock);
    if (_flushOutdatedPreviewBlock) {
        dispatch_block_cancel(_flushOutdatedPreviewBlock);
        _flushOutdatedPreviewBlock = nil;
    }
}

- (SCQueuePerformer *)callbackPerformer
{
    // If the sticky video tweak is on, use a separate performer queue
    if (_keepLateFrames) {
        if (!_callbackPerformer) {
            _callbackPerformer = [[SCQueuePerformer alloc] initWithLabel:kSCManagedVideoStreamerCallbackQueueLabel
                                                        qualityOfService:QOS_CLASS_USER_INTERACTIVE
                                                               queueType:DISPATCH_QUEUE_SERIAL
                                                                 context:SCQueuePerformerContextCamera];
        }
        return _callbackPerformer;
    }
    return _performer;
}

- (void)pauseStreaming
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"pauseStreaming isStreaming:%d", _isStreaming);
    if (_isStreaming) {
        _isStreaming = NO;
        [_videoDataOutput setSampleBufferDelegate:nil queue:NULL];
        if (@available(iOS 11.0, *)) {
            if (_depthCaptureEnabled) {
                [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO];
            }
        }
        @weakify(self);
        _flushOutdatedPreviewBlock = dispatch_block_create(0, ^{
            SCLogVideoStreamerInfo(@"execute flushOutdatedPreviewBlock");
            @strongify(self);
            SC_GUARD_ELSE_RETURN(self);
            [self->_sampleBufferDisplayController flushOutdatedPreview];
        });
        [_performer perform:_flushOutdatedPreviewBlock
                      after:SCCameraTweaksEnableKeepLastFrameOnCamera() ? kSCManagedVideoStreamerStalledDisplay : 0];
        [_performer perform:^{
            [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed];
        }];
    }
}

- (void)stopStreaming
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"stopStreaming isStreaming:%d", _isStreaming);
    if (_isStreaming) {
        _isStreaming = NO;
        [_videoDataOutput setSampleBufferDelegate:nil queue:NULL];
        if (@available(iOS 11.0, *)) {
            if (_depthCaptureEnabled) {
                [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:NO];
            }
        }
    }
    [self _cancelFlushOutdatedPreview];
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"stopStreaming in perform queue");
        [_sampleBufferDisplayController flushOutdatedPreview];
        [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed];
    }];
}

- (void)beginConfiguration
{
    SCLogVideoStreamerInfo(@"enter beginConfiguration");
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"performingConfigurations set to YES");
        _performingConfigurations = YES;
    }];
}

- (void)setDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCLogVideoStreamerInfo(@"setDevicePosition with newPosition:%lu", (unsigned long)devicePosition);
    [self _enableVideoMirrorForDevicePosition:devicePosition];
    [self _enableVideoStabilizationIfSupported];
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"setDevicePosition in perform queue oldPosition:%lu newPosition:%lu",
                               (unsigned long)_devicePosition, (unsigned long)devicePosition);
        if (_devicePosition != devicePosition) {
            _devicePosition = devicePosition;
        }
    }];
}

- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation
{
    SCTraceStart();
    // It is not necessary to make these changes on the private queue, because this is only data output
    // configuration. It should be called from the managed capturer queue to avoid locking the capture session
    // from two different (private and managed capturer) queues, which would cause a deadlock.
    SCLogVideoStreamerInfo(@"setVideoOrientation oldOrientation:%lu newOrientation:%lu",
                           (unsigned long)_videoOrientation, (unsigned long)videoOrientation);
    _videoOrientation = videoOrientation;
    AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.videoOrientation = _videoOrientation;
}
- (void)setKeepLateFrames:(BOOL)keepLateFrames
{
    SCTraceStart();
    [_performer perform:^{
        SCTraceStart();
        if (keepLateFrames != _keepLateFrames) {
            _keepLateFrames = keepLateFrames;
            // Get and set the corresponding queue based on keepLateFrames.
            // We don't use AVCaptureVideoDataOutput.alwaysDiscardsLateVideoFrames anymore, because it could
            // potentially cause a lenses regression, and we can use all 15 sample buffers by adding a separate
            // callback queue.
            [_videoDataOutput setSampleBufferDelegate:self queue:[self callbackPerformer].queue];
            SCLogVideoStreamerInfo(@"keepLateFrames was set to:%d", keepLateFrames);
        }
    }];
}

- (void)setDepthCaptureEnabled:(BOOL)enabled NS_AVAILABLE_IOS(11_0)
{
    _depthCaptureEnabled = enabled;
    [[_depthDataOutput connectionWithMediaType:AVMediaTypeDepthData] setEnabled:enabled];
    if (enabled) {
        _dataOutputSynchronizer =
            [[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs:@[ _videoDataOutput, _depthDataOutput ]];
        [_dataOutputSynchronizer setDelegate:self queue:_performer.queue];
    } else {
        _dataOutputSynchronizer = nil;
    }
}

- (void)setPortraitModePointOfInterest:(CGPoint)pointOfInterest
{
    _portraitModePointOfInterest = pointOfInterest;
}

- (BOOL)getKeepLateFrames
{
    return _keepLateFrames;
}

- (void)commitConfiguration
{
    SCLogVideoStreamerInfo(@"enter commitConfiguration");
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"performingConfigurations set to NO");
        _performingConfigurations = NO;
    }];
}

- (void)addListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"add listener:%@", listener);
    [_announcer addListener:listener];
}

- (void)removeListener:(id<SCManagedVideoDataSourceListener>)listener
{
    SCTraceStart();
    SCLogVideoStreamerInfo(@"remove listener:%@", listener);
    [_announcer removeListener:listener];
}

- (void)addProcessingPipeline:(SCProcessingPipeline *)processingPipeline
{
    SCLogVideoStreamerInfo(@"enter addProcessingPipeline:%@", processingPipeline);
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"processingPipeline set to %@", processingPipeline);
        _processingPipeline = processingPipeline;
    }];
}

- (void)removeProcessingPipeline
{
    SCLogVideoStreamerInfo(@"enter removeProcessingPipeline");
    [_performer perform:^{
        SCLogVideoStreamerInfo(@"processingPipeline set to nil");
        _processingPipeline = nil;
    }];
}

- (BOOL)isVideoMirrored
{
    SCTraceStart();
    AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    return connection.isVideoMirrored;
}

#pragma mark - Common Sample Buffer Handling

- (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    return [self didOutputSampleBuffer:sampleBuffer depthData:nil];
}

- (void)didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer depthData:(CVPixelBufferRef)depthDataMap
{
    // Don't send the sample buffer if we are performing configurations
    if (_performingConfigurations) {
        SCLogVideoStreamerError(@"didOutputSampleBuffer return because performingConfigurations is YES");
        return;
    }
    SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]);

    // We can't set alwaysDiscardsLateVideoFrames to YES when a lens is activated because it will cause camera
    // freezing. When alwaysDiscardsLateVideoFrames is set to NO, late frames are not dropped until they reach 15
    // buffered frames, so we should simulate the dropping behavior as AVFoundation does.
    NSTimeInterval presentationTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
    _lastDisplayedFrameTimestamp = presentationTime;
    NSTimeInterval frameLatency = CACurrentMediaTime() - presentationTime;
    // Log interval defined in kSCLogInterval, currently 3.0s
    BOOL shouldLog =
        (long)(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * kSCCaptureFrameRate) %
        ((long)(kSCCaptureFrameRate * kSCLogInterval)) ==
        0;
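    // Net effect: shouldLog is YES roughly once every kSCLogInterval seconds of media time (at 30fps, about one
    // frame in every 90), which keeps the per-frame logging below from flooding the log.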
    if (shouldLog) {
        SCLogVideoStreamerInfo(@"didOutputSampleBuffer:%p", sampleBuffer);
    }
    if (_processingPipeline) {
        RenderData renderData = {
            .sampleBuffer = sampleBuffer,
            .depthDataMap = depthDataMap,
            .depthBlurPointOfInterest =
                SCCameraTweaksEnablePortraitModeAutofocus() || SCCameraTweaksEnablePortraitModeTapToFocus()
                    ? &_portraitModePointOfInterest
                    : nil,
        };
        // Ensure we are doing all render operations (i.e. accessing textures) on the performer to prevent race
        // conditions
        SCAssertPerformer(_performer);
        sampleBuffer = [_processingPipeline render:renderData];

        if (shouldLog) {
            SCLogVideoStreamerInfo(@"rendered sampleBuffer:%p in processingPipeline:%@", sampleBuffer,
                                   _processingPipeline);
        }
    }

    if (sampleBuffer && _sampleBufferDisplayEnabled) {
        // Send the buffer only if it is valid, and set it to be displayed immediately (see the enqueueSampleBuffer
        // method header; we need to get the attachments array and set the dictionary).
        CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
        if (!attachmentsArray) {
            SCLogVideoStreamerError(@"Error getting attachment array for CMSampleBuffer");
        } else if (CFArrayGetCount(attachmentsArray) > 0) {
            CFMutableDictionaryRef attachment = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, 0);
            CFDictionarySetValue(attachment, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
        }
        // Warn if the frame that went through is not recent enough.
        if (frameLatency >= kSCManagedVideoStreamerMaxAllowedLatency) {
            SCLogVideoStreamerWarning(
                @"The sample buffer we received is too late, why? presentationTime:%lf frameLatency:%f",
                presentationTime, frameLatency);
        }
        [_sampleBufferDisplayController enqueueSampleBuffer:sampleBuffer];
        if (shouldLog) {
            SCLogVideoStreamerInfo(@"displayed sampleBuffer:%p in Metal", sampleBuffer);
        }

        [self _performCompletionHandlersForWaitUntilSampleBufferDisplayed];
    }

    if (shouldLog) {
        SCLogVideoStreamerInfo(@"begin announcing sampleBuffer:%p of devicePosition:%lu", sampleBuffer,
                               (unsigned long)_devicePosition);
    }
    [_announcer managedVideoDataSource:self didOutputSampleBuffer:sampleBuffer devicePosition:_devicePosition];
    if (shouldLog) {
        SCLogVideoStreamerInfo(@"end announcing sampleBuffer:%p", sampleBuffer);
    }
}

- (void)didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (_performingConfigurations) {
        return;
    }
    SC_GUARD_ELSE_RETURN([_performer isCurrentPerformer]);
    NSTimeInterval currentProcessingTime = CACurrentMediaTime();
    NSTimeInterval currentSampleTime = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer));
    // Only log when the sticky tweak is on, which means the sticky time is too long and AVFoundation has to drop
    // the sampleBuffer
    if (_keepLateFrames) {
        SCLogVideoStreamerInfo(@"didDropSampleBuffer:%p timestamp:%f latency:%f", sampleBuffer, currentProcessingTime,
                               currentSampleTime);
    }
    [_announcer managedVideoDataSource:self didDropSampleBuffer:sampleBuffer devicePosition:_devicePosition];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(11_0)
{
    // Sticky video tweak is off, i.e. lenses are on:
    // we use the same queue for callback and processing, and let AVFoundation decide which frames should be dropped
    if (!_keepLateFrames) {
        [self didOutputSampleBuffer:sampleBuffer];
    }
    // Sticky video tweak is on
    else {
        if ([_performer isCurrentPerformer]) {
            // Note: there might be one frame called back on the processing queue while the callback queue is being
            // switched, which should be fine. But if the following log appears too often, it is not working as
            // designed.
            SCLogVideoStreamerWarning(@"The callback queue should be a separate queue when the sticky tweak is on");
        }
        // TODO: In sticky video v2, we should consider checking free memory
        if (_processingBuffersCount >= kSCManagedVideoStreamerMaxProcessingBuffers - 1) {
            SCLogVideoStreamerWarning(@"processingBuffersCount reached the max. current count:%d",
                                      _processingBuffersCount);
            [self didDropSampleBuffer:sampleBuffer];
            return;
        }
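        // Presumably the cap mirrors AVFoundation's internal pool of 15 sample buffers (see the comment in
        // setKeepLateFrames:): dropping one frame short of the pool size keeps at least one buffer free for the
        // capture pipeline while the processing queue drains.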
        atomic_fetch_add(&_processingBuffersCount, 1);
        CFRetain(sampleBuffer);
        // _performer should always be the processing queue
        [_performer perform:^{
            [self didOutputSampleBuffer:sampleBuffer];
            CFRelease(sampleBuffer);
            atomic_fetch_sub(&_processingBuffersCount, 1);
        }];
    }
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection
{
    [self didDropSampleBuffer:sampleBuffer];
}

#pragma mark - AVCaptureDataOutputSynchronizer (Video + Depth)

- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer
    didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection
    NS_AVAILABLE_IOS(11_0)
{
    AVCaptureSynchronizedDepthData *syncedDepthData = (AVCaptureSynchronizedDepthData *)[synchronizedDataCollection
        synchronizedDataForCaptureOutput:_depthDataOutput];
    AVDepthData *depthData = nil;
    if (syncedDepthData && !syncedDepthData.depthDataWasDropped) {
        depthData = syncedDepthData.depthData;
    }

    AVCaptureSynchronizedSampleBufferData *syncedVideoData =
        (AVCaptureSynchronizedSampleBufferData *)[synchronizedDataCollection
            synchronizedDataForCaptureOutput:_videoDataOutput];
    if (syncedVideoData && !syncedVideoData.sampleBufferWasDropped) {
        CMSampleBufferRef videoSampleBuffer = syncedVideoData.sampleBuffer;
        [self didOutputSampleBuffer:videoSampleBuffer depthData:depthData ? depthData.depthDataMap : nil];
    }
}

#pragma mark - ARSessionDelegate

- (void)session:(ARSession *)session cameraDidChangeTrackingState:(ARCamera *)camera NS_AVAILABLE_IOS(11_0)
{
    NSString *state = nil;
    NSString *reason = nil;
    switch (camera.trackingState) {
        case ARTrackingStateNormal:
            state = @"Normal";
            break;
        case ARTrackingStateLimited:
            state = @"Limited";
            break;
        case ARTrackingStateNotAvailable:
            state = @"Not Available";
            break;
    }
    switch (camera.trackingStateReason) {
        case ARTrackingStateReasonNone:
            reason = @"None";
            break;
        case ARTrackingStateReasonInitializing:
            reason = @"Initializing";
            break;
        case ARTrackingStateReasonExcessiveMotion:
            reason = @"Excessive Motion";
            break;
        case ARTrackingStateReasonInsufficientFeatures:
            reason = @"Insufficient Features";
            break;
#if SC_AT_LEAST_SDK_11_3
        case ARTrackingStateReasonRelocalizing:
            reason = @"Relocalizing";
            break;
#endif
    }
    SCLogVideoStreamerInfo(@"ARKit changed tracking state - %@ (reason: %@)", state, reason);
}

- (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0)
{
#ifdef SC_USE_ARKIT_FACE
    // This is extremely weird, but LOOK-10251 indicates that despite the class having it defined, on some specific
    // devices there are ARFrame instances that don't respond to `capturedDepthData`.
    // (note: this was discovered to be due to some people staying on iOS 11 betas).
    AVDepthData *depth = nil;
    if ([frame respondsToSelector:@selector(capturedDepthData)]) {
        depth = frame.capturedDepthData;
    }
#endif

    CGFloat timeSince = frame.timestamp - _lastDisplayedFrameTimestamp;
    // Don't deliver more than 30 frames per second
    BOOL framerateMinimumElapsed = timeSince >= kSCManagedVideoStreamerARSessionFramerateCap;

#ifdef SC_USE_ARKIT_FACE
    if (depth) {
        CGFloat timeSince = frame.timestamp - _lastDisplayedDepthFrameTimestamp;
        framerateMinimumElapsed |= timeSince >= kSCManagedVideoStreamerARSessionFramerateCap;
    }

#endif

    SC_GUARD_ELSE_RETURN(framerateMinimumElapsed);

#ifdef SC_USE_ARKIT_FACE
    if (depth) {
        self.lastDepthData = depth;
        _lastDisplayedDepthFrameTimestamp = frame.timestamp;
    }
#endif

    // Make sure that the current frame is no longer being used; otherwise drop the current frame.
    SC_GUARD_ELSE_RETURN(self.currentFrame == nil);

    CVPixelBufferRef pixelBuffer = frame.capturedImage;
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    CMTime time = CMTimeMakeWithSeconds(frame.timestamp, 1000000);
    CMSampleTimingInfo timing = {kCMTimeInvalid, time, kCMTimeInvalid};
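    // CMSampleTimingInfo fields are {duration, presentationTimeStamp, decodeTimeStamp}; only the presentation
    // timestamp is needed to display the ARKit frame, so duration and decode time are left invalid.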

    CMVideoFormatDescriptionRef videoInfo;
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);

    CMSampleBufferRef buffer;
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, nil, nil, videoInfo, &timing, &buffer);
    CFRelease(videoInfo);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    self.currentFrame = frame;
    [self didOutputSampleBuffer:buffer];
    [self _updateFieldOfViewWithARFrame:frame];

    CFRelease(buffer);
}

- (void)session:(ARSession *)session didAddAnchors:(NSArray<ARAnchor *> *)anchors NS_AVAILABLE_IOS(11_0)
{
    for (ARAnchor *anchor in anchors) {
        if ([anchor isKindOfClass:[ARPlaneAnchor class]]) {
            SCLogVideoStreamerInfo(@"ARKit added plane anchor");
            return;
        }
    }
}

- (void)session:(ARSession *)session didFailWithError:(NSError *)error NS_AVAILABLE_IOS(11_0)
{
    SCLogVideoStreamerError(@"ARKit session failed with error: %@. Resetting", error);
    [session runWithConfiguration:[ARConfiguration sc_configurationForDevicePosition:_devicePosition]];
}

- (void)sessionWasInterrupted:(ARSession *)session NS_AVAILABLE_IOS(11_0)
{
    SCLogVideoStreamerWarning(@"ARKit session interrupted");
}

- (void)sessionInterruptionEnded:(ARSession *)session NS_AVAILABLE_IOS(11_0)
{
    SCLogVideoStreamerInfo(@"ARKit interruption ended");
}

#pragma mark - Private methods

- (void)_performCompletionHandlersForWaitUntilSampleBufferDisplayed
{
    for (NSArray *completion in _waitUntilSampleBufferDisplayedBlocks) {
        // Call the completion handlers.
        dispatch_async(completion[0], completion[1]);
    }
    [_waitUntilSampleBufferDisplayedBlocks removeAllObjects];
}

// This is the magic that ensures the VideoDataOutput will have the correct
// orientation.
- (void)_enableVideoMirrorForDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    SCLogVideoStreamerInfo(@"enable video mirror for device position:%lu", (unsigned long)devicePosition);
    AVCaptureConnection *connection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.videoOrientation = _videoOrientation;
    if (devicePosition == SCManagedCaptureDevicePositionFront) {
        connection.videoMirrored = YES;
    }
}

- (void)_enableVideoStabilizationIfSupported
{
    SCTraceStart();
    if (!SCCameraTweaksEnableVideoStabilization()) {
        SCLogVideoStreamerWarning(@"SCCameraTweaksEnableVideoStabilization is NO, won't enable video stabilization");
        return;
    }

    AVCaptureConnection *videoConnection = [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (!videoConnection) {
        SCLogVideoStreamerError(@"cannot get videoConnection from videoDataOutput:%@", _videoDataOutput);
        return;
    }
    // Set the video stabilization mode to standard if enabled. Default is off.
    if ([videoConnection isVideoStabilizationSupported]) {
        videoConnection.preferredVideoStabilizationMode = _videoStabilizationEnabledIfSupported
                                                              ? AVCaptureVideoStabilizationModeStandard
                                                              : AVCaptureVideoStabilizationModeOff;
        NSDictionary *params = @{ @"iOS8_Mode" : @(videoConnection.activeVideoStabilizationMode) };
        [[SCLogger sharedInstance] logEvent:@"VIDEO_STABILIZATION_MODE" parameters:params];
        SCLogVideoStreamerInfo(@"set video stabilization mode:%ld to videoConnection:%@",
                               (long)videoConnection.preferredVideoStabilizationMode, videoConnection);
    } else {
        SCLogVideoStreamerInfo(@"video stabilization isn't supported on videoConnection:%@", videoConnection);
    }
}

- (void)setVideoStabilizationEnabledIfSupported:(BOOL)videoStabilizationIfSupported
{
    SCLogVideoStreamerInfo(@"setVideoStabilizationEnabledIfSupported:%d", videoStabilizationIfSupported);
    _videoStabilizationEnabledIfSupported = videoStabilizationIfSupported;
    [self _enableVideoStabilizationIfSupported];
}

- (void)_updateFieldOfViewWithARFrame:(ARFrame *)frame NS_AVAILABLE_IOS(11_0)
{
    SC_GUARD_ELSE_RETURN(frame.camera);
    CGSize imageResolution = frame.camera.imageResolution;
    matrix_float3x3 intrinsics = frame.camera.intrinsics;
    float xFovDegrees = 2 * atan(imageResolution.width / (2 * intrinsics.columns[0][0])) * 180 / M_PI;
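    // intrinsics.columns[0][0] is the focal length in pixels along x, so this is the standard pinhole-camera
    // horizontal FOV: 2 * atan(imageWidth / (2 * focalLength)), converted from radians to degrees.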
    if (_fieldOfView != xFovDegrees) {
        self.fieldOfView = xFovDegrees;
    }
}

- (NSString *)description
{
    return [self debugDescription];
}

- (NSString *)debugDescription
{
    NSDictionary *debugDict = @{
        @"_sampleBufferDisplayEnabled" : _sampleBufferDisplayEnabled ? @"Yes" : @"No",
        @"_videoStabilizationEnabledIfSupported" : _videoStabilizationEnabledIfSupported ? @"Yes" : @"No",
        @"_performingConfigurations" : _performingConfigurations ? @"Yes" : @"No",
        @"alwaysDiscardLateVideoFrames" : _videoDataOutput.alwaysDiscardsLateVideoFrames ? @"Yes" : @"No"
    };
    return [NSString sc_stringWithFormat:@"%@", debugDict];
}

@end
63
ManagedCapturer/SCMetalUtils.h
Normal file
63
ManagedCapturer/SCMetalUtils.h
Normal file
@ -0,0 +1,63 @@
|
||||
//
// SCMetalUtils.h
// Snapchat
//
// Created by Michel Loenngren on 7/11/17.
//
// Utility class for Metal related helpers.

#import <Foundation/Foundation.h>
#if !TARGET_IPHONE_SIMULATOR
#import <Metal/Metal.h>
#endif
#import <AVFoundation/AVFoundation.h>

#import <SCBase/SCMacros.h>

SC_EXTERN_C_BEGIN

#if !TARGET_IPHONE_SIMULATOR
extern id<MTLDevice> SCGetManagedCaptureMetalDevice(void);
#endif

static SC_ALWAYS_INLINE BOOL SCDeviceSupportsMetal(void)
{
#if TARGET_CPU_ARM64
    return YES; // All 64-bit systems support Metal.
#else
    return NO;
#endif
}
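// TARGET_CPU_ARM64 implies an A7 chip or newer, all of which support Metal; 32-bit devices and the simulator
// compile the NO branch.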

#if !TARGET_IPHONE_SIMULATOR
static inline id<MTLTexture> SCMetalTextureFromPixelBuffer(CVPixelBufferRef pixelBuffer, size_t planeIndex,
                                                           MTLPixelFormat pixelFormat,
                                                           CVMetalTextureCacheRef textureCache)
{
    size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex);
    size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex);
    CVMetalTextureRef textureRef;
    if (kCVReturnSuccess != CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer,
                                                                      nil, pixelFormat, width, height, planeIndex,
                                                                      &textureRef)) {
        return nil;
    }
    id<MTLTexture> texture = CVMetalTextureGetTexture(textureRef);
    CVBufferRelease(textureRef);
    return texture;
}
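// The texture returned above is a zero-copy view into the pixel buffer plane, kept alive by the texture cache;
// SCMetalCopyTexture below goes the other way and copies rendered texture bytes back into a pixel buffer.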

static inline void SCMetalCopyTexture(id<MTLTexture> texture, CVPixelBufferRef pixelBuffer, NSUInteger planeIndex)
{
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    void *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, planeIndex);
    NSUInteger bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, planeIndex);
    MTLRegion region = MTLRegionMake2D(0, 0, CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex),
                                       CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex));

    [texture getBytes:baseAddress bytesPerRow:bytesPerRow fromRegion:region mipmapLevel:0];
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
#endif

SC_EXTERN_C_END
25
ManagedCapturer/SCMetalUtils.m
Normal file
25
ManagedCapturer/SCMetalUtils.m
Normal file
@ -0,0 +1,25 @@
|
||||
//
// SCMetalUtils.m
// Snapchat
//
// Created by Michel Loenngren on 8/16/17.
//
//

#import "SCMetalUtils.h"

#import <SCFoundation/SCTrace.h>

id<MTLDevice> SCGetManagedCaptureMetalDevice(void)
{
#if !TARGET_IPHONE_SIMULATOR
    SCTraceStart();
    static dispatch_once_t onceToken;
    static id<MTLDevice> device;
    dispatch_once(&onceToken, ^{
        device = MTLCreateSystemDefaultDevice();
    });
    return device;
#endif
    return nil;
}
18
ManagedCapturer/SCScanConfiguration.h
Normal file
18
ManagedCapturer/SCScanConfiguration.h
Normal file
@ -0,0 +1,18 @@
|
||||
//
// SCScanConfiguration.h
// Snapchat
//
// Created by Yang Dai on 3/7/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCapturer.h"

#import <SCSession/SCUserSession.h>

@interface SCScanConfiguration : NSObject

@property (nonatomic, strong) sc_managed_capturer_scan_results_handler_t scanResultsHandler;
@property (nonatomic, strong) SCUserSession *userSession;

@end
13
ManagedCapturer/SCScanConfiguration.m
Normal file
13
ManagedCapturer/SCScanConfiguration.m
Normal file
@ -0,0 +1,13 @@
|
||||
//
// SCScanConfiguration.m
// Snapchat
//
// Created by Yang Dai on 3/7/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCScanConfiguration.h"

@implementation SCScanConfiguration

@end
17
ManagedCapturer/SCSingleFrameStreamCapturer.h
Normal file
17
ManagedCapturer/SCSingleFrameStreamCapturer.h
Normal file
@ -0,0 +1,17 @@
|
||||
//
// SCSingleFrameStreamCapturer.h
// Snapchat
//
// Created by Benjamin Hollis on 5/3/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCCaptureCommon.h"

#import <SCCameraFoundation/SCManagedVideoDataSourceListener.h>

#import <Foundation/Foundation.h>

@interface SCSingleFrameStreamCapturer : NSObject <SCManagedVideoDataSourceListener>
- (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler;
@end
103
ManagedCapturer/SCSingleFrameStreamCapturer.m
Normal file
103
ManagedCapturer/SCSingleFrameStreamCapturer.m
Normal file
@ -0,0 +1,103 @@
|
||||
//
// SCSingleFrameStreamCapturer.m
// Snapchat
//
// Created by Benjamin Hollis on 5/3/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCSingleFrameStreamCapturer.h"

#import "SCManagedCapturer.h"

@implementation SCSingleFrameStreamCapturer {
    sc_managed_capturer_capture_video_frame_completion_handler_t _callback;
}

- (instancetype)initWithCompletion:(sc_managed_capturer_capture_video_frame_completion_handler_t)completionHandler
{
    self = [super init];
    if (self) {
        _callback = completionHandler;
    }
    return self;
}

#pragma mark - SCManagedVideoDataSourceListener

- (void)managedVideoDataSource:(id<SCManagedVideoDataSource>)managedVideoDataSource
         didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
                devicePosition:(SCManagedCaptureDevicePosition)devicePosition
{
    if (_callback) {
        UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
        _callback(image);
    }
    _callback = nil;
}

/**
 * Decode a CMSampleBufferRef in our native camera format (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
 * as set in SCManagedVideoStreamer) into a UIImage.
 *
 * Code from http://stackoverflow.com/a/31553521/11284
 */
#define clamp(a) (a > 255 ? 255 : (a < 0 ? 0 : a))
// TODO: Use the transform code from SCImageProcessIdentityYUVCommand
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    uint8_t *yBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t yPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
    uint8_t *cbCrBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
    size_t cbCrPitch = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);

    int bytesPerPixel = 4;
    uint8_t *rgbBuffer = malloc(width * height * bytesPerPixel);

    for (int y = 0; y < height; y++) {
        uint8_t *rgbBufferLine = &rgbBuffer[y * width * bytesPerPixel];
        uint8_t *yBufferLine = &yBuffer[y * yPitch];
        uint8_t *cbCrBufferLine = &cbCrBuffer[(y >> 1) * cbCrPitch];

        for (int x = 0; x < width; x++) {
            int16_t y = yBufferLine[x];
            int16_t cb = cbCrBufferLine[x & ~1] - 128;
            int16_t cr = cbCrBufferLine[x | 1] - 128;

            uint8_t *rgbOutput = &rgbBufferLine[x * bytesPerPixel];

            int16_t r = (int16_t)roundf(y + cr * 1.4);
            int16_t g = (int16_t)roundf(y + cb * -0.343 + cr * -0.711);
            int16_t b = (int16_t)roundf(y + cb * 1.765);
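            // These constants are approximately the BT.601 full-range YCbCr -> RGB coefficients
            // (1.402, -0.344, -0.714, 1.772), slightly rounded.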

            rgbOutput[0] = 0xff;
            rgbOutput[1] = clamp(b);
            rgbOutput[2] = clamp(g);
            rgbOutput[3] = clamp(r);
        }
    }

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rgbBuffer, width, height, 8, width * bytesPerPixel, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipLast);
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    // TODO: Hardcoding UIImageOrientationRight seems cheesy
    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(quartzImage);
    free(rgbBuffer);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return image;
}

@end
19
ManagedCapturer/SCStillImageCaptureVideoInputMethod.h
Normal file
19
ManagedCapturer/SCStillImageCaptureVideoInputMethod.h
Normal file
@ -0,0 +1,19 @@
|
||||
//
// SCStillImageCaptureVideoInputMethod.h
// Snapchat
//
// Created by Alexander Grytsiuk on 3/16/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCManagedCapturerState.h"

#import <AVFoundation/AVFoundation.h>

@interface SCStillImageCaptureVideoInputMethod : NSObject

- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state
                              successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo,
                                                     NSError *error))successBlock
                              failureBlock:(void (^)(NSError *error))failureBlock;
@end
140
ManagedCapturer/SCStillImageCaptureVideoInputMethod.m
Normal file
140
ManagedCapturer/SCStillImageCaptureVideoInputMethod.m
Normal file
@ -0,0 +1,140 @@
|
||||
//
// SCStillImageCaptureVideoInputMethod.m
// Snapchat
//
// Created by Alexander Grytsiuk on 3/16/16.
// Copyright © 2016 Snapchat, Inc. All rights reserved.
//

#import "SCStillImageCaptureVideoInputMethod.h"

#import "SCManagedCapturer.h"
#import "SCManagedVideoFileStreamer.h"

typedef unsigned char uchar_t;
int clamp(int val, int low, int high)
{
    if (val < low)
        val = low;
    if (val > high)
        val = high;
    return val;
}

void yuv2rgb(uchar_t yValue, uchar_t uValue, uchar_t vValue, uchar_t *r, uchar_t *g, uchar_t *b)
{
    double red = yValue + (1.370705 * (vValue - 128));
    double green = yValue - (0.698001 * (vValue - 128)) - (0.337633 * (uValue - 128));
    double blue = yValue + (1.732446 * (uValue - 128));
    *r = clamp(red, 0, 255);
    *g = clamp(green, 0, 255);
    *b = clamp(blue, 0, 255);
}

void convertNV21DataToRGBData(int width, int height, uchar_t *nv21Data, uchar_t *rgbData, int rgbBytesPerPixel,
                              int rgbBytesPerRow)
{
    uchar_t *uvData = nv21Data + height * width;
    for (int h = 0; h < height; h++) {
        uchar_t *yRowBegin = nv21Data + h * width;
        uchar_t *uvRowBegin = uvData + h / 2 * width;
        uchar_t *rgbRowBegin = rgbData + rgbBytesPerRow * h;
        for (int w = 0; w < width; w++) {
            uchar_t *rgbPixelBegin = rgbRowBegin + rgbBytesPerPixel * w;
            yuv2rgb(yRowBegin[w], uvRowBegin[w / 2 * 2], uvRowBegin[w / 2 * 2 + 1], &(rgbPixelBegin[0]),
                    &(rgbPixelBegin[1]), &(rgbPixelBegin[2]));
        }
    }
}
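// Note: despite the NV21 (VU-interleaved) name, the even interleaved byte is passed to yuv2rgb as U and the odd
// byte as V, which matches NV12 (UV) ordering; if red and blue ever come out swapped, this indexing is the place
// to check.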

@implementation SCStillImageCaptureVideoInputMethod

- (void)captureStillImageWithCapturerState:(SCManagedCapturerState *)state
                              successBlock:(void (^)(NSData *imageData, NSDictionary *cameraInfo,
                                                     NSError *error))successBlock
                              failureBlock:(void (^)(NSError *error))failureBlock
{
    id<SCManagedVideoDataSource> videoDataSource = [[SCManagedCapturer sharedInstance] currentVideoDataSource];
    if ([videoDataSource isKindOfClass:[SCManagedVideoFileStreamer class]]) {
        SCManagedVideoFileStreamer *videoFileStreamer = (SCManagedVideoFileStreamer *)videoDataSource;
        [videoFileStreamer getNextPixelBufferWithCompletion:^(CVPixelBufferRef pixelBuffer) {
            BOOL shouldFlip = state.devicePosition == SCManagedCaptureDevicePositionFront;
#if TARGET_IPHONE_SIMULATOR
            UIImage *uiImage = [self imageWithCVPixelBuffer:pixelBuffer];
            CGImageRef videoImage = uiImage.CGImage;
            UIImage *capturedImage = [UIImage
                imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:uiImage.size].CGImage : videoImage
                           scale:1.0
                     orientation:UIImageOrientationRight];
#else
            CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
            CIContext *temporaryContext = [CIContext contextWithOptions:nil];

            CGSize size = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer));
            CGImageRef videoImage =
                [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, size.width, size.height)];

            UIImage *capturedImage =
                [UIImage imageWithCGImage:shouldFlip ? [self flipCGImage:videoImage size:size].CGImage : videoImage
                                    scale:1.0
                              orientation:UIImageOrientationRight];

            CGImageRelease(videoImage);
#endif
            if (successBlock) {
                successBlock(UIImageJPEGRepresentation(capturedImage, 1.0), nil, nil);
            }
        }];
    } else {
        if (failureBlock) {
            failureBlock([NSError errorWithDomain:NSStringFromClass(self.class) code:-1 userInfo:nil]);
        }
    }
}

- (UIImage *)flipCGImage:(CGImageRef)cgImage size:(CGSize)size
{
    UIGraphicsBeginImageContext(size);
    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, size.width, size.height), cgImage);
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

- (UIImage *)imageWithCVPixelBuffer:(CVPixelBufferRef)imageBuffer
{
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t rgbBytesPerPixel = 4;
    size_t rgbBytesPerRow = width * rgbBytesPerPixel;

    uchar_t *nv21Data = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    uchar_t *rgbData = malloc(rgbBytesPerRow * height);

    convertNV21DataToRGBData((int)width, (int)height, nv21Data, rgbData, (int)rgbBytesPerPixel, (int)rgbBytesPerRow);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context =
        CGBitmapContextCreate(rgbData, width, height, 8, rgbBytesPerRow, colorSpace, kCGImageAlphaNoneSkipLast);
    CGImageRef cgImage = CGBitmapContextCreateImage(context);

    UIImage *result = [UIImage imageWithCGImage:cgImage];

    CGImageRelease(cgImage);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    free(rgbData);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return result;
}

- (NSString *)methodName
{
    return @"VideoInput";
}

@end
28
ManagedCapturer/SCTimedTask.h
Normal file
28
ManagedCapturer/SCTimedTask.h
Normal file
@ -0,0 +1,28 @@
|
||||
//
// SCTimedTask.h
// Snapchat
//
// Created by Michel Loenngren on 4/2/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>

/*
 Block based timed task
 */
@interface SCTimedTask : NSObject

@property (nonatomic, assign) CMTime targetTime;
@property (nonatomic, copy) void (^task)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond);

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithTargetTime:(CMTime)targetTime
                              task:(void (^)(CMTime relativePresentationTime,
                                             CGFloat sessionStartTimeDelayInSecond))task;

- (NSString *)description;

@end
32
ManagedCapturer/SCTimedTask.m
Normal file
@ -0,0 +1,32 @@
//
// SCTimedTask.m
// Snapchat
//
// Created by Michel Loenngren on 4/2/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import "SCTimedTask.h"

#import <SCFoundation/NSString+SCFormat.h>

@implementation SCTimedTask

- (instancetype)initWithTargetTime:(CMTime)targetTime
                              task:
                                  (void (^)(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond))task
{
    if (self = [super init]) {
        _targetTime = targetTime;
        _task = task;
    }
    return self;
}

- (NSString *)description
{
    return [NSString
        sc_stringWithFormat:@"<%@: %p, targetTime: %lld>", NSStringFromClass([self class]), self, _targetTime.value];
}

@end
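A minimal construction sketch; how tasks are scheduled and fired is outside this excerpt, and CMTimeMake(3, 1) is simply the three-second mark:

SCTimedTask *task = [[SCTimedTask alloc]
    initWithTargetTime:CMTimeMake(3, 1)
                  task:^(CMTime relativePresentationTime, CGFloat sessionStartTimeDelayInSecond) {
                      // Runs once the recording clock passes the target time.
                  }];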
83
ManagedCapturer/SCVideoCaptureSessionInfo.h
Normal file
@ -0,0 +1,83 @@
//
// SCVideoCaptureSessionInfo.h
// Snapchat
//
// Created by Michel Loenngren on 3/27/17.
// Copyright © 2017 Snapchat, Inc. All rights reserved.
//

#import <SCFoundation/NSString+SCFormat.h>

#import <CoreMedia/CoreMedia.h>
#import <Foundation/Foundation.h>

typedef NS_ENUM(NSInteger, SCManagedVideoCapturerInfoType) {
    SCManagedVideoCapturerInfoAudioQueueError,
    SCManagedVideoCapturerInfoAssetWriterError,
    SCManagedVideoCapturerInfoAudioSessionError,
    SCManagedVideoCapturerInfoAudioQueueRetrySuccess,
    SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_audioQueue,
    SCManagedVideoCapturerInfoAudioQueueRetryDataSourceSuccess_hardware
};

typedef u_int32_t sc_managed_capturer_recording_session_t;

/*
 Container object holding information about the
 current recording session.
 */
typedef struct {
    CMTime startTime;
    CMTime endTime;
    CMTime duration;
    sc_managed_capturer_recording_session_t sessionId;
} SCVideoCaptureSessionInfo;

static inline SCVideoCaptureSessionInfo SCVideoCaptureSessionInfoMake(CMTime startTime, CMTime endTime,
                                                                      sc_managed_capturer_recording_session_t sessionId)
{
    SCVideoCaptureSessionInfo session;
    session.startTime = startTime;
    session.endTime = endTime;
    if (CMTIME_IS_VALID(startTime) && CMTIME_IS_VALID(endTime)) {
        session.duration = CMTimeSubtract(endTime, startTime);
    } else {
        session.duration = kCMTimeInvalid;
    }
    session.sessionId = sessionId;
    return session;
}

static inline NSTimeInterval SCVideoCaptureSessionInfoGetCurrentDuration(SCVideoCaptureSessionInfo sessionInfo)
{
    if (CMTIME_IS_VALID(sessionInfo.startTime)) {
        if (CMTIME_IS_VALID(sessionInfo.endTime)) {
            return CMTimeGetSeconds(sessionInfo.duration);
        }
        return CACurrentMediaTime() - CMTimeGetSeconds(sessionInfo.startTime);
    }
    return 0;
}

static inline NSString *SCVideoCaptureSessionInfoGetDebugString(CMTime time, NSString *label)
{
    if (CMTIME_IS_VALID(time)) {
        return [NSString sc_stringWithFormat:@"%@: %f", label, CMTimeGetSeconds(time)];
    } else {
        return [NSString sc_stringWithFormat:@"%@: Invalid", label];
    }
}

static inline NSString *SCVideoCaptureSessionInfoGetDebugDescription(SCVideoCaptureSessionInfo sessionInfo)
{
    NSMutableString *description = [NSMutableString new];
    [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.startTime, @"StartTime")];
    [description appendString:@", "];
    [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.endTime, @"EndTime")];
    [description appendString:@", "];
    [description appendString:SCVideoCaptureSessionInfoGetDebugString(sessionInfo.duration, @"Duration")];
    [description appendString:@", "];
    [description appendString:[NSString sc_stringWithFormat:@"Id: %u", sessionInfo.sessionId]];

    return [description copy];
}
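A quick sketch of how these inline helpers compose; the values are illustrative (CMTimeMake(3000, 600) is five seconds at the conventional 600 timescale):

SCVideoCaptureSessionInfo info =
    SCVideoCaptureSessionInfoMake(CMTimeMake(0, 600), CMTimeMake(3000, 600), 42);
NSTimeInterval duration = SCVideoCaptureSessionInfoGetCurrentDuration(info); // 5.0 seconds
NSLog(@"%@", SCVideoCaptureSessionInfoGetDebugDescription(info));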
103
ManagedCapturer/StateMachine/SCCaptureBaseState.h
Normal file
@ -0,0 +1,103 @@
//
// SCCaptureBaseState.h
// Snapchat
//
// Created by Lin Jia on 10/19/17.
//
//

#import "SCCaptureCommon.h"
#import "SCCaptureStateDelegate.h"
#import "SCCaptureStateMachineBookKeeper.h"
#import "SCCaptureStateUtil.h"
#import "SCCaptureWorker.h"
#import "SCManagedCaptureDevice.h"
#import "SCManagedCapturerState.h"
#import "SCStateTransitionPayload.h"

#import <Foundation/Foundation.h>

@class SCCaptureResource;

@class SCCapturerToken;

@class SCAudioConfiguration;

@class SCQueuePerformer;
/*
 Every state machine state needs to inherit from SCCaptureBaseState to have the APIs. A state will in general only
 implement the APIs which are legal for itself. If an illegal API is invoked, SCCaptureBaseState handles it.
 The intended behavior:
 1) crash using SCAssert in debug builds,
 2) ignore the API call, and log the call, for alpha/master/production,
 3) in the future, introduce a "dangerous API call" concept, and restart the camera in such cases, to avoid bad state.

 Every state machine state is built to follow functional programming as much as possible. The resources shared
 between states are passed into each API via SCCaptureResource.
 */

@interface SCCaptureBaseState : NSObject

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;

/* The following API is invoked at the moment the state context promotes this state to be the current state. The
 * state can use this chance to do something, such as start recording for the recording state.
 */
- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context;

- (SCCaptureStateMachineStateId)stateId;

- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
                                   resource:(SCCaptureResource *)resource
                          completionHandler:(dispatch_block_t)completionHandler
                                    context:(NSString *)context;

- (void)startRunningWithCapturerToken:(SCCapturerToken *)token
                             resource:(SCCaptureResource *)resource
                    completionHandler:(dispatch_block_t)completionHandler
                              context:(NSString *)context;

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                            resource:(SCCaptureResource *)resource
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context;

- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource
                     audioConfiguration:(SCAudioConfiguration *)configuration
                                context:(NSString *)context;

- (void)startRecordingWithResource:(SCCaptureResource *)resource
                audioConfiguration:(SCAudioConfiguration *)configuration
                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                       maxDuration:(NSTimeInterval)maxDuration
                           fileURL:(NSURL *)fileURL
                  captureSessionID:(NSString *)captureSessionID
                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                           context:(NSString *)context;

- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context;

- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context;

- (void)captureStillImageWithResource:(SCCaptureResource *)resource
                          aspectRatio:(CGFloat)aspectRatio
                     captureSessionID:(NSString *)captureSessionID
                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                              context:(NSString *)context;

- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration
                              resource:(SCCaptureResource *)resource
                               context:(NSString *)context;

- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler
                             resource:(SCCaptureResource *)resource
                              context:(NSString *)context;

@property (nonatomic, strong, readonly) SCCaptureStateMachineBookKeeper *bookKeeper;
@end
169
ManagedCapturer/StateMachine/SCCaptureBaseState.m
Normal file
@ -0,0 +1,169 @@
//
// SCCaptureBaseState.m
// Snapchat
//
// Created by Lin Jia on 10/19/17.
//
//

#import "SCCaptureBaseState.h"

#import "SCCaptureStateMachineBookKeeper.h"
#import "SCCapturerToken.h"
#import "SCManagedCapturerV1_Private.h"

#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>

@implementation SCCaptureBaseState {
    SCCaptureStateMachineBookKeeper *_bookKeeper;
    SCQueuePerformer *_performer;
    __weak id<SCCaptureStateDelegate> _delegate;
}

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super init];
    if (self) {
        SCAssert(performer, @"");
        SCAssert(bookKeeper, @"");
        _bookKeeper = bookKeeper;
        _performer = performer;
        _delegate = delegate;
    }
    return self;
}

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureBaseStateId;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"didBecomeCurrentState" context:context];
}

- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
                                   resource:(SCCaptureResource *)resource
                          completionHandler:(dispatch_block_t)completionHandler
                                    context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"initializeCaptureWithDevicePosition" context:context];
}

- (void)startRunningWithCapturerToken:(SCCapturerToken *)token
                             resource:(SCCaptureResource *)resource
                    completionHandler:(dispatch_block_t)completionHandler
                              context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"startRunningWithCapturerToken" context:context];
}

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                            resource:(SCCaptureResource *)resource
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCAssertPerformer(_performer);
    BOOL actuallyStopped = [[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token
                                                                           completionHandler:completionHandler
                                                                                     context:context];
    // TODO: Fix CCAM-14450
    // This is a temporary solution for https://jira.sc-corp.net/browse/CCAM-14450
    // It is caused by switching from the scanning state to the stopped-running state while the view is disappearing
    // in the scanning state, which can be reproduced by triggering scanning and then switching to the maps page.
    // We removed the SCAssert to ignore these crashes on the master branch and will find a solution for the illegal
    // state machine call later.

    if (self.stateId != SCCaptureScanningStateId) {
        SCAssert(!actuallyStopped, @"actuallyStopped in state: %@ with context: %@", SCCaptureStateName([self stateId]),
                 context);
    } else {
        SCLogCaptureStateMachineInfo(@"actuallyStopped:%d in state: %@ with context: %@", actuallyStopped,
                                     SCCaptureStateName([self stateId]), context);
    }

    if (actuallyStopped) {
        [_delegate currentState:self
            requestToTransferToNewState:SCCaptureInitializedStateId
                                payload:nil
                                context:context];
    }
}

- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource
                     audioConfiguration:(SCAudioConfiguration *)configuration
                                context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"prepareForRecordingWithResource" context:context];
}

- (void)startRecordingWithResource:(SCCaptureResource *)resource
                audioConfiguration:(SCAudioConfiguration *)configuration
                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                       maxDuration:(NSTimeInterval)maxDuration
                           fileURL:(NSURL *)fileURL
                  captureSessionID:(NSString *)captureSessionID
                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                           context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"startRecordingWithResource" context:context];
}

- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"stopRecordingWithResource" context:context];
}

- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"cancelRecordingWithResource" context:context];
}

- (void)captureStillImageWithResource:(SCCaptureResource *)resource
                          aspectRatio:(CGFloat)aspectRatio
                     captureSessionID:(NSString *)captureSessionID
                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                              context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"captureStillImageWithResource" context:context];
}

- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration
                              resource:(SCCaptureResource *)resource
                               context:(NSString *)context
{
    [self _handleBaseStateBehavior:@"startScanWithScanConfiguration" context:context];
}

- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler
                             resource:(SCCaptureResource *)resource
                              context:(NSString *)context
{
    // Temporary solution until IDT-12520 is resolved.
    [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource];
    //[self _handleBaseStateBehavior:@"stopScanWithCompletionHandler"];
}

- (void)_handleBaseStateBehavior:(NSString *)illegalAPIName context:(NSString *)context
{
    [_bookKeeper state:[self stateId]
        illegalAPIcalled:illegalAPIName
               callStack:[NSThread callStackSymbols]
                 context:context];
    if (SCIsDebugBuild()) {
        SCAssertFail(@"illegal API invoked on capture state machine");
    }
}

- (SCCaptureStateMachineBookKeeper *)bookKeeper
{
    return _bookKeeper;
}
@end
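To make the inheritance contract concrete, here is a hypothetical subclass that legalizes exactly one API; everything it does not override keeps the illegal-call handling above, and the state id shown is a placeholder, not a shipped state:

@interface SCCaptureExampleState : SCCaptureBaseState
@end

@implementation SCCaptureExampleState

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureInitializedStateId; // placeholder; a real state returns its own id
}

- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource
                     audioConfiguration:(SCAudioConfiguration *)configuration
                                context:(NSString *)context
{
    // Legal in this hypothetical state: do the real work here instead of
    // falling through to _handleBaseStateBehavior:, then book-keep the call
    // the same way the shipped states do.
    [self.bookKeeper logAPICalled:@"prepareForRecordingWithResource" context:context];
}

@end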
30
ManagedCapturer/StateMachine/SCCaptureStateDelegate.h
Normal file
@ -0,0 +1,30 @@
//
// SCCaptureStateDelegate.h
// Snapchat
//
// Created by Lin Jia on 10/27/17.
//
//

#import "SCCaptureStateUtil.h"

#import <Foundation/Foundation.h>

@class SCCaptureBaseState;
@class SCStateTransitionPayload;
/*
 The state machine state delegate is used by state machine states to hint to the system that "I am done, now transfer
 to another state".

 Currently, SCCaptureStateMachineContext is the central piece that glues all states together, and it is the delegate
 for those states.
 */

@protocol SCCaptureStateDelegate <NSObject>

- (void)currentState:(SCCaptureBaseState *)state
    requestToTransferToNewState:(SCCaptureStateMachineStateId)newState
                        payload:(SCStateTransitionPayload *)payload
                        context:(NSString *)context;

@end
@ -0,0 +1,29 @@
//
// SCCaptureStateTransitionBookKeeper.h
// Snapchat
//
// Created by Lin Jia on 10/27/17.
//
//

#import "SCCaptureStateUtil.h"

#import <Foundation/Foundation.h>

/*
 The book keeper is used to record every state transition and every illegal API call.
 */

@interface SCCaptureStateMachineBookKeeper : NSObject

- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId
                         to:(SCCaptureStateMachineStateId)toId
                    context:(NSString *)context;

- (void)state:(SCCaptureStateMachineStateId)captureState
    illegalAPIcalled:(NSString *)illegalAPIName
           callStack:(NSArray<NSString *> *)callStack
             context:(NSString *)context;

- (void)logAPICalled:(NSString *)apiName context:(NSString *)context;
@end
@ -0,0 +1,63 @@
//
// SCCaptureStateTransitionBookKeeper.m
// Snapchat
//
// Created by Lin Jia on 10/27/17.
//
//

#import "SCCaptureStateMachineBookKeeper.h"

#import "SCCaptureStateUtil.h"
#import "SCLogger+Camera.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCLogger/SCCameraMetrics.h>

@interface SCCaptureStateMachineBookKeeper () {
    NSDate *_lastStateStartTime;
}
@end

@implementation SCCaptureStateMachineBookKeeper

- (void)stateTransitionFrom:(SCCaptureStateMachineStateId)fromId
                         to:(SCCaptureStateMachineStateId)toId
                    context:(NSString *)context
{
    NSDate *date = [NSDate date];
    SCLogCaptureStateMachineInfo(@"State %@ life span: %f seconds, transition to: %@, in context:%@, at: %@ \n",
                                 SCCaptureStateName(fromId), [date timeIntervalSinceDate:_lastStateStartTime],
                                 SCCaptureStateName(toId), context, date);
    _lastStateStartTime = date;
}

- (void)state:(SCCaptureStateMachineStateId)captureState
    illegalAPIcalled:(NSString *)illegalAPIName
           callStack:(NSArray<NSString *> *)callStack
             context:(NSString *)context
{
    SCAssert(callStack, @"call stack empty");
    SCAssert(illegalAPIName, @"");
    SCAssert(context, @"Context is empty");
    SCLogCaptureStateMachineError(@"State: %@, illegal API invoke: %@, at: %@, callstack: %@ \n",
                                  SCCaptureStateName(captureState), illegalAPIName, [NSDate date], callStack);
    NSArray<NSString *> *reportedArray =
        [callStack count] > 15 ? [callStack subarrayWithRange:NSMakeRange(0, 15)] : callStack;
    [[SCLogger sharedInstance] logEvent:kSCCameraStateMachineIllegalAPICall
                             parameters:@{
                                 @"state" : SCCaptureStateName(captureState),
                                 @"API" : illegalAPIName,
                                 @"call_stack" : reportedArray,
                                 @"context" : context
                             }];
}

- (void)logAPICalled:(NSString *)apiName context:(NSString *)context
{
    SCAssert(apiName, @"API name is empty");
    SCAssert(context, @"Context is empty");
    SCLogCaptureStateMachineInfo(@"api: %@ context: %@", apiName, context);
}
@end
76
ManagedCapturer/StateMachine/SCCaptureStateMachineContext.h
Normal file
@ -0,0 +1,76 @@
//
// SCCaptureStateMachineContext.h
// Snapchat
//
// Created by Lin Jia on 10/18/17.
//
//

#import "SCCaptureCommon.h"
#import "SCManagedCaptureDevice.h"

#import <SCAudio/SCAudioConfiguration.h>

#import <Foundation/Foundation.h>

/*
 SCCaptureStateMachineContext is the central piece that glues all states together.

 It will pass API calls to the current state.

 The classic state machine design pattern:
 https://en.wikipedia.org/wiki/State_pattern

 It is also the delegate for the states it manages, so that those states can tell the stateMachineContext to
 transition to the next state.
 */

@class SCCaptureResource;

@class SCCapturerToken;

@interface SCCaptureStateMachineContext : NSObject

- (instancetype)initWithResource:(SCCaptureResource *)resource;

- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                                        completionHandler:(dispatch_block_t)completionHandler
                                                  context:(NSString *)context;

- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler;

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context;

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                               after:(NSTimeInterval)delay
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context;

- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration
                                                        context:(NSString *)context;

- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                      audioConfiguration:(SCAudioConfiguration *)configuration
                             maxDuration:(NSTimeInterval)maxDuration
                                 fileURL:(NSURL *)fileURL
                        captureSessionID:(NSString *)captureSessionID
                       completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                                 context:(NSString *)context;

- (void)stopRecordingWithContext:(NSString *)context;

- (void)cancelRecordingWithContext:(NSString *)context;

- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                               context:(NSString *)context;

#pragma mark - Scanning
- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context;
- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context;

@end
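Taken together, the header implies a client flow like the following sketch; the SCCaptureResource setup is elided, and the device-position constant and context string are illustrative assumptions:

SCCaptureStateMachineContext *stateMachine =
    [[SCCaptureStateMachineContext alloc] initWithResource:resource];
[stateMachine initializeCaptureWithDevicePositionAsynchronously:SCManagedCaptureDevicePositionBack
                                              completionHandler:nil
                                                        context:@"example"];
SCCapturerToken *token = [stateMachine startRunningWithContext:@"example" completionHandler:nil];
// ... capture, record, or scan through the same object ...
[stateMachine stopRunningWithCapturerToken:token completionHandler:nil context:@"example"];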
301
ManagedCapturer/StateMachine/SCCaptureStateMachineContext.m
Normal file
@ -0,0 +1,301 @@
//
// SCCaptureStateMachineContext.m
// Snapchat
//
// Created by Lin Jia on 10/18/17.
//
//

#import "SCCaptureStateMachineContext.h"

#import "SCCaptureBaseState.h"
#import "SCCaptureImageState.h"
#import "SCCaptureImageWhileRecordingState.h"
#import "SCCaptureInitializedState.h"
#import "SCCaptureRecordingState.h"
#import "SCCaptureResource.h"
#import "SCCaptureRunningState.h"
#import "SCCaptureScanningState.h"
#import "SCCaptureStateMachineBookKeeper.h"
#import "SCCaptureStateUtil.h"
#import "SCCaptureUninitializedState.h"
#import "SCCaptureWorker.h"
#import "SCCapturerToken.h"
#import "SCStateTransitionPayload.h"

#import <SCAudio/SCAudioConfiguration.h>
#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTrace.h>
#import <SCLogger/SCCameraMetrics.h>
#import <SCLogger/SCLogger+Performance.h>

@interface SCCaptureStateMachineContext () <SCCaptureStateDelegate> {
    SCQueuePerformer *_queuePerformer;

    // Cache all the states.
    NSMutableDictionary<SCCaptureStateKey *, SCCaptureBaseState *> *_states;
    SCCaptureBaseState *_currentState;
    SCCaptureStateMachineBookKeeper *_bookKeeper;
    SCCaptureResource *_captureResource;
}
@end

@implementation SCCaptureStateMachineContext

- (instancetype)initWithResource:(SCCaptureResource *)resource
{
    self = [super init];
    if (self) {
        SCAssert(resource, @"");
        SCAssert(resource.queuePerformer, @"");
        _captureResource = resource;
        _queuePerformer = resource.queuePerformer;
        _states = [[NSMutableDictionary<SCCaptureStateKey *, SCCaptureBaseState *> alloc] init];
        _bookKeeper = [[SCCaptureStateMachineBookKeeper alloc] init];
        [self _setCurrentState:SCCaptureUninitializedStateId payload:nil context:SCCapturerContext];
    }
    return self;
}

- (void)_setCurrentState:(SCCaptureStateMachineStateId)stateId
                 payload:(SCStateTransitionPayload *)payload
                 context:(NSString *)context
{
    switch (stateId) {
    case SCCaptureUninitializedStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureUninitializedState *uninitializedState =
                [[SCCaptureUninitializedState alloc] initWithPerformer:_queuePerformer
                                                            bookKeeper:_bookKeeper
                                                              delegate:self];
            [_states setObject:uninitializedState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureInitializedStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureInitializedState *initializedState =
                [[SCCaptureInitializedState alloc] initWithPerformer:_queuePerformer
                                                          bookKeeper:_bookKeeper
                                                            delegate:self];
            [_states setObject:initializedState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureRunningStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureRunningState *runningState =
                [[SCCaptureRunningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];
            [_states setObject:runningState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureImageStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureImageState *captureImageState =
                [[SCCaptureImageState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];
            [_states setObject:captureImageState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureImageWhileRecordingStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureImageWhileRecordingState *captureImageWhileRecordingState =
                [[SCCaptureImageWhileRecordingState alloc] initWithPerformer:_queuePerformer
                                                                  bookKeeper:_bookKeeper
                                                                    delegate:self];
            [_states setObject:captureImageWhileRecordingState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureScanningStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureScanningState *scanningState =
                [[SCCaptureScanningState alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];
            [_states setObject:scanningState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    case SCCaptureRecordingStateId:
        if (![_states objectForKey:@(stateId)]) {
            SCCaptureRecordingState *recordingState = [[SCCaptureRecordingState alloc] initWithPerformer:_queuePerformer
                                                                                               bookKeeper:_bookKeeper
                                                                                                 delegate:self];
            [_states setObject:recordingState forKey:@(stateId)];
        }
        _currentState = [_states objectForKey:@(stateId)];
        break;
    default:
        SCAssert(NO, @"illegal state id");
        break;
    }
    [_currentState didBecomeCurrentState:payload resource:_captureResource context:context];
}

- (void)initializeCaptureWithDevicePositionAsynchronously:(SCManagedCaptureDevicePosition)devicePosition
                                        completionHandler:(dispatch_block_t)completionHandler
                                                  context:(NSString *)context
{
    [SCCaptureWorker setupCapturePreviewLayerController];

    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState initializeCaptureWithDevicePosition:devicePosition
                                                  resource:_captureResource
                                         completionHandler:completionHandler
                                                   context:context];
    }];
}

- (SCCapturerToken *)startRunningWithContext:(NSString *)context completionHandler:(dispatch_block_t)completionHandler
{
    [[SCLogger sharedInstance] updateLogTimedEventStart:kSCCameraMetricsOpen uniqueId:@""];

    SCCapturerToken *token = [[SCCapturerToken alloc] initWithIdentifier:context];
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState startRunningWithCapturerToken:token
                                            resource:_captureResource
                                   completionHandler:completionHandler
                                             context:context];
    }];

    return token;
}

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState stopRunningWithCapturerToken:token
                                           resource:_captureResource
                                  completionHandler:completionHandler
                                            context:context];
    }];
}

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                               after:(NSTimeInterval)delay
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState stopRunningWithCapturerToken:token
                                           resource:_captureResource
                                  completionHandler:completionHandler
                                            context:context];
    }
                       after:delay];
}

- (void)prepareForRecordingAsynchronouslyWithAudioConfiguration:(SCAudioConfiguration *)configuration
                                                        context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState prepareForRecordingWithResource:_captureResource
                                    audioConfiguration:configuration
                                               context:context];
    }];
}

- (void)startRecordingWithOutputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                      audioConfiguration:(SCAudioConfiguration *)configuration
                             maxDuration:(NSTimeInterval)maxDuration
                                 fileURL:(NSURL *)fileURL
                        captureSessionID:(NSString *)captureSessionID
                       completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                                 context:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState startRecordingWithResource:_captureResource
                               audioConfiguration:configuration
                                   outputSettings:outputSettings
                                      maxDuration:maxDuration
                                          fileURL:fileURL
                                 captureSessionID:captureSessionID
                                completionHandler:completionHandler
                                          context:context];
    }];
}

- (void)stopRecordingWithContext:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState stopRecordingWithResource:_captureResource context:context];
    }];
}

- (void)cancelRecordingWithContext:(NSString *)context
{
    SCTraceResumeToken resumeToken = SCTraceCapture();
    [_queuePerformer perform:^{
        SCTraceResume(resumeToken);
        [_currentState cancelRecordingWithResource:_captureResource context:context];
    }];
}

- (void)captureStillImageAsynchronouslyWithAspectRatio:(CGFloat)aspectRatio
                                      captureSessionID:(NSString *)captureSessionID
                                     completionHandler:
                                         (sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                                               context:(NSString *)context
{
    [_queuePerformer perform:^() {
        [_currentState captureStillImageWithResource:_captureResource
                                         aspectRatio:aspectRatio
                                    captureSessionID:captureSessionID
                                   completionHandler:completionHandler
                                             context:context];
    }];
}

- (void)startScanAsynchronouslyWithScanConfiguration:(SCScanConfiguration *)configuration context:(NSString *)context
{
    [_queuePerformer perform:^() {
        [_currentState startScanWithScanConfiguration:configuration resource:_captureResource context:context];
    }];
}

- (void)stopScanAsynchronouslyWithCompletionHandler:(dispatch_block_t)completionHandler context:(NSString *)context
{
    [_queuePerformer perform:^() {
        [_currentState stopScanWithCompletionHandler:completionHandler resource:_captureResource context:context];
    }];
}

- (void)currentState:(SCCaptureBaseState *)state
    requestToTransferToNewState:(SCCaptureStateMachineStateId)newState
                        payload:(SCStateTransitionPayload *)payload
                        context:(NSString *)context
{
    SCAssertPerformer(_queuePerformer);
    SCAssert(_currentState == state, @"state: %@ newState: %@ context:%@", SCCaptureStateName([state stateId]),
             SCCaptureStateName(newState), context);
    if (payload) {
        SCAssert(payload.fromState == [state stateId], @"From state id check");
        SCAssert(payload.toState == newState, @"To state id check");
    }

    if (_currentState != state) {
        return;
    }

    [_bookKeeper stateTransitionFrom:[state stateId] to:newState context:context];
    [self _setCurrentState:newState payload:payload context:context];
}

@end
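The _setCurrentState:payload:context: switch above repeats the same lazily-create-then-cache step once per state. A table-driven sketch of the same idea, shown only to make the pattern explicit and not part of the shipped code:

- (SCCaptureBaseState *)_stateForId:(SCCaptureStateMachineStateId)stateId
{
    static NSDictionary<SCCaptureStateKey *, Class> *stateClasses;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        stateClasses = @{
            @(SCCaptureUninitializedStateId) : [SCCaptureUninitializedState class],
            @(SCCaptureInitializedStateId) : [SCCaptureInitializedState class],
            @(SCCaptureRunningStateId) : [SCCaptureRunningState class],
            @(SCCaptureImageStateId) : [SCCaptureImageState class],
            @(SCCaptureImageWhileRecordingStateId) : [SCCaptureImageWhileRecordingState class],
            @(SCCaptureScanningStateId) : [SCCaptureScanningState class],
            @(SCCaptureRecordingStateId) : [SCCaptureRecordingState class],
        };
    });
    SCCaptureBaseState *state = _states[@(stateId)];
    if (!state) {
        Class stateClass = stateClasses[@(stateId)];
        SCAssert(stateClass, @"illegal state id");
        // All states share the same designated initializer, so one line covers every case.
        state = [[stateClass alloc] initWithPerformer:_queuePerformer bookKeeper:_bookKeeper delegate:self];
        _states[@(stateId)] = state;
    }
    return state;
}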
37
ManagedCapturer/StateMachine/SCCaptureStateUtil.h
Normal file
@ -0,0 +1,37 @@
//
// SCCaptureStateUtil.h
// Snapchat
//
// Created by Lin Jia on 10/27/17.
//
//

#import "SCLogger+Camera.h"

#import <SCBase/SCMacros.h>
#import <SCFoundation/SCLog.h>

#import <Foundation/Foundation.h>

#define SCLogCaptureStateMachineInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__)
#define SCLogCaptureStateMachineError(fmt, ...) SCLogCoreCameraError(@"[SCCaptureStateMachine] " fmt, ##__VA_ARGS__)

typedef NSNumber SCCaptureStateKey;

typedef NS_ENUM(NSUInteger, SCCaptureStateMachineStateId) {
    SCCaptureBaseStateId = 0,
    SCCaptureUninitializedStateId,
    SCCaptureInitializedStateId,
    SCCaptureImageStateId,
    SCCaptureImageWhileRecordingStateId,
    SCCaptureRunningStateId,
    SCCaptureRecordingStateId,
    SCCaptureScanningStateId,
    SCCaptureStateMachineStateIdCount
};

SC_EXTERN_C_BEGIN

NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId);

SC_EXTERN_C_END
38
ManagedCapturer/StateMachine/SCCaptureStateUtil.m
Normal file
@ -0,0 +1,38 @@
//
// SCCaptureStateUtil.m
// Snapchat
//
// Created by Lin Jia on 10/27/17.
//
//

#import "SCCaptureStateUtil.h"

#import <SCFoundation/SCAppEnvironment.h>
#import <SCFoundation/SCAssertWrapper.h>

NSString *SCCaptureStateName(SCCaptureStateMachineStateId stateId)
{
    switch (stateId) {
    case SCCaptureBaseStateId:
        return @"SCCaptureBaseStateId";
    case SCCaptureUninitializedStateId:
        return @"SCCaptureUninitializedStateId";
    case SCCaptureInitializedStateId:
        return @"SCCaptureInitializedStateId";
    case SCCaptureImageStateId:
        return @"SCCaptureImageStateId";
    case SCCaptureImageWhileRecordingStateId:
        return @"SCCaptureImageWhileRecordingStateId";
    case SCCaptureRunningStateId:
        return @"SCCaptureRunningStateId";
    case SCCaptureRecordingStateId:
        return @"SCCaptureRecordingStateId";
    case SCCaptureScanningStateId:
        return @"SCCaptureScanningStateId";
    default:
        SCCAssert(NO, @"illegal state id");
        break;
    }
    return @"SCIllegalStateId";
}
12
ManagedCapturer/StateMachine/SCManagedCapturerLogging.h
Normal file
@ -0,0 +1,12 @@
//
// SCManagedCapturerLogging.h
// Snapchat
//
// Created by Lin Jia on 11/13/17.
//

#import <SCFoundation/SCLog.h>

#define SCLogCapturerInfo(fmt, ...) SCLogCoreCameraInfo(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
#define SCLogCapturerWarning(fmt, ...) SCLogCoreCameraWarning(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
#define SCLogCapturerError(fmt, ...) SCLogCoreCameraError(@"[SCManagedCapturer] " fmt, ##__VA_ARGS__)
22
ManagedCapturer/StateMachine/States/SCCaptureImageState.h
Normal file
@ -0,0 +1,22 @@
//
// SCCaptureImageState.h
// Snapchat
//
// Created by Lin Jia on 1/8/18.
//

#import "SCCaptureBaseState.h"

#import <Foundation/Foundation.h>

@class SCQueuePerformer;

@interface SCCaptureImageState : SCCaptureBaseState

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;

@end
65
ManagedCapturer/StateMachine/States/SCCaptureImageState.m
Normal file
@ -0,0 +1,65 @@
//
// SCCaptureImageState.m
// Snapchat
//
// Created by Lin Jia on 1/8/18.
//

#import "SCCaptureImageState.h"

#import "SCCaptureImageStateTransitionPayload.h"
#import "SCManagedCapturerV1_Private.h"
#import "SCStateTransitionPayload.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>

@interface SCCaptureImageState () {
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureImageState

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        _delegate = delegate;
        _performer = performer;
    }
    return self;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCAssert(payload.toState == [self stateId], @"");
    if (![payload isKindOfClass:[SCCaptureImageStateTransitionPayload class]]) {
        SCAssertFail(@"wrong payload passed in");
        [_delegate currentState:self requestToTransferToNewState:payload.fromState payload:nil context:context];
        return;
    }
    SCCaptureImageStateTransitionPayload *captureImagePayload = (SCCaptureImageStateTransitionPayload *)payload;

    [SCCaptureWorker
        captureStillImageWithCaptureResource:resource
                                 aspectRatio:captureImagePayload.aspectRatio
                            captureSessionID:captureImagePayload.captureSessionID
                      shouldCaptureFromVideo:[SCCaptureWorker shouldCaptureImageFromVideoWithResource:resource]
                           completionHandler:captureImagePayload.block
                                     context:context];

    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];
}

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureImageStateId;
}
@end
@ -0,0 +1,29 @@
//
// SCCaptureImageStateTransitionPayload.h
// Snapchat
//
// Created by Lin Jia on 1/9/18.
//

#import "SCCaptureCommon.h"
#import "SCStateTransitionPayload.h"

#import <Foundation/Foundation.h>

@interface SCCaptureImageStateTransitionPayload : SCStateTransitionPayload

@property (nonatomic, readonly, strong) NSString *captureSessionID;

@property (nonatomic, readonly, copy) sc_managed_capturer_capture_still_image_completion_handler_t block;

@property (nonatomic, readonly, assign) CGFloat aspectRatio;

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                 captureSessionId:(NSString *)captureSessionID
                      aspectRatio:(CGFloat)aspectRatio
                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block;

@end
@ -0,0 +1,27 @@
//
// SCCaptureImageStateTransitionPayload.m
// Snapchat
//
// Created by Lin Jia on 1/9/18.
//

#import "SCCaptureImageStateTransitionPayload.h"

@implementation SCCaptureImageStateTransitionPayload

- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                 captureSessionId:(NSString *)captureSessionID
                      aspectRatio:(CGFloat)aspectRatio
                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block
{
    self = [super initWithFromState:fromState toState:toState];
    if (self) {
        _captureSessionID = captureSessionID;
        _aspectRatio = aspectRatio;
        _block = block;
    }
    return self;
}

@end
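For completeness, this is presumably how a running state hands off to the image state using the payload above; the call site itself is not part of this excerpt, but it mirrors what SCCaptureRecordingState does below for the while-recording variant:

SCCaptureImageStateTransitionPayload *payload =
    [[SCCaptureImageStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId
                                                            toState:SCCaptureImageStateId
                                                   captureSessionId:captureSessionID
                                                        aspectRatio:aspectRatio
                                                  completionHandler:completionHandler];
// The delegate (SCCaptureStateMachineContext) performs the transition and
// forwards the payload to the image state's didBecomeCurrentState:.
[_delegate currentState:self
    requestToTransferToNewState:SCCaptureImageStateId
                        payload:payload
                        context:context];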
@ -0,0 +1,22 @@
//
// SCCaptureImageWhileRecordingState.h
// Snapchat
//
// Created by Sun Lei on 22/02/2018.
//

#import "SCCaptureBaseState.h"

#import <Foundation/Foundation.h>

@class SCQueuePerformer;

@interface SCCaptureImageWhileRecordingState : SCCaptureBaseState

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;

@end
@ -0,0 +1,85 @@
//
// SCCaptureImageWhileRecordingState.m
// Snapchat
//
// Created by Sun Lei on 22/02/2018.
//

#import "SCCaptureImageWhileRecordingState.h"

#import "SCCaptureImageWhileRecordingStateTransitionPayload.h"
#import "SCManagedCapturerV1_Private.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>

@interface SCCaptureImageWhileRecordingState () {
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureImageWhileRecordingState

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        _delegate = delegate;
        _performer = performer;
    }
    return self;
}

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureImageWhileRecordingStateId;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCAssert(payload.fromState == SCCaptureRecordingStateId, @"");
    SCAssert(payload.toState == [self stateId], @"");
    SCAssert([payload isKindOfClass:[SCCaptureImageWhileRecordingStateTransitionPayload class]], @"");
    SCCaptureImageWhileRecordingStateTransitionPayload *captureImagePayload =
        (SCCaptureImageWhileRecordingStateTransitionPayload *)payload;

    @weakify(self);
    sc_managed_capturer_capture_still_image_completion_handler_t block =
        ^(UIImage *fullScreenImage, NSDictionary *metadata, NSError *error, SCManagedCapturerState *state) {
            captureImagePayload.block(fullScreenImage, metadata, error, state);
            [_performer perform:^{
                @strongify(self);
                [self _cancelRecordingWithContext:context resource:resource];
            }];
        };

    [SCCaptureWorker
        captureStillImageWithCaptureResource:resource
                                 aspectRatio:captureImagePayload.aspectRatio
                            captureSessionID:captureImagePayload.captureSessionID
                      shouldCaptureFromVideo:[SCCaptureWorker shouldCaptureImageFromVideoWithResource:resource]
                           completionHandler:block
                                     context:context];

    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];
}

- (void)_cancelRecordingWithContext:(NSString *)context resource:(SCCaptureResource *)resource
{
    SCTraceODPCompatibleStart(2);
    SCAssertPerformer(_performer);

    [SCCaptureWorker cancelRecordingWithCaptureResource:resource];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}
@end
@ -0,0 +1,29 @@
//
// SCCaptureImageWhileRecordingStateTransitionPayload.h
// Snapchat
//
// Created by Sun Lei on 22/02/2018.
//

#import "SCCaptureCommon.h"
#import "SCStateTransitionPayload.h"

#import <Foundation/Foundation.h>

@interface SCCaptureImageWhileRecordingStateTransitionPayload : SCStateTransitionPayload

@property (nonatomic, readonly, strong) NSString *captureSessionID;

@property (nonatomic, readonly, copy) sc_managed_capturer_capture_still_image_completion_handler_t block;

@property (nonatomic, readonly, assign) CGFloat aspectRatio;

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                 captureSessionId:(NSString *)captureSessionID
                      aspectRatio:(CGFloat)aspectRatio
                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block;

@end
@ -0,0 +1,27 @@
//
// SCCaptureImageWhileRecordingStateTransitionPayload.m
// Snapchat
//
// Created by Sun Lei on 22/02/2018.
//

#import "SCCaptureImageWhileRecordingStateTransitionPayload.h"

@implementation SCCaptureImageWhileRecordingStateTransitionPayload

- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                 captureSessionId:(NSString *)captureSessionID
                      aspectRatio:(CGFloat)aspectRatio
                completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)block
{
    self = [super initWithFromState:fromState toState:toState];
    if (self) {
        _captureSessionID = captureSessionID;
        _aspectRatio = aspectRatio;
        _block = block;
    }
    return self;
}

@end
@ -0,0 +1,22 @@
//
// SCCaptureInitializedState.h
// Snapchat
//
// Created by Jingtian Yang on 20/12/2017.
//

#import "SCCaptureBaseState.h"

#import <Foundation/Foundation.h>

@class SCQueuePerformer;

@interface SCCaptureInitializedState : SCCaptureBaseState

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;

@end
@ -0,0 +1,68 @@
//
// SCCaptureInitializedState.m
// Snapchat
//
// Created by Jingtian Yang on 20/12/2017.
//

#import "SCCaptureInitializedState.h"

#import "SCCapturerToken.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerV1_Private.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>

@interface SCCaptureInitializedState () {
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}

@end

@implementation SCCaptureInitializedState

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        _delegate = delegate;
        _performer = performer;
    }
    return self;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    // No op.
}

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureInitializedStateId;
}

- (void)startRunningWithCapturerToken:(SCCapturerToken *)token
                             resource:(SCCaptureResource *)resource
                    completionHandler:(dispatch_block_t)completionHandler
                              context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token);

    [SCCaptureWorker startRunningWithCaptureResource:resource token:token completionHandler:completionHandler];

    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

@end
@ -0,0 +1,22 @@
//
// SCCaptureRecordingState.h
// Snapchat
//
// Created by Jingtian Yang on 12/01/2018.
//

#import "SCCaptureBaseState.h"

#import <Foundation/Foundation.h>

@class SCQueuePerformer;

@interface SCCaptureRecordingState : SCCaptureBaseState

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;

@end
114
ManagedCapturer/StateMachine/States/SCCaptureRecordingState.m
Normal file
@ -0,0 +1,114 @@
//
//  SCCaptureRecordingState.m
//  Snapchat
//
//  Created by Jingtian Yang on 12/01/2018.
//

#import "SCCaptureRecordingState.h"

#import "SCCaptureImageWhileRecordingStateTransitionPayload.h"
#import "SCCaptureRecordingStateTransitionPayload.h"
#import "SCManagedCapturerV1_Private.h"
#import "SCStateTransitionPayload.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>

@interface SCCaptureRecordingState () {
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}
@end

@implementation SCCaptureRecordingState

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        _delegate = delegate;
        _performer = performer;
    }
    return self;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    SCAssertPerformer(resource.queuePerformer);
    SCAssert(payload.toState == [self stateId], @"");
    if (![payload isKindOfClass:[SCCaptureRecordingStateTransitionPayload class]]) {
        SCAssertFail(@"Wrong payload passed in");
        [_delegate currentState:self requestToTransferToNewState:payload.fromState payload:nil context:context];
        return;
    }

    SCCaptureRecordingStateTransitionPayload *recordingPayload = (SCCaptureRecordingStateTransitionPayload *)payload;
    [SCCaptureWorker startRecordingWithCaptureResource:resource
                                        outputSettings:recordingPayload.outputSettings
                                    audioConfiguration:recordingPayload.configuration
                                           maxDuration:recordingPayload.maxDuration
                                               fileURL:recordingPayload.fileURL
                                      captureSessionID:recordingPayload.captureSessionID
                                     completionHandler:recordingPayload.block];
}

- (void)stopRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertPerformer(_performer);

    [SCCaptureWorker stopRecordingWithCaptureResource:resource];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertPerformer(_performer);

    [SCCaptureWorker cancelRecordingWithCaptureResource:resource];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureRecordingStateId;
}

- (void)captureStillImageWithResource:(SCCaptureResource *)resource
                          aspectRatio:(CGFloat)aspectRatio
                     captureSessionID:(NSString *)captureSessionID
                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                              context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCCaptureImageWhileRecordingStateTransitionPayload *payload = [
        [SCCaptureImageWhileRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRecordingStateId
                                                                             toState:SCCaptureImageWhileRecordingStateId
                                                                    captureSessionId:captureSessionID
                                                                         aspectRatio:aspectRatio
                                                                   completionHandler:completionHandler];
    [_delegate currentState:self
        requestToTransferToNewState:SCCaptureImageWhileRecordingStateId
                            payload:payload
                            context:context];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

@end
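Every state method above closes with the same two-line book-keeping idiom. A minimal sketch of a helper that could fold it into one call — hypothetical, not part of this commit, and assuming SCCaptureBaseState keeps exposing its bookKeeper property:

// Hypothetical helper, callable from any state method as:
//   SCCaptureStateLogAPICall(self, _cmd, context);
static inline void SCCaptureStateLogAPICall(SCCaptureBaseState *state, SEL cmd, NSString *context)
{
    NSString *apiName = [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([state class]),
                                                      NSStringFromSelector(cmd)];
    [state.bookKeeper logAPICalled:apiName context:context];
}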
@ -0,0 +1,41 @@
//
//  SCCaptureRecordingStateTransitionPayload.h
//  Snapchat
//
//  Created by Jingtian Yang on 12/01/2018.
//

#import "SCCaptureCommon.h"
#import "SCManagedVideoCapturerOutputSettings.h"
#import "SCStateTransitionPayload.h"

#import <SCAudio/SCAudioConfiguration.h>

#import <Foundation/Foundation.h>

@interface SCCaptureRecordingStateTransitionPayload : SCStateTransitionPayload

@property (nonatomic, readonly, strong) SCManagedVideoCapturerOutputSettings *outputSettings;

@property (nonatomic, readonly, strong) SCAudioConfiguration *configuration;

@property (nonatomic, readonly, assign) NSTimeInterval maxDuration;

@property (nonatomic, readonly, strong) NSURL *fileURL;

@property (nonatomic, readonly, strong) NSString *captureSessionID;

@property (nonatomic, readonly, copy) sc_managed_capturer_start_recording_completion_handler_t block;

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                   outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
               audioConfiguration:(SCAudioConfiguration *)configuration
                      maxDuration:(NSTimeInterval)maxDuration
                          fileURL:(NSURL *)fileURL
                 captureSessionID:(NSString *)captureSessionID
                completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)block;

@end
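A minimal construction sketch, mirroring the call SCCaptureRunningState makes in its startRecording method later in this commit; `settings`, `audioConfig`, `url`, `sessionID`, and `handler` are placeholder locals, and the 60-second cap is an arbitrary example value:

// Placeholder locals stand in for real capture configuration.
SCCaptureRecordingStateTransitionPayload *payload =
    [[SCCaptureRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId
                                                                toState:SCCaptureRecordingStateId
                                                         outputSettings:settings
                                                     audioConfiguration:audioConfig
                                                            maxDuration:60.0
                                                                fileURL:url
                                                       captureSessionID:sessionID
                                                      completionHandler:handler];

The `block` property is declared copy so the handler survives past the caller's stack frame; the recording state hands it to SCCaptureWorker, which fires it asynchronously.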
@ -0,0 +1,33 @@
//
//  SCCaptureRecordingStateTransitionPayload.m
//  Snapchat
//
//  Created by Jingtian Yang on 12/01/2018.
//

#import "SCCaptureRecordingStateTransitionPayload.h"

@implementation SCCaptureRecordingStateTransitionPayload

- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState
                          toState:(SCCaptureStateMachineStateId)toState
                   outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
               audioConfiguration:(SCAudioConfiguration *)configuration
                      maxDuration:(NSTimeInterval)maxDuration
                          fileURL:(NSURL *)fileURL
                 captureSessionID:(NSString *)captureSessionID
                completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)block
{
    self = [super initWithFromState:fromState toState:toState];
    if (self) {
        _outputSettings = outputSettings;
        _configuration = configuration;
        _maxDuration = maxDuration;
        _fileURL = fileURL;
        _captureSessionID = captureSessionID;
        _block = block;
    }
    return self;
}

@end
22
ManagedCapturer/StateMachine/States/SCCaptureRunningState.h
Normal file
@ -0,0 +1,22 @@
//
//  SCCaptureRunningState.h
//  Snapchat
//
//  Created by Jingtian Yang on 08/01/2018.
//

#import "SCCaptureBaseState.h"

#import <Foundation/Foundation.h>

@class SCQueuePerformer;

@interface SCCaptureRunningState : SCCaptureBaseState

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;

@end
176
ManagedCapturer/StateMachine/States/SCCaptureRunningState.m
Normal file
@ -0,0 +1,176 @@
//
//  SCCaptureRunningState.m
//  Snapchat
//
//  Created by Jingtian Yang on 08/01/2018.
//

#import "SCCaptureRunningState.h"

#import "SCCaptureImageStateTransitionPayload.h"
#import "SCCaptureRecordingStateTransitionPayload.h"
#import "SCCaptureWorker.h"
#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerV1_Private.h"
#import "SCScanConfiguration.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>

@interface SCCaptureRunningState () {
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}

@end

@implementation SCCaptureRunningState

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        _delegate = delegate;
        _performer = performer;
    }
    return self;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    // No op.
}

- (void)captureStillImageWithResource:(SCCaptureResource *)resource
                          aspectRatio:(CGFloat)aspectRatio
                     captureSessionID:(NSString *)captureSessionID
                    completionHandler:(sc_managed_capturer_capture_still_image_completion_handler_t)completionHandler
                              context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCCaptureImageStateTransitionPayload *payload =
        [[SCCaptureImageStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId
                                                                toState:SCCaptureImageStateId
                                                       captureSessionId:captureSessionID
                                                            aspectRatio:aspectRatio
                                                      completionHandler:completionHandler];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureImageStateId payload:payload context:context];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureRunningStateId;
}

- (void)startRunningWithCapturerToken:(SCCapturerToken *)token
                             resource:(SCCaptureResource *)resource
                    completionHandler:(dispatch_block_t)completionHandler
                              context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"startRunningAsynchronouslyWithCompletionHandler called. token: %@", token);
    [SCCaptureWorker startRunningWithCaptureResource:resource token:token completionHandler:completionHandler];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

- (void)stopRunningWithCapturerToken:(SCCapturerToken *)token
                            resource:(SCCaptureResource *)resource
                   completionHandler:(sc_managed_capturer_stop_running_completion_handler_t)completionHandler
                             context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertPerformer(_performer);

    SCLogCapturerInfo(@"Stop running asynchronously. token:%@", token);
    if ([[SCManagedCapturerV1 sharedInstance] stopRunningWithCaptureToken:token
                                                        completionHandler:completionHandler
                                                                  context:context]) {
        [_delegate currentState:self
            requestToTransferToNewState:SCCaptureInitializedStateId
                                payload:nil
                                context:context];
    }

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

- (void)startScanWithScanConfiguration:(SCScanConfiguration *)configuration
                              resource:(SCCaptureResource *)resource
                               context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Start scan on preview asynchronously. configuration:%@", configuration);
    SCAssertPerformer(_performer);
    [SCCaptureWorker startScanWithScanConfiguration:configuration resource:resource];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureScanningStateId payload:nil context:context];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

- (void)prepareForRecordingWithResource:(SCCaptureResource *)resource
                     audioConfiguration:(SCAudioConfiguration *)configuration
                                context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCTraceODPCompatibleStart(2);
    [SCCaptureWorker prepareForRecordingWithAudioConfiguration:configuration resource:resource];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

- (void)startRecordingWithResource:(SCCaptureResource *)resource
                audioConfiguration:(SCAudioConfiguration *)configuration
                    outputSettings:(SCManagedVideoCapturerOutputSettings *)outputSettings
                       maxDuration:(NSTimeInterval)maxDuration
                           fileURL:(NSURL *)fileURL
                  captureSessionID:(NSString *)captureSessionID
                 completionHandler:(sc_managed_capturer_start_recording_completion_handler_t)completionHandler
                           context:(NSString *)context
{
    SCTraceODPCompatibleStart(2);
    SCAssertPerformer(_performer);

    SCCaptureRecordingStateTransitionPayload *payload =
        [[SCCaptureRecordingStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId
                                                                    toState:SCCaptureRecordingStateId
                                                             outputSettings:outputSettings
                                                         audioConfiguration:configuration
                                                                maxDuration:maxDuration
                                                                    fileURL:fileURL
                                                           captureSessionID:captureSessionID
                                                          completionHandler:completionHandler];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureRecordingStateId payload:payload context:context];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    // Intentionally a no-op; this will be removed once CCAM-13851 is resolved.
    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

@end
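A minimal wiring sketch for this state — hypothetical, assuming the owning state machine holds a serial SCQueuePerformer and a book keeper, and itself acts as the SCCaptureStateDelegate; `performer`, `bookKeeper`, and `stateMachineContext` are placeholder locals:

// Constructed once by the state machine; every API on the state asserts the
// performer, so calls must already be on the state machine's queue.
SCCaptureRunningState *runningState =
    [[SCCaptureRunningState alloc] initWithPerformer:performer
                                          bookKeeper:bookKeeper
                                            delegate:stateMachineContext];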
18
ManagedCapturer/StateMachine/States/SCCaptureScanningState.h
Normal file
@ -0,0 +1,18 @@
//
//  SCCaptureScanningState.h
//  Snapchat
//
//  Created by Xiaokang Liu on 09/01/2018.
//

#import "SCCaptureBaseState.h"

@class SCQueuePerformer;

@interface SCCaptureScanningState : SCCaptureBaseState
- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;
@end
75
ManagedCapturer/StateMachine/States/SCCaptureScanningState.m
Normal file
@ -0,0 +1,75 @@
//
//  SCCaptureScanningState.m
//  Snapchat
//
//  Created by Xiaokang Liu on 09/01/2018.
//

#import "SCCaptureScanningState.h"

#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerV1_Private.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>

@interface SCCaptureScanningState () {
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}

@end

@implementation SCCaptureScanningState
- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        SCAssert(delegate, @"");
        SCAssert(performer, @"");
        SCAssert(bookKeeper, @"");
        _delegate = delegate;
        _performer = performer;
    }
    return self;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    // No op.
}

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureScanningStateId;
}

- (void)stopScanWithCompletionHandler:(dispatch_block_t)completionHandler
                             resource:(SCCaptureResource *)resource
                              context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Stop scan asynchronously.");
    [SCCaptureWorker stopScanWithCompletionHandler:completionHandler resource:resource];
    [_delegate currentState:self requestToTransferToNewState:SCCaptureRunningStateId payload:nil context:context];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

- (void)cancelRecordingWithResource:(SCCaptureResource *)resource context:(NSString *)context
{
    // Intentionally a no-op; this will be removed once CCAM-13851 is resolved.
    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

@end
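A usage sketch for leaving the scanning state — hypothetical caller assumed to already be on the state machine's queue; `scanningState`, `resource`, and `context` are placeholder locals:

// The completion block runs once SCCaptureWorker has torn down the scan.
[scanningState stopScanWithCompletionHandler:^{
                   // Scan session torn down.
               }
                                    resource:resource
                                     context:context];

The state requests the transition back to SCCaptureRunningStateId itself, so callers never touch the state machine directly.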
@ -0,0 +1,26 @@
//
//  SCCaptureUninitializedState.h
//  Snapchat
//
//  Created by Lin Jia on 10/19/17.
//
//

#import "SCCaptureBaseState.h"

#import <Foundation/Foundation.h>

/*
 State which handles capture initialization, which should happen only once per app life span.
 */
@class SCQueuePerformer;

@interface SCCaptureUninitializedState : SCCaptureBaseState

- (instancetype)init NS_UNAVAILABLE;

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate;

@end
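A sketch of the one call this state exists to handle — hypothetical; `uninitializedState`, `resource`, and `context` are placeholder locals, and SCManagedCaptureDevicePositionBack is an assumed case name on the SCManagedCaptureDevicePosition enum:

// Assumed enum case name for the back camera.
[uninitializedState initializeCaptureWithDevicePosition:SCManagedCaptureDevicePositionBack
                                               resource:resource
                                      completionHandler:^{
                                          // Capture pipeline is set up; the machine has
                                          // moved on to SCCaptureInitializedStateId.
                                      }
                                                context:context];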
@ -0,0 +1,70 @@
//
//  SCCaptureUninitializedState.m
//  Snapchat
//
//  Created by Lin Jia on 10/19/17.
//
//

#import "SCCaptureUninitializedState.h"

#import "SCManagedCapturerLogging.h"
#import "SCManagedCapturerV1_Private.h"

#import <SCFoundation/SCAssertWrapper.h>
#import <SCFoundation/SCQueuePerformer.h>
#import <SCFoundation/SCTraceODPCompatible.h>

@interface SCCaptureUninitializedState () {
    __weak id<SCCaptureStateDelegate> _delegate;
    SCQueuePerformer *_performer;
}

@end

@implementation SCCaptureUninitializedState

- (instancetype)initWithPerformer:(SCQueuePerformer *)performer
                       bookKeeper:(SCCaptureStateMachineBookKeeper *)bookKeeper
                         delegate:(id<SCCaptureStateDelegate>)delegate
{
    self = [super initWithPerformer:performer bookKeeper:bookKeeper delegate:delegate];
    if (self) {
        _delegate = delegate;
        _performer = performer;
    }
    return self;
}

- (void)didBecomeCurrentState:(SCStateTransitionPayload *)payload
                     resource:(SCCaptureResource *)resource
                      context:(NSString *)context
{
    // No op.
}

- (SCCaptureStateMachineStateId)stateId
{
    return SCCaptureUninitializedStateId;
}

- (void)initializeCaptureWithDevicePosition:(SCManagedCaptureDevicePosition)devicePosition
                                   resource:(SCCaptureResource *)resource
                          completionHandler:(dispatch_block_t)completionHandler
                                    context:(NSString *)context
{
    SCAssertPerformer(_performer);
    SCTraceODPCompatibleStart(2);
    SCLogCapturerInfo(@"Setting up with devicePosition:%lu", (unsigned long)devicePosition);

    // TODO: push completionHandler into a transition payload and let the initialized state handle it.
    [[SCManagedCapturerV1 sharedInstance] setupWithDevicePosition:devicePosition completionHandler:completionHandler];

    [_delegate currentState:self requestToTransferToNewState:SCCaptureInitializedStateId payload:nil context:context];

    NSString *apiName =
        [NSString sc_stringWithFormat:@"%@/%@", NSStringFromClass([self class]), NSStringFromSelector(_cmd)];
    [self.bookKeeper logAPICalled:apiName context:context];
}

@end
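Note that the transition to SCCaptureInitializedStateId is requested immediately after setup is kicked off, not inside setup's completion handler; the TODO above tracks moving that handler into a transition payload so the initialized state can take it over.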
@ -0,0 +1,22 @@
//
//  SCStateTransitionPayload.h
//  Snapchat
//
//  Created by Lin Jia on 1/8/18.
//

#import "SCCaptureStateUtil.h"

#import <Foundation/Foundation.h>

@interface SCStateTransitionPayload : NSObject

@property (nonatomic, readonly, assign) SCCaptureStateMachineStateId fromState;

@property (nonatomic, readonly, assign) SCCaptureStateMachineStateId toState;

SC_INIT_AND_NEW_UNAVAILABLE

- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState;

@end
@ -0,0 +1,27 @@
//
//  SCStateTransitionPayload.m
//  Snapchat
//
//  Created by Lin Jia on 1/8/18.
//

#import "SCStateTransitionPayload.h"

#import <SCFoundation/SCAssertWrapper.h>

@implementation SCStateTransitionPayload

- (instancetype)initWithFromState:(SCCaptureStateMachineStateId)fromState toState:(SCCaptureStateMachineStateId)toState
{
    self = [super init];
    if (self) {
        SCAssert(fromState != toState, @"");
        SCAssert(fromState > SCCaptureBaseStateId && fromState < SCCaptureStateMachineStateIdCount, @"");
        SCAssert(toState > SCCaptureBaseStateId && toState < SCCaptureStateMachineStateIdCount, @"");
        _fromState = fromState;
        _toState = toState;
    }
    return self;
}

@end
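A quick sketch of what the asserts above permit — the two ids must be distinct and lie strictly between SCCaptureBaseStateId and SCCaptureStateMachineStateIdCount; the state ids used here are ones defined by this commit:

// Valid: two distinct, in-range state ids.
SCStateTransitionPayload *payload =
    [[SCStateTransitionPayload alloc] initWithFromState:SCCaptureRunningStateId
                                                toState:SCCaptureScanningStateId];

// Would trip SCAssert in debug builds: fromState == toState, or either id
// outside the open range (SCCaptureBaseStateId, SCCaptureStateMachineStateIdCount).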