Merge pull request #6 from Lax/update-201601224

Update 201601224
This commit is contained in:
Liu Lantao 2016-12-24 14:28:13 +08:00 committed by GitHub
commit b9bc5c12e1
1320 changed files with 77596 additions and 323 deletions

42
AVCam/LICENSE.txt Normal file
View File

@ -0,0 +1,42 @@
Sample code project: AVCam-iOS: Using AVFoundation to Capture Images and Movies
Version: 6.1
IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Copyright (C) 2016 Apple Inc. All Rights Reserved.

View File

@ -0,0 +1,310 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
2206265F1A1E330400A45150 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 2206265E1A1E330400A45150 /* main.m */; };
220626881A1E345E00A45150 /* AVCamAppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 220626831A1E345E00A45150 /* AVCamAppDelegate.m */; };
220626891A1E345E00A45150 /* AVCamPreviewView.m in Sources */ = {isa = PBXBuildFile; fileRef = 220626851A1E345E00A45150 /* AVCamPreviewView.m */; };
2206268A1A1E345E00A45150 /* AVCamCameraViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 220626871A1E345E00A45150 /* AVCamCameraViewController.m */; };
22CA31B81B022D1300D2DE70 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 22CA31B61B022D1300D2DE70 /* LaunchScreen.storyboard */; };
7A74447A1CEE6B0F00C70C83 /* AVCamPhotoCaptureDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7A7444791CEE6B0F00C70C83 /* AVCamPhotoCaptureDelegate.m */; };
7A74447C1CEE6B4B00C70C83 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7A74447B1CEE6B4B00C70C83 /* Assets.xcassets */; };
7A74447E1CEE6B5900C70C83 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7A74447D1CEE6B5900C70C83 /* Main.storyboard */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
220626591A1E330400A45150 /* AVCam.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = AVCam.app; sourceTree = BUILT_PRODUCTS_DIR; };
2206265D1A1E330400A45150 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
2206265E1A1E330400A45150 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
220626821A1E345E00A45150 /* AVCamAppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AVCamAppDelegate.h; sourceTree = "<group>"; };
220626831A1E345E00A45150 /* AVCamAppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AVCamAppDelegate.m; sourceTree = "<group>"; };
220626841A1E345E00A45150 /* AVCamPreviewView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AVCamPreviewView.h; sourceTree = "<group>"; };
220626851A1E345E00A45150 /* AVCamPreviewView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AVCamPreviewView.m; sourceTree = "<group>"; };
220626861A1E345E00A45150 /* AVCamCameraViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AVCamCameraViewController.h; sourceTree = "<group>"; };
220626871A1E345E00A45150 /* AVCamCameraViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AVCamCameraViewController.m; sourceTree = "<group>"; };
22CA31B71B022D1300D2DE70 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = AVCam/Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
22CA31B91B0250C300D2DE70 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = "<group>"; };
7A7444781CEE6B0F00C70C83 /* AVCamPhotoCaptureDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AVCamPhotoCaptureDelegate.h; sourceTree = "<group>"; };
7A7444791CEE6B0F00C70C83 /* AVCamPhotoCaptureDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AVCamPhotoCaptureDelegate.m; sourceTree = "<group>"; };
7A74447B1CEE6B4B00C70C83 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
7A74447D1CEE6B5900C70C83 /* Main.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; name = Main.storyboard; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
220626561A1E330400A45150 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
220626501A1E330400A45150 = {
isa = PBXGroup;
children = (
22CA31B91B0250C300D2DE70 /* README.md */,
2206265B1A1E330400A45150 /* AVCam */,
2206265A1A1E330400A45150 /* Products */,
);
sourceTree = "<group>";
};
2206265A1A1E330400A45150 /* Products */ = {
isa = PBXGroup;
children = (
220626591A1E330400A45150 /* AVCam.app */,
);
name = Products;
sourceTree = "<group>";
};
2206265B1A1E330400A45150 /* AVCam */ = {
isa = PBXGroup;
children = (
220626821A1E345E00A45150 /* AVCamAppDelegate.h */,
220626831A1E345E00A45150 /* AVCamAppDelegate.m */,
220626841A1E345E00A45150 /* AVCamPreviewView.h */,
220626851A1E345E00A45150 /* AVCamPreviewView.m */,
220626861A1E345E00A45150 /* AVCamCameraViewController.h */,
220626871A1E345E00A45150 /* AVCamCameraViewController.m */,
7A7444781CEE6B0F00C70C83 /* AVCamPhotoCaptureDelegate.h */,
7A7444791CEE6B0F00C70C83 /* AVCamPhotoCaptureDelegate.m */,
7A74447D1CEE6B5900C70C83 /* Main.storyboard */,
7A74447B1CEE6B4B00C70C83 /* Assets.xcassets */,
22CA31B61B022D1300D2DE70 /* LaunchScreen.storyboard */,
2206265D1A1E330400A45150 /* Info.plist */,
2206265E1A1E330400A45150 /* main.m */,
);
path = AVCam;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
220626581A1E330400A45150 /* AVCam */ = {
isa = PBXNativeTarget;
buildConfigurationList = 2206267C1A1E330400A45150 /* Build configuration list for PBXNativeTarget "AVCam" */;
buildPhases = (
220626551A1E330400A45150 /* Sources */,
220626561A1E330400A45150 /* Frameworks */,
220626571A1E330400A45150 /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = AVCam;
productName = AVCam;
productReference = 220626591A1E330400A45150 /* AVCam.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
220626511A1E330400A45150 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0800;
TargetAttributes = {
220626581A1E330400A45150 = {
CreatedOnToolsVersion = 6.1;
ProvisioningStyle = Automatic;
};
};
};
buildConfigurationList = 220626541A1E330400A45150 /* Build configuration list for PBXProject "AVCam Objective-C" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 220626501A1E330400A45150;
productRefGroup = 2206265A1A1E330400A45150 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
220626581A1E330400A45150 /* AVCam */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
220626571A1E330400A45150 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
7A74447E1CEE6B5900C70C83 /* Main.storyboard in Resources */,
7A74447C1CEE6B4B00C70C83 /* Assets.xcassets in Resources */,
22CA31B81B022D1300D2DE70 /* LaunchScreen.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
220626551A1E330400A45150 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
220626881A1E345E00A45150 /* AVCamAppDelegate.m in Sources */,
2206265F1A1E330400A45150 /* main.m in Sources */,
220626891A1E345E00A45150 /* AVCamPreviewView.m in Sources */,
2206268A1A1E345E00A45150 /* AVCamCameraViewController.m in Sources */,
7A74447A1CEE6B0F00C70C83 /* AVCamPhotoCaptureDelegate.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
22CA31B61B022D1300D2DE70 /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
22CA31B71B022D1300D2DE70 /* Base */,
);
name = LaunchScreen.storyboard;
path = ..;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
2206267A1A1E330400A45150 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_SYMBOLS_PRIVATE_EXTERN = NO;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
2206267B1A1E330400A45150 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = YES;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
2206267D1A1E330400A45150 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_IDENTITY = "iPhone Developer";
INFOPLIST_FILE = AVCam/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.AVCam";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = iphoneos;
};
name = Debug;
};
2206267E1A1E330400A45150 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_IDENTITY = "iPhone Developer";
INFOPLIST_FILE = AVCam/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.AVCam";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = iphoneos;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
220626541A1E330400A45150 /* Build configuration list for PBXProject "AVCam Objective-C" */ = {
isa = XCConfigurationList;
buildConfigurations = (
2206267A1A1E330400A45150 /* Debug */,
2206267B1A1E330400A45150 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
2206267C1A1E330400A45150 /* Build configuration list for PBXNativeTarget "AVCam" */ = {
isa = XCConfigurationList;
buildConfigurations = (
2206267D1A1E330400A45150 /* Debug */,
2206267E1A1E330400A45150 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 220626511A1E330400A45150 /* Project object */;
}

View File

@ -0,0 +1,91 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0800"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "220626581A1E330400A45150"
BuildableName = "AVCam.app"
BlueprintName = "AVCam"
ReferencedContainer = "container:AVCam Objective-C.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "220626581A1E330400A45150"
BuildableName = "AVCam.app"
BlueprintName = "AVCam"
ReferencedContainer = "container:AVCam Objective-C.xcodeproj">
</BuildableReference>
</MacroExpansion>
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "220626581A1E330400A45150"
BuildableName = "AVCam.app"
BlueprintName = "AVCam"
ReferencedContainer = "container:AVCam Objective-C.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "220626581A1E330400A45150"
BuildableName = "AVCam.app"
BlueprintName = "AVCam"
ReferencedContainer = "container:AVCam Objective-C.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>

View File

@ -0,0 +1,15 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Application delegate.
*/
@import UIKit;

// Minimal application delegate: it only holds the main window. The camera
// UI and all capture logic live in AVCamCameraViewController.
@interface AVCamAppDelegate : UIResponder <UIApplicationDelegate>

// The app's single window (set up by UIKit from the storyboard).
@property (nonatomic) UIWindow *window;

@end

View File

@ -0,0 +1,13 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Application delegate.
*/
#import "AVCamAppDelegate.h"

// Intentionally empty: the sample relies on UIApplication's default
// lifecycle behavior; presumably the main storyboard instantiates the
// initial view controller (verify against Info.plist).
@implementation AVCamAppDelegate
@end

View File

@ -0,0 +1,13 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
View controller for camera interface.
*/
@import UIKit;

// Public interface is intentionally empty: the controller's outlets, actions,
// and capture state are all declared in a private class extension in the .m,
// and the controller is wired up from the storyboard.
@interface AVCamCameraViewController : UIViewController
@end

View File

@ -0,0 +1,947 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this samples licensing information
Abstract:
View controller for camera interface.
*/
@import AVFoundation;
@import Photos;
#import "AVCamCameraViewController.h"
#import "AVCamPreviewView.h"
#import "AVCamPhotoCaptureDelegate.h"
// Unique KVO context pointer so this class can distinguish its own "running"
// observation from observations registered by superclasses.
static void * SessionRunningContext = &SessionRunningContext;

// Outcome of the authorization check + capture-session configuration.
typedef NS_ENUM( NSInteger, AVCamSetupResult ) {
    AVCamSetupResultSuccess,                    // Session is configured and ready to run.
    AVCamSetupResultCameraNotAuthorized,        // User denied (or has previously denied) camera access.
    AVCamSetupResultSessionConfigurationFailed  // An input/output could not be created or added.
};

// Segment indices of the photo/movie capture-mode control (values match the
// UISegmentedControl's selectedSegmentIndex).
typedef NS_ENUM( NSInteger, AVCamCaptureMode ) {
    AVCamCaptureModePhoto = 0,
    AVCamCaptureModeMovie = 1
};

// Whether Live Photo capture is requested for subsequent photo captures.
typedef NS_ENUM( NSInteger, AVCamLivePhotoMode ) {
    AVCamLivePhotoModeOn,
    AVCamLivePhotoModeOff
};
// Convenience addition on AVCaptureDeviceDiscoverySession: the number of
// distinct device positions (front, back, unspecified) represented among the
// session's discovered devices.
@interface AVCaptureDeviceDiscoverySession (Utilities)

- (NSInteger)uniqueDevicePositionsCount;

@end

@implementation AVCaptureDeviceDiscoverySession (Utilities)

// Returns how many distinct AVCaptureDevicePosition values appear in
// self.devices.
- (NSInteger)uniqueDevicePositionsCount
{
    // An NSMutableSet performs the uniqueness check in O(1) per device,
    // instead of the O(n) linear scan that -[NSArray containsObject:] would
    // do — O(n) overall rather than O(n^2). The resulting count is identical.
    NSMutableSet<NSNumber *> *uniquePositions = [NSMutableSet set];
    for ( AVCaptureDevice *device in self.devices ) {
        [uniquePositions addObject:@(device.position)];
    }
    return uniquePositions.count;
}

@end
// Private class extension: all storyboard outlets and capture state live
// here, keeping the public header empty.
@interface AVCamCameraViewController () <AVCaptureFileOutputRecordingDelegate>

// Session management.
@property (nonatomic, weak) IBOutlet AVCamPreviewView *previewView;          // Live camera preview; backed by the session.
@property (nonatomic, weak) IBOutlet UISegmentedControl *captureModeControl; // Photo/movie mode selector.
@property (nonatomic) AVCamSetupResult setupResult;     // Outcome of authorization + configureSession.
@property (nonatomic) dispatch_queue_t sessionQueue;    // Serial queue; all session mutation happens here.
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic, getter=isSessionRunning) BOOL sessionRunning; // Mirrors session.isRunning after start attempts.
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;       // The currently attached camera input.

// Device configuration.
@property (nonatomic, weak) IBOutlet UIButton *cameraButton;
@property (nonatomic, weak) IBOutlet UILabel *cameraUnavailableLabel;
@property (nonatomic) AVCaptureDeviceDiscoverySession *videoDeviceDiscoverySession; // Wide-angle + duo cameras, all positions.

// Capturing photos.
@property (nonatomic, weak) IBOutlet UIButton *photoButton;
@property (nonatomic, weak) IBOutlet UIButton *livePhotoModeButton;
@property (nonatomic) AVCamLivePhotoMode livePhotoMode; // Defaults to On when the output supports Live Photos.
@property (nonatomic, weak) IBOutlet UILabel *capturingLivePhotoLabel;
@property (nonatomic) AVCapturePhotoOutput *photoOutput;
// In-flight photo captures keyed by NSNumber — presumably the photo
// settings' uniqueID; confirm against the capture code.
@property (nonatomic) NSMutableDictionary<NSNumber *, AVCamPhotoCaptureDelegate *> *inProgressPhotoCaptureDelegates;
@property (nonatomic) NSInteger inProgressLivePhotoCapturesCount;

// Recording movies.
@property (nonatomic, weak) IBOutlet UIButton *recordButton;
@property (nonatomic, weak) IBOutlet UIButton *resumeButton; // Hidden again once the session resumes successfully.
@property (nonatomic, strong) AVCaptureMovieFileOutput *movieFileOutput; // nil while in photo mode.
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;  // Initialized to UIBackgroundTaskInvalid in configureSession.

@end
@implementation AVCamCameraViewController
#pragma mark View Controller Life Cycle
// Performs everything that can be done before camera permission is known:
// disables the UI, creates the session and session queue, checks/requests
// video authorization, then dispatches the actual session configuration to
// the session queue.
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Disable UI. The UI is enabled if and only if the session starts running.
    self.cameraButton.enabled = NO;
    self.recordButton.enabled = NO;
    self.photoButton.enabled = NO;
    self.livePhotoModeButton.enabled = NO;
    self.captureModeControl.enabled = NO;

    // Create the AVCaptureSession.
    self.session = [[AVCaptureSession alloc] init];

    // Create a device discovery session covering wide-angle and duo cameras
    // at any position.
    NSArray<AVCaptureDeviceType> *deviceTypes = @[AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInDuoCamera];
    self.videoDeviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified];

    // Set up the preview view.
    self.previewView.session = self.session;

    // Communicate with the session and other session objects on this queue.
    self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL );

    self.setupResult = AVCamSetupResultSuccess;

    /*
        Check video authorization status. Video access is required and audio
        access is optional. If audio access is denied, audio is not recorded
        during movie recording.
    */
    switch ( [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] )
    {
        case AVAuthorizationStatusAuthorized:
        {
            // The user has previously granted access to the camera.
            break;
        }
        case AVAuthorizationStatusNotDetermined:
        {
            /*
                The user has not yet been presented with the option to grant
                video access. We suspend the session queue to delay session
                setup until the access request has completed.

                Note that audio access will be implicitly requested when we
                create an AVCaptureDeviceInput for audio during session setup.
            */
            dispatch_suspend( self.sessionQueue );
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^( BOOL granted ) {
                if ( ! granted ) {
                    self.setupResult = AVCamSetupResultCameraNotAuthorized;
                }
                // Resume so the configureSession block queued below can run.
                dispatch_resume( self.sessionQueue );
            }];
            break;
        }
        default:
        {
            // The user has previously denied access.
            self.setupResult = AVCamSetupResultCameraNotAuthorized;
            break;
        }
    }

    /*
        Setup the capture session.
        In general it is not safe to mutate an AVCaptureSession or any of its
        inputs, outputs, or connections from multiple threads at the same time.

        Why not do all of this on the main queue?
        Because -[AVCaptureSession startRunning] is a blocking call which can
        take a long time. We dispatch session setup to the sessionQueue so
        that the main queue isn't blocked, which keeps the UI responsive.
    */
    dispatch_async( self.sessionQueue, ^{
        [self configureSession];
    } );
}
// Starts the session on the session queue when the view appears, or surfaces
// an alert on the main queue describing why setup failed.
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];

    dispatch_async( self.sessionQueue, ^{
        switch ( self.setupResult )
        {
            case AVCamSetupResultSuccess:
            {
                // Only setup observers and start the session running if setup succeeded.
                [self addObservers];
                [self.session startRunning];
                self.sessionRunning = self.session.isRunning;
                break;
            }
            case AVCamSetupResultCameraNotAuthorized:
            {
                // Camera permission was denied: explain, and offer a shortcut
                // to the app's privacy settings.
                dispatch_async( dispatch_get_main_queue(), ^{
                    NSString *message = NSLocalizedString( @"AVCam doesn't have permission to use the camera, please change privacy settings", @"Alert message when the user has denied access to the camera" );
                    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
                    UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
                    [alertController addAction:cancelAction];
                    // Provide quick access to Settings.
                    UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"Settings", @"Alert button to open Settings" ) style:UIAlertActionStyleDefault handler:^( UIAlertAction *action ) {
                        [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString] options:@{} completionHandler:nil];
                    }];
                    [alertController addAction:settingsAction];
                    [self presentViewController:alertController animated:YES completion:nil];
                } );
                break;
            }
            case AVCamSetupResultSessionConfigurationFailed:
            {
                // Session configuration failed (see configureSession); there
                // is nothing the user can fix, so just report it.
                dispatch_async( dispatch_get_main_queue(), ^{
                    NSString *message = NSLocalizedString( @"Unable to capture media", @"Alert message when something goes wrong during capture session configuration" );
                    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
                    UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
                    [alertController addAction:cancelAction];
                    [self presentViewController:alertController animated:YES completion:nil];
                } );
                break;
            }
        }
    } );
}
// Tears down the running session when the view goes away. The stop and the
// observer removal are queued on the session queue so they are ordered after
// any in-flight session work.
- (void)viewDidDisappear:(BOOL)animated
{
    dispatch_async( self.sessionQueue, ^{
        // Nothing to stop if setup never completed successfully.
        if ( self.setupResult != AVCamSetupResultSuccess ) {
            return;
        }
        [self.session stopRunning];
        [self removeObservers];
    } );

    [super viewDidDisappear:animated];
}
// Lock the interface orientation while a movie recording is in progress;
// allow rotation at all other times.
- (BOOL)shouldAutorotate
{
    BOOL isRecordingMovie = self.movieFileOutput.isRecording;
    return isRecordingMovie ? NO : YES;
}
// All orientations are supported; whether rotation actually happens is
// gated by -shouldAutorotate (disabled during movie recording).
- (UIInterfaceOrientationMask)supportedInterfaceOrientations
{
    return UIInterfaceOrientationMaskAll;
}
// Keeps the preview layer's video orientation in sync with the device as the
// interface rotates.
- (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id<UIViewControllerTransitionCoordinator>)coordinator
{
    [super viewWillTransitionToSize:size withTransitionCoordinator:coordinator];

    UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
    // Face-up, face-down, and unknown orientations are skipped: they have no
    // video-orientation equivalent, so the previous orientation is kept.
    if ( UIDeviceOrientationIsPortrait( deviceOrientation ) || UIDeviceOrientationIsLandscape( deviceOrientation ) ) {
        // NOTE(review): the direct cast relies on UIDeviceOrientation and
        // AVCaptureVideoOrientation raw values lining up for the portrait and
        // landscape cases — confirm against the SDK headers.
        self.previewView.videoPreviewLayer.connection.videoOrientation = (AVCaptureVideoOrientation)deviceOrientation;
    }
}
#pragma mark Session Management
// Call this on the session queue.
// Configures the capture session: photo preset, a video input (preferring
// the back duo camera), a best-effort audio input, and the photo output.
// Call this on the session queue. On any unrecoverable failure it sets
// setupResult to AVCamSetupResultSessionConfigurationFailed, commits the
// configuration, and returns early.
- (void)configureSession
{
    if ( self.setupResult != AVCamSetupResultSuccess ) {
        return;
    }

    NSError *error = nil;

    [self.session beginConfiguration];

    /*
        We do not create an AVCaptureMovieFileOutput when setting up the session because the
        AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto.
    */
    self.session.sessionPreset = AVCaptureSessionPresetPhoto;

    // Add video input.

    // Choose the back dual camera if available, otherwise default to a wide angle camera.
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDuoCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack];
    if ( ! videoDevice ) {
        // If the back dual camera is not available, default to the back wide angle camera.
        videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack];

        // In some cases where users break their phones, the back wide angle camera is not available. In this case, we should default to the front wide angle camera.
        if ( ! videoDevice ) {
            videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront];
        }
    }
    AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if ( ! videoDeviceInput ) {
        // Also covers videoDevice == nil (no camera at all).
        NSLog( @"Could not create video device input: %@", error );
        self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        [self.session commitConfiguration];
        return;
    }
    if ( [self.session canAddInput:videoDeviceInput] ) {
        [self.session addInput:videoDeviceInput];
        self.videoDeviceInput = videoDeviceInput;

        dispatch_async( dispatch_get_main_queue(), ^{
            /*
                Why are we dispatching this to the main queue?
                Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView
                can only be manipulated on the main thread.
                Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
                on the AVCaptureVideoPreviewLayer's connection with other session manipulation.

                Use the status bar orientation as the initial video orientation. Subsequent orientation changes are
                handled by -[AVCamCameraViewController viewWillTransitionToSize:withTransitionCoordinator:].
            */
            UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
            AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait;
            if ( statusBarOrientation != UIInterfaceOrientationUnknown ) {
                initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
            }

            self.previewView.videoPreviewLayer.connection.videoOrientation = initialVideoOrientation;
        } );
    }
    else {
        NSLog( @"Could not add video device input to the session" );
        self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        [self.session commitConfiguration];
        return;
    }

    // Add audio input. Failure here is deliberately non-fatal: the session
    // still captures video, movies are simply recorded without sound.
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
    if ( ! audioDeviceInput ) {
        NSLog( @"Could not create audio device input: %@", error );
    }
    if ( [self.session canAddInput:audioDeviceInput] ) {
        [self.session addInput:audioDeviceInput];
    }
    else {
        NSLog( @"Could not add audio device input to the session" );
    }

    // Add photo output.
    AVCapturePhotoOutput *photoOutput = [[AVCapturePhotoOutput alloc] init];
    if ( [self.session canAddOutput:photoOutput] ) {
        [self.session addOutput:photoOutput];
        self.photoOutput = photoOutput;

        self.photoOutput.highResolutionCaptureEnabled = YES;
        // Enable Live Photo capture (and default the UI mode to On) only when
        // the current device/configuration supports it.
        self.photoOutput.livePhotoCaptureEnabled = self.photoOutput.livePhotoCaptureSupported;
        self.livePhotoMode = self.photoOutput.livePhotoCaptureSupported ? AVCamLivePhotoModeOn : AVCamLivePhotoModeOff;

        self.inProgressPhotoCaptureDelegates = [NSMutableDictionary dictionary];
        self.inProgressLivePhotoCapturesCount = 0;
    }
    else {
        NSLog( @"Could not add photo output to the session" );
        self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        [self.session commitConfiguration];
        return;
    }

    self.backgroundRecordingID = UIBackgroundTaskInvalid;

    [self.session commitConfiguration];
}
// Action for the "Resume" button: tries once to restart the session on the
// session queue, then either hides the button (success) or shows an alert
// (still unable to run).
- (IBAction)resumeInterruptedSession:(id)sender
{
    dispatch_async( self.sessionQueue, ^{
        /*
            The session might fail to start running, e.g., if a phone or FaceTime call is still
            using audio or video. A failure to start the session running will be communicated via
            a session runtime error notification. To avoid repeatedly failing to start the session
            running, we only try to restart the session running in the session runtime error handler
            if we aren't trying to resume the session running.
        */
        [self.session startRunning];
        self.sessionRunning = self.session.isRunning;
        if ( ! self.session.isRunning ) {
            dispatch_async( dispatch_get_main_queue(), ^{
                NSString *message = NSLocalizedString( @"Unable to resume", @"Alert message when unable to resume the session running" );
                UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
                UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
                [alertController addAction:cancelAction];
                [self presentViewController:alertController animated:YES completion:nil];
            } );
        }
        else {
            // Session resumed: the resume button is no longer needed.
            dispatch_async( dispatch_get_main_queue(), ^{
                self.resumeButton.hidden = YES;
            } );
        }
    } );
}
// Switch the session between photo and movie capture configurations.
// UI state is toggled on the main thread; session reconfiguration happens
// on the session queue.
- (IBAction)toggleCaptureMode:(UISegmentedControl *)captureModeControl
{
    if ( captureModeControl.selectedSegmentIndex == AVCamCaptureModePhoto ) {
        // Recording is unavailable in photo mode; disable the Record button
        // while the session is reconfigured.
        self.recordButton.enabled = NO;
        dispatch_async( self.sessionQueue, ^{
            /*
                Remove the AVCaptureMovieFileOutput from the session because movie recording is
                not supported with AVCaptureSessionPresetPhoto. Additionally, Live Photo
                capture is not supported when an AVCaptureMovieFileOutput is connected to the session.
            */
            [self.session beginConfiguration];
            [self.session removeOutput:self.movieFileOutput];
            self.session.sessionPreset = AVCaptureSessionPresetPhoto;
            [self.session commitConfiguration];
            self.movieFileOutput = nil;
            // Re-enable Live Photo capture now that the movie file output has
            // been removed from the session.
            if ( self.photoOutput.livePhotoCaptureSupported ) {
                self.photoOutput.livePhotoCaptureEnabled = YES;
                dispatch_async( dispatch_get_main_queue(), ^{
                    self.livePhotoModeButton.enabled = YES;
                    self.livePhotoModeButton.hidden = NO;
                } );
            }
        } );
    }
    else if ( captureModeControl.selectedSegmentIndex == AVCamCaptureModeMovie ) {
        // Live Photo capture is unavailable while a movie file output is attached.
        self.livePhotoModeButton.hidden = YES;
        dispatch_async( self.sessionQueue, ^{
            AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
            if ( [self.session canAddOutput:movieFileOutput] )
            {
                [self.session beginConfiguration];
                [self.session addOutput:movieFileOutput];
                self.session.sessionPreset = AVCaptureSessionPresetHigh;
                // Prefer automatic video stabilization for recorded movies when available.
                AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
                if ( connection.isVideoStabilizationSupported ) {
                    connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
                }
                [self.session commitConfiguration];
                self.movieFileOutput = movieFileOutput;
                dispatch_async( dispatch_get_main_queue(), ^{
                    self.recordButton.enabled = YES;
                } );
            }
        } );
    }
}
#pragma mark Device Configuration
// Switch between the front and back cameras, preferring the duo camera on the
// back and the wide-angle camera on the front. Runs the actual device swap on
// the session queue and re-enables the UI when done.
- (IBAction)changeCamera:(id)sender
{
    // Disable camera-related controls until the switch completes below.
    self.cameraButton.enabled = NO;
    self.recordButton.enabled = NO;
    self.photoButton.enabled = NO;
    self.livePhotoModeButton.enabled = NO;
    self.captureModeControl.enabled = NO;
    dispatch_async( self.sessionQueue, ^{
        AVCaptureDevice *currentVideoDevice = self.videoDeviceInput.device;
        AVCaptureDevicePosition currentPosition = currentVideoDevice.position;
        // Initialize before the switch so the variables are defined even if
        // AVCaptureDevicePosition gains new values in a future SDK.
        AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionBack;
        AVCaptureDeviceType preferredDeviceType = AVCaptureDeviceTypeBuiltInDuoCamera;
        switch ( currentPosition )
        {
            case AVCaptureDevicePositionUnspecified:
            case AVCaptureDevicePositionFront:
                preferredPosition = AVCaptureDevicePositionBack;
                preferredDeviceType = AVCaptureDeviceTypeBuiltInDuoCamera;
                break;
            case AVCaptureDevicePositionBack:
                preferredPosition = AVCaptureDevicePositionFront;
                preferredDeviceType = AVCaptureDeviceTypeBuiltInWideAngleCamera;
                break;
        }
        NSArray<AVCaptureDevice *> *devices = self.videoDeviceDiscoverySession.devices;
        AVCaptureDevice *newVideoDevice = nil;
        // First, look for a device with both the preferred position and device type.
        for ( AVCaptureDevice *device in devices ) {
            if ( device.position == preferredPosition && [device.deviceType isEqualToString:preferredDeviceType] ) {
                newVideoDevice = device;
                break;
            }
        }
        // Otherwise, look for a device with only the preferred position.
        if ( ! newVideoDevice ) {
            for ( AVCaptureDevice *device in devices ) {
                if ( device.position == preferredPosition ) {
                    newVideoDevice = device;
                    break;
                }
            }
        }
        if ( newVideoDevice ) {
            // Capture and log the creation error instead of discarding it
            // (the original passed error:NULL and failed silently).
            NSError *error = nil;
            AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:newVideoDevice error:&error];
            if ( ! videoDeviceInput ) {
                NSLog( @"Could not create video device input: %@", error );
            }
            [self.session beginConfiguration];
            // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
            [self.session removeInput:self.videoDeviceInput];
            if ( videoDeviceInput && [self.session canAddInput:videoDeviceInput] ) {
                // Track subject-area changes on the new device instead of the old one.
                [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice];
                [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:newVideoDevice];
                [self.session addInput:videoDeviceInput];
                self.videoDeviceInput = videoDeviceInput;
            }
            else {
                // Creation or addition failed: restore the previous input so
                // the session keeps producing video.
                [self.session addInput:self.videoDeviceInput];
            }
            // Swapping inputs resets connection settings; re-apply stabilization.
            AVCaptureConnection *movieFileOutputConnection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
            if ( movieFileOutputConnection.isVideoStabilizationSupported ) {
                movieFileOutputConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
            }
            /*
                Set Live Photo capture enabled if it is supported. When changing cameras, the
                `livePhotoCaptureEnabled` property of the AVCapturePhotoOutput gets set to NO when
                a video device is disconnected from the session. After the new video device is
                added to the session, re-enable Live Photo capture on the AVCapturePhotoOutput if it is supported.
            */
            self.photoOutput.livePhotoCaptureEnabled = self.photoOutput.livePhotoCaptureSupported;
            [self.session commitConfiguration];
        }
        dispatch_async( dispatch_get_main_queue(), ^{
            self.cameraButton.enabled = YES;
            self.recordButton.enabled = self.captureModeControl.selectedSegmentIndex == AVCamCaptureModeMovie;
            self.photoButton.enabled = YES;
            self.livePhotoModeButton.enabled = YES;
            self.captureModeControl.enabled = YES;
        } );
    } );
}
- (IBAction)focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer
{
    // Convert the tap location from view coordinates into the capture device's
    // coordinate space, then run a one-shot autofocus/autoexposure there.
    CGPoint layerPoint = [gestureRecognizer locationInView:gestureRecognizer.view];
    CGPoint devicePoint = [self.previewView.videoPreviewLayer captureDevicePointOfInterestForPoint:layerPoint];
    [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeAutoExpose atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}
// Apply focus and exposure at the given point (in capture-device coordinate
// space) on the session queue. monitorSubjectAreaChange controls whether the
// device keeps reporting subject-area changes afterwards.
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
    dispatch_async( self.sessionQueue, ^{
        AVCaptureDevice *device = self.videoDeviceInput.device;
        NSError *error = nil;
        // The device must be locked before its configuration may change.
        if ( [device lockForConfiguration:&error] ) {
            /*
                Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation.
                Call set(Focus/Exposure)Mode() to apply the new point of interest.
            */
            if ( device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode] ) {
                device.focusPointOfInterest = point;
                device.focusMode = focusMode;
            }
            if ( device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode] ) {
                device.exposurePointOfInterest = point;
                device.exposureMode = exposureMode;
            }
            device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
            [device unlockForConfiguration];
        }
        else {
            NSLog( @"Could not lock device for configuration: %@", error );
        }
    } );
}
#pragma mark Capturing Photos
// Capture a photo (optionally a Live Photo), using a dedicated delegate
// object per capture so overlapping captures each get their own life cycle.
- (IBAction)capturePhoto:(id)sender
{
    /*
        Retrieve the video preview layer's video orientation on the main queue before
        entering the session queue. We do this to ensure UI elements are accessed on
        the main thread and session configuration is done on the session queue.
    */
    AVCaptureVideoOrientation videoPreviewLayerVideoOrientation = self.previewView.videoPreviewLayer.connection.videoOrientation;
    dispatch_async( self.sessionQueue, ^{
        // Update the photo output's connection to match the video orientation of the video preview layer.
        AVCaptureConnection *photoOutputConnection = [self.photoOutput connectionWithMediaType:AVMediaTypeVideo];
        photoOutputConnection.videoOrientation = videoPreviewLayerVideoOrientation;
        // Capture a JPEG photo with flash set to auto and high resolution photo enabled.
        AVCapturePhotoSettings *photoSettings = [AVCapturePhotoSettings photoSettings];
        photoSettings.flashMode = AVCaptureFlashModeAuto;
        photoSettings.highResolutionPhotoEnabled = YES;
        // Request a preview image in the first available pixel format.
        if ( photoSettings.availablePreviewPhotoPixelFormatTypes.count > 0 ) {
            photoSettings.previewPhotoFormat = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : photoSettings.availablePreviewPhotoPixelFormatTypes.firstObject };
        }
        if ( self.livePhotoMode == AVCamLivePhotoModeOn && self.photoOutput.livePhotoCaptureSupported ) { // Live Photo capture is not supported in movie mode.
            // Direct the Live Photo companion movie to a uniquely-named temporary file.
            NSString *livePhotoMovieFileName = [NSUUID UUID].UUIDString;
            NSString *livePhotoMovieFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[livePhotoMovieFileName stringByAppendingPathExtension:@"mov"]];
            photoSettings.livePhotoMovieFileURL = [NSURL fileURLWithPath:livePhotoMovieFilePath];
        }
        // Use a separate object for the photo capture delegate to isolate each capture life cycle.
        AVCamPhotoCaptureDelegate *photoCaptureDelegate = [[AVCamPhotoCaptureDelegate alloc] initWithRequestedPhotoSettings:photoSettings willCapturePhotoAnimation:^{
            // Flash the preview layer briefly to give the user shutter feedback.
            dispatch_async( dispatch_get_main_queue(), ^{
                self.previewView.videoPreviewLayer.opacity = 0.0;
                [UIView animateWithDuration:0.25 animations:^{
                    self.previewView.videoPreviewLayer.opacity = 1.0;
                }];
            } );
        } capturingLivePhoto:^( BOOL capturing ) {
            /*
                Because Live Photo captures can overlap, we need to keep track of the
                number of in progress Live Photo captures to ensure that the
                Live Photo label stays visible during these captures.
            */
            dispatch_async( self.sessionQueue, ^{
                if ( capturing ) {
                    self.inProgressLivePhotoCapturesCount++;
                }
                else {
                    self.inProgressLivePhotoCapturesCount--;
                }
                // Snapshot the count on the session queue before hopping to main.
                NSInteger inProgressLivePhotoCapturesCount = self.inProgressLivePhotoCapturesCount;
                dispatch_async( dispatch_get_main_queue(), ^{
                    if ( inProgressLivePhotoCapturesCount > 0 ) {
                        self.capturingLivePhotoLabel.hidden = NO;
                    }
                    else if ( inProgressLivePhotoCapturesCount == 0 ) {
                        self.capturingLivePhotoLabel.hidden = YES;
                    }
                    else {
                        NSLog( @"Error: In progress live photo capture count is less than 0" );
                    }
                } );
            } );
        } completed:^( AVCamPhotoCaptureDelegate *photoCaptureDelegate ) {
            // When the capture is complete, remove a reference to the photo capture delegate so it can be deallocated.
            dispatch_async( self.sessionQueue, ^{
                self.inProgressPhotoCaptureDelegates[@(photoCaptureDelegate.requestedPhotoSettings.uniqueID)] = nil;
            } );
        }];
        /*
            The Photo Output keeps a weak reference to the photo capture delegate so
            we store it in an array to maintain a strong reference to this object
            until the capture is completed.
        */
        self.inProgressPhotoCaptureDelegates[@(photoCaptureDelegate.requestedPhotoSettings.uniqueID)] = photoCaptureDelegate;
        [self.photoOutput capturePhotoWithSettings:photoSettings delegate:photoCaptureDelegate];
    } );
}
- (IBAction)toggleLivePhotoMode:(UIButton *)livePhotoModeButton
{
    dispatch_async( self.sessionQueue, ^{
        // Flip the mode on the session queue, then mirror the new state in the UI.
        if ( self.livePhotoMode == AVCamLivePhotoModeOn ) {
            self.livePhotoMode = AVCamLivePhotoModeOff;
        }
        else {
            self.livePhotoMode = AVCamLivePhotoModeOn;
        }
        AVCamLivePhotoMode livePhotoMode = self.livePhotoMode;
        dispatch_async( dispatch_get_main_queue(), ^{
            NSString *title = ( livePhotoMode == AVCamLivePhotoModeOn )
                ? NSLocalizedString( @"Live Photo Mode: On", @"Live photo mode button on title" )
                : NSLocalizedString( @"Live Photo Mode: Off", @"Live photo mode button off title" );
            [self.livePhotoModeButton setTitle:title forState:UIControlStateNormal];
        } );
    } );
}
#pragma mark Recording Movies
// Start or stop movie recording depending on the current recording state.
- (IBAction)toggleMovieRecording:(id)sender
{
    /*
        Disable the Camera button until recording finishes, and disable
        the Record button until recording starts or finishes.
        See the AVCaptureFileOutputRecordingDelegate methods.
    */
    self.cameraButton.enabled = NO;
    self.recordButton.enabled = NO;
    self.captureModeControl.enabled = NO;
    /*
        Retrieve the video preview layer's video orientation on the main queue
        before entering the session queue. We do this to ensure UI elements are
        accessed on the main thread and session configuration is done on the session queue.
    */
    AVCaptureVideoOrientation videoPreviewLayerVideoOrientation = self.previewView.videoPreviewLayer.connection.videoOrientation;
    dispatch_async( self.sessionQueue, ^{
        if ( ! self.movieFileOutput.isRecording ) {
            if ( [UIDevice currentDevice].isMultitaskingSupported ) {
                /*
                    Setup background task.
                    This is needed because the -[captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:]
                    callback is not received until AVCam returns to the foreground unless you request background execution time.
                    This also ensures that there will be time to write the file to the photo library when AVCam is backgrounded.
                    To conclude this background execution, -[endBackgroundTask:] is called in
                    -[captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:] after the recorded file has been saved.
                */
                self.backgroundRecordingID = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
            }
            // Update the orientation on the movie file output video connection before starting recording.
            AVCaptureConnection *movieFileOutputConnection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
            movieFileOutputConnection.videoOrientation = videoPreviewLayerVideoOrientation;
            // Start recording to a temporary file with a unique name so a new
            // recording never overwrites one that is still being saved.
            NSString *outputFileName = [NSUUID UUID].UUIDString;
            NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[outputFileName stringByAppendingPathExtension:@"mov"]];
            [self.movieFileOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputFilePath] recordingDelegate:self];
        }
        else {
            // A recording is in progress; this tap stops it.
            [self.movieFileOutput stopRecording];
        }
    } );
}
// AVCaptureFileOutputRecordingDelegate: recording has begun.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
    // Switch the Record button into its "Stop" role and re-enable it so the
    // user can end the recording.
    dispatch_async( dispatch_get_main_queue(), ^{
        NSString *stopTitle = NSLocalizedString( @"Stop", @"Recording button stop title" );
        [self.recordButton setTitle:stopTitle forState:UIControlStateNormal];
        self.recordButton.enabled = YES;
    });
}
// AVCaptureFileOutputRecordingDelegate: recording finished (successfully or
// not). Saves the movie to the photo library, cleans up the temporary file,
// ends the background task, and restores the UI.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    /*
        Note that currentBackgroundRecordingID is used to end the background task
        associated with this recording. This allows a new recording to be started,
        associated with a new UIBackgroundTaskIdentifier, once the movie file output's
        `recording` property is back to NO which happens sometime after this method
        returns.
        Note: Since we use a unique file path for each recording, a new recording will
        not overwrite a recording currently being saved.
    */
    UIBackgroundTaskIdentifier currentBackgroundRecordingID = self.backgroundRecordingID;
    self.backgroundRecordingID = UIBackgroundTaskInvalid;
    // Shared cleanup: delete the temporary movie file (if still present) and
    // end the background task captured above. Invoked on every exit path.
    dispatch_block_t cleanup = ^{
        if ( [[NSFileManager defaultManager] fileExistsAtPath:outputFileURL.path] ) {
            [[NSFileManager defaultManager] removeItemAtPath:outputFileURL.path error:NULL];
        }
        if ( currentBackgroundRecordingID != UIBackgroundTaskInvalid ) {
            [[UIApplication sharedApplication] endBackgroundTask:currentBackgroundRecordingID];
        }
    };
    BOOL success = YES;
    if ( error ) {
        NSLog( @"Movie file finishing error: %@", error );
        // An error may still carry a usable file; AVFoundation reports whether
        // the recording finished successfully via this userInfo key.
        success = [error.userInfo[AVErrorRecordingSuccessfullyFinishedKey] boolValue];
    }
    if ( success ) {
        // Check authorization status.
        [PHPhotoLibrary requestAuthorization:^( PHAuthorizationStatus status ) {
            if ( status == PHAuthorizationStatusAuthorized ) {
                // Save the movie file to the photo library and cleanup.
                [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                    PHAssetResourceCreationOptions *options = [[PHAssetResourceCreationOptions alloc] init];
                    // Move (not copy) the file into the library; cleanup then
                    // finds nothing left to delete.
                    options.shouldMoveFile = YES;
                    PHAssetCreationRequest *creationRequest = [PHAssetCreationRequest creationRequestForAsset];
                    [creationRequest addResourceWithType:PHAssetResourceTypeVideo fileURL:outputFileURL options:options];
                } completionHandler:^( BOOL success, NSError *error ) {
                    if ( ! success ) {
                        NSLog( @"Could not save movie to photo library: %@", error );
                    }
                    cleanup();
                }];
            }
            else {
                cleanup();
            }
        }];
    }
    else {
        cleanup();
    }
    // Enable the Camera and Record buttons to let the user switch camera and start another recording.
    dispatch_async( dispatch_get_main_queue(), ^{
        // Only enable the ability to change camera if the device has more than one camera.
        self.cameraButton.enabled = ( self.videoDeviceDiscoverySession.uniqueDevicePositionsCount > 1 );
        self.recordButton.enabled = YES;
        self.captureModeControl.enabled = YES;
        [self.recordButton setTitle:NSLocalizedString( @"Record", @"Recording button record title" ) forState:UIControlStateNormal];
    });
}
#pragma mark KVO and Notifications
// Register KVO and notification observers that keep the UI in sync with the
// capture session; balanced by -removeObservers.
- (void)addObservers
{
    // Observe the session's running state; handled in -observeValueForKeyPath:...
    [self.session addObserver:self forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:SessionRunningContext];
    // Re-center focus/exposure when the subject area changes on the current video device.
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:self.videoDeviceInput.device];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:self.session];
    /*
        A session can only run when the app is full screen. It will be interrupted
        in a multi-app layout, introduced in iOS 9, see also the documentation of
        AVCaptureSessionInterruptionReason. Add observers to handle these session
        interruptions and show a preview is paused message. See the documentation
        of AVCaptureSessionWasInterruptedNotification for other interruption reasons.
    */
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionWasInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:self.session];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionInterruptionEnded:) name:AVCaptureSessionInterruptionEndedNotification object:self.session];
}
// Unregister everything added in -addObservers: all notification-center
// registrations plus the "running" KVO observation on the session.
- (void)removeObservers
{
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center removeObserver:self];
    [self.session removeObserver:self forKeyPath:@"running" context:SessionRunningContext];
}
// KVO callback. Only handles the session's "running" key (registered with
// SessionRunningContext in -addObservers); everything else goes to super.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if ( context == SessionRunningContext ) {
        BOOL isSessionRunning = [change[NSKeyValueChangeNewKey] boolValue];
        // Snapshot the photo output's capabilities before hopping to the main queue.
        BOOL livePhotoCaptureSupported = self.photoOutput.livePhotoCaptureSupported;
        BOOL livePhotoCaptureEnabled = self.photoOutput.livePhotoCaptureEnabled;
        dispatch_async( dispatch_get_main_queue(), ^{
            // Only enable the ability to change camera if the device has more than one camera.
            self.cameraButton.enabled = isSessionRunning && ( self.videoDeviceDiscoverySession.uniqueDevicePositionsCount > 1 );
            self.recordButton.enabled = isSessionRunning && ( self.captureModeControl.selectedSegmentIndex == AVCamCaptureModeMovie );
            self.photoButton.enabled = isSessionRunning;
            self.captureModeControl.enabled = isSessionRunning;
            self.livePhotoModeButton.enabled = isSessionRunning && livePhotoCaptureEnabled;
            self.livePhotoModeButton.hidden = ! ( isSessionRunning && livePhotoCaptureSupported );
        } );
    }
    else {
        // Unrecognized context: forward to super per KVO convention.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
// The subject area changed significantly: re-center continuous focus/exposure
// on the middle of the frame and stop monitoring until the next tap-to-focus.
- (void)subjectAreaDidChange:(NSNotification *)notification
{
    CGPoint centerPoint = CGPointMake( 0.5, 0.5 );
    [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:centerPoint monitorSubjectAreaChange:NO];
}
// Handle AVCaptureSessionRuntimeErrorNotification (registered in -addObservers).
- (void)sessionRuntimeError:(NSNotification *)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    NSLog( @"Capture session runtime error: %@", error );
    /*
        Automatically try to restart the session running if media services were
        reset and the last start running succeeded. Otherwise, enable the user
        to try to resume the session running.
    */
    if ( error.code == AVErrorMediaServicesWereReset ) {
        dispatch_async( self.sessionQueue, ^{
            if ( self.isSessionRunning ) {
                [self.session startRunning];
                self.sessionRunning = self.session.isRunning;
            }
            else {
                // The session was not running before the reset; let the user resume manually.
                dispatch_async( dispatch_get_main_queue(), ^{
                    self.resumeButton.hidden = NO;
                } );
            }
        } );
    }
    else {
        // NOTE(review): this mutates UI on whatever thread delivered the
        // notification; the other branch dispatches to main — confirm this
        // notification is posted on the main thread.
        self.resumeButton.hidden = NO;
    }
}
// Handle AVCaptureSessionWasInterruptedNotification: decide whether to show
// the resume button or the "camera unavailable" label based on the reason.
- (void)sessionWasInterrupted:(NSNotification *)notification
{
    /*
        In some scenarios we want to enable the user to resume the session running.
        For example, if music playback is initiated via control center while
        using AVCam, then the user can let AVCam resume
        the session running, which will stop music playback. Note that stopping
        music playback in control center will not automatically resume the session
        running. Also note that it is not always possible to resume, see -[resumeInterruptedSession:].
    */
    BOOL showResumeButton = NO;
    AVCaptureSessionInterruptionReason reason = [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue];
    NSLog( @"Capture session was interrupted with reason %ld", (long)reason );
    if ( reason == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient ||
        reason == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient ) {
        // Another client took the device; the user may be able to reclaim it.
        showResumeButton = YES;
    }
    else if ( reason == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps ) {
        // Simply fade-in a label to inform the user that the camera is unavailable.
        self.cameraUnavailableLabel.alpha = 0.0;
        self.cameraUnavailableLabel.hidden = NO;
        [UIView animateWithDuration:0.25 animations:^{
            self.cameraUnavailableLabel.alpha = 1.0;
        }];
    }
    if ( showResumeButton ) {
        // Simply fade-in a button to enable the user to try to resume the session running.
        self.resumeButton.alpha = 0.0;
        self.resumeButton.hidden = NO;
        [UIView animateWithDuration:0.25 animations:^{
            self.resumeButton.alpha = 1.0;
        }];
    }
}
// Handle AVCaptureSessionInterruptionEndedNotification: fade out whichever
// interruption affordances are still visible.
- (void)sessionInterruptionEnded:(NSNotification *)notification
{
    NSLog( @"Capture session interruption ended" );
    if ( ! self.resumeButton.hidden ) {
        [UIView animateWithDuration:0.25 animations:^{
            self.resumeButton.alpha = 0.0;
        } completion:^( BOOL finished ) {
            // Hide only after the fade completes.
            self.resumeButton.hidden = YES;
        }];
    }
    if ( ! self.cameraUnavailableLabel.hidden ) {
        [UIView animateWithDuration:0.25 animations:^{
            self.cameraUnavailableLabel.alpha = 0.0;
        } completion:^( BOOL finished ) {
            self.cameraUnavailableLabel.hidden = YES;
        }];
    }
}
@end

View File

@ -0,0 +1,17 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Photo capture delegate.
*/
@import AVFoundation;
// One instance per capture. Wraps the AVCapturePhotoCaptureDelegate callbacks
// and reports capture life-cycle milestones back to its owner via blocks.
@interface AVCamPhotoCaptureDelegate : NSObject<AVCapturePhotoCaptureDelegate>
// Designated initializer. willCapturePhotoAnimation fires just before the
// photo is captured; capturingLivePhoto fires when Live Photo movie capture
// starts (YES) and ends (NO); completed fires once the whole capture finishes,
// passing this delegate so the owner can release its strong reference.
- (instancetype)initWithRequestedPhotoSettings:(AVCapturePhotoSettings *)requestedPhotoSettings willCapturePhotoAnimation:(void (^)())willCapturePhotoAnimation capturingLivePhoto:(void (^)( BOOL capturing ))capturingLivePhoto completed:(void (^)( AVCamPhotoCaptureDelegate *photoCaptureDelegate ))completed;
// The settings this capture was requested with; its uniqueID keys the owner's
// in-progress-delegate dictionary.
@property (nonatomic, readonly) AVCapturePhotoSettings *requestedPhotoSettings;
@end

View File

@ -0,0 +1,130 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Photo capture delegate.
*/
#import "AVCamPhotoCaptureDelegate.h"
@import Photos;
@interface AVCamPhotoCaptureDelegate ()
// Redeclared readwrite internally so the initializer can assign it.
@property (nonatomic, readwrite) AVCapturePhotoSettings *requestedPhotoSettings;
// Owner-supplied callbacks, invoked at the corresponding capture milestones.
@property (nonatomic) void (^willCapturePhotoAnimation)();
@property (nonatomic) void (^capturingLivePhoto)(BOOL capturing);
@property (nonatomic) void (^completed)(AVCamPhotoCaptureDelegate *photoCaptureDelegate);
// JPEG data produced when the photo sample buffer finishes processing.
@property (nonatomic) NSData *photoData;
// Temporary file holding the Live Photo companion movie, if one was captured.
@property (nonatomic) NSURL *livePhotoCompanionMovieURL;
@end
@implementation AVCamPhotoCaptureDelegate

// Store the requested settings and the owner's callback blocks.
- (instancetype)initWithRequestedPhotoSettings:(AVCapturePhotoSettings *)requestedPhotoSettings willCapturePhotoAnimation:(void (^)())willCapturePhotoAnimation capturingLivePhoto:(void (^)(BOOL))capturingLivePhoto completed:(void (^)(AVCamPhotoCaptureDelegate *))completed
{
    self = [super init];
    if ( self ) {
        self.requestedPhotoSettings = requestedPhotoSettings;
        self.willCapturePhotoAnimation = willCapturePhotoAnimation;
        self.capturingLivePhoto = capturingLivePhoto;
        self.completed = completed;
    }
    return self;
}

// Delete the temporary Live Photo companion movie (if it still exists) and
// notify the owner that this capture's life cycle is over.
- (void)didFinish
{
    if ( [[NSFileManager defaultManager] fileExistsAtPath:self.livePhotoCompanionMovieURL.path] ) {
        NSError *error = nil;
        // Check the method's BOOL return value: the NSError out-parameter is
        // only meaningful when removal fails. (The original tested `error`
        // after an unconditional call, a Cocoa error-handling anti-pattern.)
        if ( ! [[NSFileManager defaultManager] removeItemAtPath:self.livePhotoCompanionMovieURL.path error:&error] ) {
            NSLog( @"Could not remove file at url: %@", self.livePhotoCompanionMovieURL.path );
        }
    }
    self.completed( self );
}

// A Live Photo movie is part of this capture; tell the owner it has started.
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
{
    if ( ( resolvedSettings.livePhotoMovieDimensions.width > 0 ) && ( resolvedSettings.livePhotoMovieDimensions.height > 0 ) ) {
        self.capturingLivePhoto( YES );
    }
}

// The photo is about to be taken; let the owner run its shutter animation.
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput willCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
{
    self.willCapturePhotoAnimation();
}

// Convert the finished sample buffers into JPEG data, held until the capture completes.
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings error:(NSError *)error
{
    if ( error != nil ) {
        NSLog( @"Error capturing photo: %@", error );
        return;
    }
    self.photoData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:previewPhotoSampleBuffer];
}

// The Live Photo movie stopped recording; tell the owner.
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishRecordingLivePhotoMovieForEventualFileAtURL:(NSURL *)outputFileURL resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
{
    self.capturingLivePhoto(NO);
}

// Remember where the finished companion movie was written so it can be saved
// alongside the photo (and cleaned up in -didFinish).
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishProcessingLivePhotoToMovieFileAtURL:(NSURL *)outputFileURL duration:(CMTime)duration photoDisplayTime:(CMTime)photoDisplayTime resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings error:(NSError *)error
{
    if ( error != nil ) {
        NSLog( @"Error processing live photo companion movie: %@", error );
        return;
    }
    self.livePhotoCompanionMovieURL = outputFileURL;
}

// The whole capture is done: save the photo (and companion movie, if any) to
// the photo library, then call -didFinish on every exit path.
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings error:(NSError *)error
{
    if ( error != nil ) {
        NSLog( @"Error capturing photo: %@", error );
        [self didFinish];
        return;
    }
    if ( self.photoData == nil ) {
        NSLog( @"No photo data resource" );
        [self didFinish];
        return;
    }
    [PHPhotoLibrary requestAuthorization:^( PHAuthorizationStatus status ) {
        if ( status == PHAuthorizationStatusAuthorized ) {
            [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                PHAssetCreationRequest *creationRequest = [PHAssetCreationRequest creationRequestForAsset];
                [creationRequest addResourceWithType:PHAssetResourceTypePhoto data:self.photoData options:nil];
                if ( self.livePhotoCompanionMovieURL ) {
                    PHAssetResourceCreationOptions *livePhotoCompanionMovieResourceOptions = [[PHAssetResourceCreationOptions alloc] init];
                    // Move (not copy) the movie into the library.
                    livePhotoCompanionMovieResourceOptions.shouldMoveFile = YES;
                    [creationRequest addResourceWithType:PHAssetResourceTypePairedVideo fileURL:self.livePhotoCompanionMovieURL options:livePhotoCompanionMovieResourceOptions];
                }
            } completionHandler:^( BOOL success, NSError * _Nullable error ) {
                if ( ! success ) {
                    NSLog( @"Error occurred while saving photo to photo library: %@", error );
                }
                [self didFinish];
            }];
        }
        else {
            NSLog( @"Not authorized to save photo" );
            [self didFinish];
        }
    }];
}

@end

View File

@ -0,0 +1,19 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Application preview view.
*/
@import UIKit;

// Forward declarations keep this header lightweight; the implementation file
// imports AVFoundation for the full definitions. AVCaptureVideoPreviewLayer
// was previously used below without any declaration, which does not compile
// unless a prefix header happens to provide it.
@class AVCaptureSession;
@class AVCaptureVideoPreviewLayer;

// A UIView backed by an AVCaptureVideoPreviewLayer, used to display the
// camera preview for a capture session.
@interface AVCamPreviewView : UIView

// The view's backing layer, typed as a preview layer.
@property (nonatomic, readonly) AVCaptureVideoPreviewLayer *videoPreviewLayer;

// The capture session feeding the preview layer.
@property (nonatomic) AVCaptureSession *session;

@end

View File

@ -0,0 +1,35 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Application preview view.
*/
@import AVFoundation;
#import "AVCamPreviewView.h"
@implementation AVCamPreviewView

// Make AVCaptureVideoPreviewLayer this view's backing layer class so the
// camera preview renders directly in the view's own layer.
+ (Class)layerClass
{
    return [AVCaptureVideoPreviewLayer class];
}

// The backing layer, downcast to its actual preview-layer type.
- (AVCaptureVideoPreviewLayer *)videoPreviewLayer
{
    return (AVCaptureVideoPreviewLayer *)self.layer;
}

// session is a convenience pass-through to videoPreviewLayer.session.
- (AVCaptureSession *)session
{
    return self.videoPreviewLayer.session;
}

- (void)setSession:(AVCaptureSession *)session
{
    self.videoPreviewLayer.session = session;
}

@end

View File

@ -0,0 +1,128 @@
{
"images" : [
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-Small.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-Small@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-Small@3x.png",
"scale" : "3x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-40@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-40@3x.png",
"scale" : "3x"
},
{
"size" : "57x57",
"idiom" : "iphone",
"filename" : "Icon.png",
"scale" : "1x"
},
{
"size" : "57x57",
"idiom" : "iphone",
"filename" : "Icon@2x.png",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-60@2x.png",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-60@3x.png",
"scale" : "3x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-Small.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-Small@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-40.png",
"scale" : "1x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-40@2x.png",
"scale" : "2x"
},
{
"size" : "50x50",
"idiom" : "ipad",
"filename" : "Icon-Small-50.png",
"scale" : "1x"
},
{
"size" : "50x50",
"idiom" : "ipad",
"filename" : "Icon-Small-50@2x.png",
"scale" : "2x"
},
{
"size" : "72x72",
"idiom" : "ipad",
"filename" : "Icon-72.png",
"scale" : "1x"
},
{
"size" : "72x72",
"idiom" : "ipad",
"filename" : "Icon-72@2x.png",
"scale" : "2x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-76.png",
"scale" : "1x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-76@2x.png",
"scale" : "2x"
},
{
"size" : "83.5x83.5",
"idiom" : "ipad",
"filename" : "Icon-83.5@2x.png",
"scale" : "2x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

View File

@ -0,0 +1,6 @@
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}

View File

@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11129.12" systemVersion="15F24" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11103.9"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="Llm-lL-Icb"/>
<viewControllerLayoutGuide type="bottom" id="xb3-aO-Qok"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
<color key="tintColor" red="1" green="1" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</document>

View File

@ -0,0 +1,193 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11129.12" systemVersion="15F24" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11103.9"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--Cam Camera View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="AVCamCameraViewController" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
<viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="3eR-Rn-XpZ" userLabel="Preview" customClass="AVCamPreviewView">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<gestureRecognizers/>
<connections>
<outletCollection property="gestureRecognizers" destination="fY6-qX-ntV" appends="YES" id="G6D-dx-xU8"/>
</connections>
</view>
<label hidden="YES" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Camera Unavailable" textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="zf0-db-esM" userLabel="Camera Unavailable">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.0" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="24"/>
<color key="textColor" red="1" green="1" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
</label>
<button hidden="YES" opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="FZr-Ip-7WL" userLabel="Resume">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="24"/>
<inset key="contentEdgeInsets" minX="10" minY="5" maxX="10" maxY="5"/>
<state key="normal" title="Tap to resume">
<color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="resumeInterruptedSession:" destination="BYZ-38-t0r" eventType="touchUpInside" id="42K-1B-qJd"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="eRT-dK-6dM" userLabel="Record">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Record">
<color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="toggleMovieRecording:" destination="BYZ-38-t0r" eventType="touchUpInside" id="9R7-Ok-FpB"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="uCj-6P-mHF" userLabel="Still">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="height" constant="30" id="NtC-UN-gTs"/>
<constraint firstAttribute="width" constant="80" id="dxU-UP-4Ae"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Photo">
<color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="capturePhoto:" destination="BYZ-38-t0r" eventType="touchUpInside" id="o5K-SC-fYn"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="rUJ-G6-RPv" userLabel="Camera">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Camera">
<color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="changeCamera:" destination="BYZ-38-t0r" eventType="touchUpInside" id="3W0-h3-6fc"/>
</connections>
</button>
<segmentedControl opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="top" segmentControlStyle="plain" selectedSegmentIndex="0" translatesAutoresizingMaskIntoConstraints="NO" id="FAC-co-10c">
<segments>
<segment title="Photo"/>
<segment title="Movie"/>
</segments>
<connections>
<action selector="toggleCaptureMode:" destination="BYZ-38-t0r" eventType="valueChanged" id="SKd-67-ZHh"/>
</connections>
</segmentedControl>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="eI6-gV-W7d">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="width" constant="200" id="heR-zX-F6K"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Live Photo Mode: On"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="toggleLivePhotoMode:" destination="BYZ-38-t0r" eventType="touchUpInside" id="JqX-wJ-Xf1"/>
</connections>
</button>
<label hidden="YES" opaque="NO" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Live" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Pii-2r-R2l">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="height" constant="25" id="Kxo-zf-Fe1"/>
<constraint firstAttribute="width" constant="40" id="eRd-mj-8Du"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" red="1" green="1" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</label>
</subviews>
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="3eR-Rn-XpZ" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="125-kC-WZF"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="height" secondItem="eRT-dK-6dM" secondAttribute="height" id="AEV-ew-H4g"/>
<constraint firstItem="eI6-gV-W7d" firstAttribute="top" secondItem="y3c-jy-aDJ" secondAttribute="bottom" constant="8" id="Aao-6b-vLN"/>
<constraint firstItem="3eR-Rn-XpZ" firstAttribute="height" secondItem="8bC-Xf-vdC" secondAttribute="height" id="Ice-47-M9N"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="top" secondItem="rUJ-G6-RPv" secondAttribute="top" id="NFm-e8-abT"/>
<constraint firstItem="FZr-Ip-7WL" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="OaZ-uO-vXY"/>
<constraint firstItem="FAC-co-10c" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="Oow-A6-mDp"/>
<constraint firstItem="zf0-db-esM" firstAttribute="centerY" secondItem="8bC-Xf-vdC" secondAttribute="centerY" id="Ris-mI-8lA"/>
<constraint firstItem="Pii-2r-R2l" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="Upd-h8-1dL"/>
<constraint firstItem="zf0-db-esM" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="W6q-xJ-jfF"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="height" secondItem="rUJ-G6-RPv" secondAttribute="height" id="aQi-F7-E2b"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="top" secondItem="FAC-co-10c" secondAttribute="bottom" constant="20" id="aSR-Je-0lW"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="top" secondItem="eRT-dK-6dM" secondAttribute="top" id="bQd-ro-0Hw"/>
<constraint firstItem="wfy-db-euE" firstAttribute="top" secondItem="uCj-6P-mHF" secondAttribute="bottom" constant="20" id="eWs-co-Aaz"/>
<constraint firstItem="3eR-Rn-XpZ" firstAttribute="centerY" secondItem="8bC-Xf-vdC" secondAttribute="centerY" id="igk-MQ-CGt"/>
<constraint firstItem="rUJ-G6-RPv" firstAttribute="leading" secondItem="uCj-6P-mHF" secondAttribute="trailing" constant="20" id="lsk-Hm-rTd"/>
<constraint firstAttribute="centerX" secondItem="uCj-6P-mHF" secondAttribute="centerX" id="m8a-cF-Rf0"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="width" secondItem="rUJ-G6-RPv" secondAttribute="width" id="o8j-gw-35B"/>
<constraint firstItem="Pii-2r-R2l" firstAttribute="top" secondItem="eI6-gV-W7d" secondAttribute="bottom" constant="8" id="oDE-jY-ryC"/>
<constraint firstItem="3eR-Rn-XpZ" firstAttribute="width" secondItem="8bC-Xf-vdC" secondAttribute="width" id="pSC-xP-dl0"/>
<constraint firstItem="eI6-gV-W7d" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="rqt-bn-mSt"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="width" secondItem="eRT-dK-6dM" secondAttribute="width" id="s8u-Y8-n27"/>
<constraint firstItem="FZr-Ip-7WL" firstAttribute="centerY" secondItem="8bC-Xf-vdC" secondAttribute="centerY" id="sTY-i6-czN"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="leading" secondItem="eRT-dK-6dM" secondAttribute="trailing" constant="20" id="zwj-TX-t6O"/>
</constraints>
</view>
<extendedEdge key="edgesForExtendedLayout"/>
<nil key="simulatedStatusBarMetrics"/>
<connections>
<outlet property="cameraButton" destination="rUJ-G6-RPv" id="dAV-WS-N1p"/>
<outlet property="cameraUnavailableLabel" destination="zf0-db-esM" id="P9W-lb-Pb8"/>
<outlet property="captureModeControl" destination="FAC-co-10c" id="KXj-wg-BvS"/>
<outlet property="capturingLivePhotoLabel" destination="Pii-2r-R2l" id="JAa-4l-5SD"/>
<outlet property="livePhotoModeButton" destination="eI6-gV-W7d" id="r9f-cN-YSH"/>
<outlet property="photoButton" destination="uCj-6P-mHF" id="Ha8-ua-hxy"/>
<outlet property="previewView" destination="3eR-Rn-XpZ" id="e7I-nu-L6j"/>
<outlet property="recordButton" destination="eRT-dK-6dM" id="iqk-en-NsW"/>
<outlet property="resumeButton" destination="FZr-Ip-7WL" id="tX5-Sx-rQK"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
<tapGestureRecognizer id="fY6-qX-ntV">
<connections>
<action selector="focusAndExposeTap:" destination="BYZ-38-t0r" id="65g-8k-5pv"/>
</connections>
</tapGestureRecognizer>
</objects>
<point key="canvasLocation" x="-656" y="-630"/>
</scene>
</scenes>
<color key="tintColor" red="1" green="1" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</document>

View File

@ -0,0 +1,60 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>5.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSCameraUsageDescription</key>
<string>to take photos and videos</string>
<key>NSMicrophoneUsageDescription</key>
<string>to record Live Photos and movies</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>to save photos and videos to your Photo Library</string>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UIRequiresFullScreen</key>
<true/>
<key>UIStatusBarHidden</key>
<true/>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
</dict>
</plist>

View File

@ -0,0 +1,17 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Main application entry point.
*/
// UIKit provides UIApplicationMain and the application run loop.
@import UIKit;
#import "AVCamAppDelegate.h"
// Process entry point. Wraps launch in an autorelease pool and hands
// control to UIKit: passing nil for the principal class selects the
// default UIApplication, and the fourth argument installs
// AVCamAppDelegate as the application delegate. UIApplicationMain
// never returns under normal operation.
int main(int argc, char * argv[]) {
@autoreleasepool {
return UIApplicationMain( argc, argv, nil, NSStringFromClass( [AVCamAppDelegate class] ) );
}
}

24
AVCam/README.md Normal file
View File

@ -0,0 +1,24 @@
# AVCam-iOS: Using AVFoundation to Capture Photos and Movies
AVCam demonstrates how to use the AVFoundation capture API to record movies and capture photos. The sample has a record button for recording movies, a photo button for capturing photos, a Live Photo mode button for enabling Live Photo capture, a capture mode control for toggling between photo and movie capture modes, and a camera button for switching between front and back cameras (on supported devices). AVCam runs only on an actual device, either an iPad or iPhone, and cannot be run in Simulator.
## Requirements
### Build
Xcode 8.0, iOS 10.0 SDK
### Runtime
iOS 10.0 or later
## Changes from Previous Version
- Adopt AVCapturePhotoOutput
- Capture Live Photos
- Add privacy keys to Info.plist
- Add a version of AVCam in Swift 3
- Remove support for AVCaptureStillImageOutput
- Bug fixes
Copyright (C) 2016 Apple Inc. All rights reserved.

View File

@ -0,0 +1,317 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
7AA677151CFF765600B353FB /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7AA677141CFF765600B353FB /* AppDelegate.swift */; };
7AA677171CFF765600B353FB /* CameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7AA677161CFF765600B353FB /* CameraViewController.swift */; };
7AA6771A1CFF765600B353FB /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7AA677181CFF765600B353FB /* Main.storyboard */; };
7AA6771C1CFF765600B353FB /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7AA6771B1CFF765600B353FB /* Assets.xcassets */; };
7AA6771F1CFF765600B353FB /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7AA6771D1CFF765600B353FB /* LaunchScreen.storyboard */; };
7AA677271CFF774800B353FB /* PhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7AA677261CFF774800B353FB /* PhotoCaptureDelegate.swift */; };
7AA677291CFF7B5C00B353FB /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7AA677281CFF7B5C00B353FB /* PreviewView.swift */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
7AA677111CFF765600B353FB /* AVCam.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = AVCam.app; sourceTree = BUILT_PRODUCTS_DIR; };
7AA677141CFF765600B353FB /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
7AA677161CFF765600B353FB /* CameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraViewController.swift; sourceTree = "<group>"; };
7AA677191CFF765600B353FB /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
7AA6771B1CFF765600B353FB /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
7AA6771E1CFF765600B353FB /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
7AA677201CFF765600B353FB /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
7AA677261CFF774800B353FB /* PhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoCaptureDelegate.swift; sourceTree = "<group>"; };
7AA677281CFF7B5C00B353FB /* PreviewView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
7AE4754E1D00FFA900C2CB9E /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
7AA6770E1CFF765500B353FB /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
7AA677081CFF765500B353FB = {
isa = PBXGroup;
children = (
7AE4754E1D00FFA900C2CB9E /* README.md */,
7AA677131CFF765600B353FB /* AVCam */,
7AA677121CFF765600B353FB /* Products */,
);
sourceTree = "<group>";
};
7AA677121CFF765600B353FB /* Products */ = {
isa = PBXGroup;
children = (
7AA677111CFF765600B353FB /* AVCam.app */,
);
name = Products;
sourceTree = "<group>";
};
7AA677131CFF765600B353FB /* AVCam */ = {
isa = PBXGroup;
children = (
7AA677141CFF765600B353FB /* AppDelegate.swift */,
7AA677281CFF7B5C00B353FB /* PreviewView.swift */,
7AA677161CFF765600B353FB /* CameraViewController.swift */,
7AA677261CFF774800B353FB /* PhotoCaptureDelegate.swift */,
7AA677181CFF765600B353FB /* Main.storyboard */,
7AA6771B1CFF765600B353FB /* Assets.xcassets */,
7AA6771D1CFF765600B353FB /* LaunchScreen.storyboard */,
7AA677201CFF765600B353FB /* Info.plist */,
);
path = AVCam;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
7AA677101CFF765500B353FB /* AVCam */ = {
isa = PBXNativeTarget;
buildConfigurationList = 7AA677231CFF765600B353FB /* Build configuration list for PBXNativeTarget "AVCam" */;
buildPhases = (
7AA6770D1CFF765500B353FB /* Sources */,
7AA6770E1CFF765500B353FB /* Frameworks */,
7AA6770F1CFF765500B353FB /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = AVCam;
productName = AVCam;
productReference = 7AA677111CFF765600B353FB /* AVCam.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
7AA677091CFF765500B353FB /* Project object */ = {
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 0800;
LastUpgradeCheck = 0800;
ORGANIZATIONNAME = "Apple, Inc.";
TargetAttributes = {
7AA677101CFF765500B353FB = {
CreatedOnToolsVersion = 8.0;
ProvisioningStyle = Automatic;
};
};
};
buildConfigurationList = 7AA6770C1CFF765500B353FB /* Build configuration list for PBXProject "AVCam Swift" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 7AA677081CFF765500B353FB;
productRefGroup = 7AA677121CFF765600B353FB /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
7AA677101CFF765500B353FB /* AVCam */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
7AA6770F1CFF765500B353FB /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
7AA6771F1CFF765600B353FB /* LaunchScreen.storyboard in Resources */,
7AA6771C1CFF765600B353FB /* Assets.xcassets in Resources */,
7AA6771A1CFF765600B353FB /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
7AA6770D1CFF765500B353FB /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
7AA677271CFF774800B353FB /* PhotoCaptureDelegate.swift in Sources */,
7AA677291CFF7B5C00B353FB /* PreviewView.swift in Sources */,
7AA677171CFF765600B353FB /* CameraViewController.swift in Sources */,
7AA677151CFF765600B353FB /* AppDelegate.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
7AA677181CFF765600B353FB /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
7AA677191CFF765600B353FB /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
7AA6771D1CFF765600B353FB /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
7AA6771E1CFF765600B353FB /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
7AA677211CFF765600B353FB /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
7AA677221CFF765600B353FB /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
7AA677241CFF765600B353FB /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_IDENTITY = "iPhone Developer";
INFOPLIST_FILE = AVCam/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.AVCam";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = iphoneos;
SWIFT_VERSION = 3.0;
};
name = Debug;
};
7AA677251CFF765600B353FB /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_IDENTITY = "iPhone Developer";
INFOPLIST_FILE = AVCam/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.AVCam";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = iphoneos;
SWIFT_VERSION = 3.0;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
7AA6770C1CFF765500B353FB /* Build configuration list for PBXProject "AVCam Swift" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7AA677211CFF765600B353FB /* Debug */,
7AA677221CFF765600B353FB /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
7AA677231CFF765600B353FB /* Build configuration list for PBXNativeTarget "AVCam" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7AA677241CFF765600B353FB /* Debug */,
7AA677251CFF765600B353FB /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 7AA677091CFF765500B353FB /* Project object */;
}

View File

@ -0,0 +1,91 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0800"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "7AA677101CFF765500B353FB"
BuildableName = "AVCam.app"
BlueprintName = "AVCam"
ReferencedContainer = "container:AVCam Swift.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "7AA677101CFF765500B353FB"
BuildableName = "AVCam.app"
BlueprintName = "AVCam"
ReferencedContainer = "container:AVCam Swift.xcodeproj">
</BuildableReference>
</MacroExpansion>
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "7AA677101CFF765500B353FB"
BuildableName = "AVCam.app"
BlueprintName = "AVCam"
ReferencedContainer = "container:AVCam Swift.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "7AA677101CFF765500B353FB"
BuildableName = "AVCam.app"
BlueprintName = "AVCam"
ReferencedContainer = "container:AVCam Swift.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>

View File

@ -0,0 +1,14 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Application delegate.
*/
import UIKit
// The app's entry point. AVCam does no work at the application-delegate level
// beyond holding the main window; all camera logic lives in CameraViewController.
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
    // Main application window, loaded from the main storyboard.
    var window: UIWindow?
}

View File

@ -0,0 +1,128 @@
{
"images" : [
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-Small.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-Small@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-Small@3x.png",
"scale" : "3x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-40@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-40@3x.png",
"scale" : "3x"
},
{
"size" : "57x57",
"idiom" : "iphone",
"filename" : "Icon.png",
"scale" : "1x"
},
{
"size" : "57x57",
"idiom" : "iphone",
"filename" : "Icon@2x.png",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-60@2x.png",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-60@3x.png",
"scale" : "3x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-Small.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-Small@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-40.png",
"scale" : "1x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-40@2x.png",
"scale" : "2x"
},
{
"size" : "50x50",
"idiom" : "ipad",
"filename" : "Icon-Small-50.png",
"scale" : "1x"
},
{
"size" : "50x50",
"idiom" : "ipad",
"filename" : "Icon-Small-50@2x.png",
"scale" : "2x"
},
{
"size" : "72x72",
"idiom" : "ipad",
"filename" : "Icon-72.png",
"scale" : "1x"
},
{
"size" : "72x72",
"idiom" : "ipad",
"filename" : "Icon-72@2x.png",
"scale" : "2x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-76.png",
"scale" : "1x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-76@2x.png",
"scale" : "2x"
},
{
"size" : "83.5x83.5",
"idiom" : "ipad",
"filename" : "Icon-83.5@2x.png",
"scale" : "2x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

View File

@ -0,0 +1,6 @@
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}

View File

@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11129.12" systemVersion="15F24" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11103.9"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="Llm-lL-Icb"/>
<viewControllerLayoutGuide type="bottom" id="xb3-aO-Qok"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
<color key="tintColor" red="1" green="1" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</document>

View File

@ -0,0 +1,193 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11129.12" systemVersion="15F24" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11103.9"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--Camera View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="CameraViewController" customModule="AVCam" customModuleProvider="target" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
<viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="3eR-Rn-XpZ" userLabel="Preview" customClass="PreviewView" customModule="AVCam" customModuleProvider="target">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<gestureRecognizers/>
<connections>
<outletCollection property="gestureRecognizers" destination="fY6-qX-ntV" appends="YES" id="G6D-dx-xU8"/>
</connections>
</view>
<label hidden="YES" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Camera Unavailable" textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="zf0-db-esM" userLabel="Camera Unavailable">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.0" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="24"/>
<color key="textColor" red="1" green="1" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
</label>
<button hidden="YES" opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="FZr-Ip-7WL" userLabel="Resume">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="24"/>
<inset key="contentEdgeInsets" minX="10" minY="5" maxX="10" maxY="5"/>
<state key="normal" title="Tap to resume">
<color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="resumeInterruptedSession:" destination="BYZ-38-t0r" eventType="touchUpInside" id="o7T-5Z-tfn"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="eRT-dK-6dM" userLabel="Record">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Record">
<color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="toggleMovieRecording:" destination="BYZ-38-t0r" eventType="touchUpInside" id="9R7-Ok-FpB"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="uCj-6P-mHF" userLabel="Still">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="height" constant="30" id="NtC-UN-gTs"/>
<constraint firstAttribute="width" constant="80" id="dxU-UP-4Ae"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Photo">
<color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="capturePhoto:" destination="BYZ-38-t0r" eventType="touchUpInside" id="o5K-SC-fYn"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="rUJ-G6-RPv" userLabel="Camera">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Camera">
<color key="titleShadowColor" red="0.5" green="0.5" blue="0.5" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</state>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="changeCamera:" destination="BYZ-38-t0r" eventType="touchUpInside" id="3W0-h3-6fc"/>
</connections>
</button>
<segmentedControl opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="top" segmentControlStyle="plain" selectedSegmentIndex="0" translatesAutoresizingMaskIntoConstraints="NO" id="FAC-co-10c">
<segments>
<segment title="Photo"/>
<segment title="Movie"/>
</segments>
<connections>
<action selector="toggleCaptureMode:" destination="BYZ-38-t0r" eventType="valueChanged" id="SKd-67-ZHh"/>
</connections>
</segmentedControl>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="eI6-gV-W7d">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="width" constant="200" id="heR-zX-F6K"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Live Photo Mode: On"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="toggleLivePhotoMode:" destination="BYZ-38-t0r" eventType="touchUpInside" id="JqX-wJ-Xf1"/>
</connections>
</button>
<label hidden="YES" opaque="NO" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Live" textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Pii-2r-R2l">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="height" constant="25" id="Kxo-zf-Fe1"/>
<constraint firstAttribute="width" constant="40" id="eRd-mj-8Du"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" red="1" green="1" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</label>
</subviews>
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="3eR-Rn-XpZ" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="125-kC-WZF"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="height" secondItem="eRT-dK-6dM" secondAttribute="height" id="AEV-ew-H4g"/>
<constraint firstItem="eI6-gV-W7d" firstAttribute="top" secondItem="y3c-jy-aDJ" secondAttribute="bottom" constant="8" id="Aao-6b-vLN"/>
<constraint firstItem="3eR-Rn-XpZ" firstAttribute="height" secondItem="8bC-Xf-vdC" secondAttribute="height" id="Ice-47-M9N"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="top" secondItem="rUJ-G6-RPv" secondAttribute="top" id="NFm-e8-abT"/>
<constraint firstItem="FZr-Ip-7WL" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="OaZ-uO-vXY"/>
<constraint firstItem="FAC-co-10c" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="Oow-A6-mDp"/>
<constraint firstItem="zf0-db-esM" firstAttribute="centerY" secondItem="8bC-Xf-vdC" secondAttribute="centerY" id="Ris-mI-8lA"/>
<constraint firstItem="Pii-2r-R2l" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="Upd-h8-1dL"/>
<constraint firstItem="zf0-db-esM" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="W6q-xJ-jfF"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="height" secondItem="rUJ-G6-RPv" secondAttribute="height" id="aQi-F7-E2b"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="top" secondItem="FAC-co-10c" secondAttribute="bottom" constant="20" id="aSR-Je-0lW"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="top" secondItem="eRT-dK-6dM" secondAttribute="top" id="bQd-ro-0Hw"/>
<constraint firstItem="wfy-db-euE" firstAttribute="top" secondItem="uCj-6P-mHF" secondAttribute="bottom" constant="20" id="eWs-co-Aaz"/>
<constraint firstItem="3eR-Rn-XpZ" firstAttribute="centerY" secondItem="8bC-Xf-vdC" secondAttribute="centerY" id="igk-MQ-CGt"/>
<constraint firstItem="rUJ-G6-RPv" firstAttribute="leading" secondItem="uCj-6P-mHF" secondAttribute="trailing" constant="20" id="lsk-Hm-rTd"/>
<constraint firstAttribute="centerX" secondItem="uCj-6P-mHF" secondAttribute="centerX" id="m8a-cF-Rf0"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="width" secondItem="rUJ-G6-RPv" secondAttribute="width" id="o8j-gw-35B"/>
<constraint firstItem="Pii-2r-R2l" firstAttribute="top" secondItem="eI6-gV-W7d" secondAttribute="bottom" constant="8" id="oDE-jY-ryC"/>
<constraint firstItem="3eR-Rn-XpZ" firstAttribute="width" secondItem="8bC-Xf-vdC" secondAttribute="width" id="pSC-xP-dl0"/>
<constraint firstItem="eI6-gV-W7d" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="rqt-bn-mSt"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="width" secondItem="eRT-dK-6dM" secondAttribute="width" id="s8u-Y8-n27"/>
<constraint firstItem="FZr-Ip-7WL" firstAttribute="centerY" secondItem="8bC-Xf-vdC" secondAttribute="centerY" id="sTY-i6-czN"/>
<constraint firstItem="uCj-6P-mHF" firstAttribute="leading" secondItem="eRT-dK-6dM" secondAttribute="trailing" constant="20" id="zwj-TX-t6O"/>
</constraints>
</view>
<extendedEdge key="edgesForExtendedLayout"/>
<nil key="simulatedStatusBarMetrics"/>
<connections>
<outlet property="cameraButton" destination="rUJ-G6-RPv" id="dAV-WS-N1p"/>
<outlet property="cameraUnavailableLabel" destination="zf0-db-esM" id="P9W-lb-Pb8"/>
<outlet property="captureModeControl" destination="FAC-co-10c" id="KXj-wg-BvS"/>
<outlet property="capturingLivePhotoLabel" destination="Pii-2r-R2l" id="JAa-4l-5SD"/>
<outlet property="livePhotoModeButton" destination="eI6-gV-W7d" id="r9f-cN-YSH"/>
<outlet property="photoButton" destination="uCj-6P-mHF" id="Ha8-ua-hxy"/>
<outlet property="previewView" destination="3eR-Rn-XpZ" id="e7I-nu-L6j"/>
<outlet property="recordButton" destination="eRT-dK-6dM" id="iqk-en-NsW"/>
<outlet property="resumeButton" destination="FZr-Ip-7WL" id="tX5-Sx-rQK"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
<tapGestureRecognizer id="fY6-qX-ntV">
<connections>
<action selector="focusAndExposeTap:" destination="BYZ-38-t0r" id="65g-8k-5pv"/>
</connections>
</tapGestureRecognizer>
</objects>
<point key="canvasLocation" x="-656" y="-630"/>
</scene>
</scenes>
<color key="tintColor" red="1" green="1" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</document>

View File

@ -0,0 +1,940 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
View controller for camera interface.
*/
import UIKit
import AVFoundation
import Photos
class CameraViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {
// MARK: View Controller Life Cycle
/// Configures the initial UI state, wires the preview view to the capture
/// session, checks camera authorization, and dispatches session setup to
/// the session queue.
override func viewDidLoad() {
    super.viewDidLoad()

    // Disable UI. The UI is enabled if and only if the session starts running.
    cameraButton.isEnabled = false
    recordButton.isEnabled = false
    photoButton.isEnabled = false
    livePhotoModeButton.isEnabled = false
    captureModeControl.isEnabled = false

    // Set up the video preview view.
    previewView.session = session

    /*
        Check video authorization status. Video access is required and audio
        access is optional. If audio access is denied, audio is not recorded
        during movie recording.
    */
    switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) {
        case .authorized:
            // The user has previously granted access to the camera.
            break

        case .notDetermined:
            /*
                The user has not yet been presented with the option to grant
                video access. We suspend the session queue to delay session
                setup until the access request has completed.

                Note that audio access will be implicitly requested when we
                create an AVCaptureDeviceInput for audio during session setup.
            */
            sessionQueue.suspend()
            AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { [unowned self] granted in
                if !granted {
                    self.setupResult = .notAuthorized
                }
                self.sessionQueue.resume()
            })

        default:
            // The user has previously denied access.
            setupResult = .notAuthorized
    }

    /*
        Setup the capture session.
        In general it is not safe to mutate an AVCaptureSession or any of its
        inputs, outputs, or connections from multiple threads at the same time.

        Why not do all of this on the main queue?
        Because AVCaptureSession.startRunning() is a blocking call which can
        take a long time. We dispatch session setup to the sessionQueue so
        that the main queue isn't blocked, which keeps the UI responsive.
    */
    sessionQueue.async { [unowned self] in
        self.configureSession()
    }
}
/// Starts the session if setup succeeded, or surfaces the failure to the
/// user with an alert (on the main queue).
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)

    sessionQueue.async {
        switch self.setupResult {
            case .success:
                // Only setup observers and start the session running if setup succeeded.
                self.addObservers()
                self.session.startRunning()
                self.isSessionRunning = self.session.isRunning

            case .notAuthorized:
                // Camera permission was denied; offer a shortcut to Settings.
                DispatchQueue.main.async { [unowned self] in
                    let message = NSLocalizedString("AVCam doesn't have permission to use the camera, please change privacy settings", comment: "Alert message when the user has denied access to the camera")
                    let alertController = UIAlertController(title: "AVCam", message: message, preferredStyle: .alert)
                    alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), style: .cancel, handler: nil))
                    alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Alert button to open Settings"), style: .`default`, handler: { action in
                        UIApplication.shared.open(URL(string: UIApplicationOpenSettingsURLString)!, options: [:], completionHandler: nil)
                    }))

                    self.present(alertController, animated: true, completion: nil)
                }

            case .configurationFailed:
                // Session configuration failed (see configureSession()); inform the user.
                DispatchQueue.main.async { [unowned self] in
                    let message = NSLocalizedString("Unable to capture media", comment: "Alert message when something goes wrong during capture session configuration")
                    let alertController = UIAlertController(title: "AVCam", message: message, preferredStyle: .alert)
                    alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), style: .cancel, handler: nil))
                    self.present(alertController, animated: true, completion: nil)
                }
        }
    }
}
/// Stops the session and removes observers (mirror of viewWillAppear),
/// doing the teardown on the session queue.
override func viewWillDisappear(_ animated: Bool) {
    sessionQueue.async { [unowned self] in
        // Nothing to tear down unless configuration actually succeeded.
        guard self.setupResult == .success else { return }
        self.session.stopRunning()
        self.isSessionRunning = self.session.isRunning
        self.removeObservers()
    }

    super.viewWillDisappear(animated)
}
/// Disable autorotation of the interface while movie recording is in
/// progress; allow it otherwise (including when no movie output exists).
override var shouldAutorotate: Bool {
    return !(movieFileOutput?.isRecording ?? false)
}
/// The camera UI supports every interface orientation.
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
    return .all
}
/// Keeps the preview layer's video orientation in sync with the device
/// orientation during rotation. Only concrete portrait/landscape
/// orientations are applied (face up/down are ignored).
override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) {
    super.viewWillTransition(to: size, with: coordinator)

    guard let previewConnection = previewView.videoPreviewLayer.connection else { return }

    let deviceOrientation = UIDevice.current.orientation
    if deviceOrientation.isPortrait || deviceOrientation.isLandscape,
        let newVideoOrientation = deviceOrientation.videoOrientation {
        previewConnection.videoOrientation = newVideoOrientation
    }
}
// MARK: Session Management
// Outcome of capture-session configuration; consulted before starting the
// session and when presenting failure alerts.
private enum SessionSetupResult {
    case success
    case notAuthorized        // the user denied camera access
    case configurationFailed  // an input or output could not be added
}
private let session = AVCaptureSession()

// Mirrors session.isRunning; written on the session queue after start/stop
// so other code can consult the last-known state.
private var isSessionRunning = false

// Communicate with the session and other session objects on this queue.
private let sessionQueue = DispatchQueue(label: "session queue", attributes: [], target: nil)

private var setupResult: SessionSetupResult = .success

// The active camera input. Implicitly unwrapped: assigned during
// configureSession() before any other code reads it.
var videoDeviceInput: AVCaptureDeviceInput!

// Storyboard view whose videoPreviewLayer displays the session's video.
@IBOutlet private weak var previewView: PreviewView!
// Call this on the session queue.
/// Configures the capture session: photo preset, video input, audio input,
/// and photo output. Call this on the session queue only.
///
/// On any failure, `setupResult` is set to `.configurationFailed` and the
/// pending configuration is committed before returning, so the session is
/// never left inside an open beginConfiguration()/commitConfiguration() pair.
private func configureSession() {
    if setupResult != .success {
        return
    }

    session.beginConfiguration()

    /*
        We do not create an AVCaptureMovieFileOutput when setting up the session because the
        AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto.
    */
    session.sessionPreset = AVCaptureSessionPresetPhoto

    // Add video input.
    do {
        var defaultVideoDevice: AVCaptureDevice?

        // Choose the back dual camera if available, otherwise default to a wide angle camera.
        if let dualCameraDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInDuoCamera, mediaType: AVMediaTypeVideo, position: .back) {
            defaultVideoDevice = dualCameraDevice
        }
        else if let backCameraDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .back) {
            // If the back dual camera is not available, default to the back wide angle camera.
            defaultVideoDevice = backCameraDevice
        }
        else if let frontCameraDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .front) {
            // In some cases where users break their phones, the back wide angle camera is not available. In this case, we should default to the front wide angle camera.
            defaultVideoDevice = frontCameraDevice
        }

        // Guard against hardware with no usable camera (e.g. the Simulator):
        // the original code passed a nil device straight to
        // AVCaptureDeviceInput(device:), which crashes at runtime.
        guard let videoDevice = defaultVideoDevice else {
            print("Could not find a suitable video device")
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }

        let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)

        if session.canAddInput(videoDeviceInput) {
            session.addInput(videoDeviceInput)
            self.videoDeviceInput = videoDeviceInput

            DispatchQueue.main.async {
                /*
                    Why are we dispatching this to the main queue?
                    Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
                    can only be manipulated on the main thread.
                    Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
                    on the AVCaptureVideoPreviewLayer's connection with other session manipulation.

                    Use the status bar orientation as the initial video orientation. Subsequent orientation changes are
                    handled by CameraViewController.viewWillTransition(to:with:).
                */
                let statusBarOrientation = UIApplication.shared.statusBarOrientation
                var initialVideoOrientation: AVCaptureVideoOrientation = .portrait
                if statusBarOrientation != .unknown {
                    if let videoOrientation = statusBarOrientation.videoOrientation {
                        initialVideoOrientation = videoOrientation
                    }
                }

                self.previewView.videoPreviewLayer.connection.videoOrientation = initialVideoOrientation
            }
        }
        else {
            print("Could not add video device input to the session")
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }
    }
    catch {
        print("Could not create video device input: \(error)")
        setupResult = .configurationFailed
        session.commitConfiguration()
        return
    }

    // Add audio input. Audio is optional: a failure here is logged but does
    // not fail session configuration.
    do {
        let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
        let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)

        if session.canAddInput(audioDeviceInput) {
            session.addInput(audioDeviceInput)
        }
        else {
            print("Could not add audio device input to the session")
        }
    }
    catch {
        print("Could not create audio device input: \(error)")
    }

    // Add photo output and enable high-resolution / Live Photo capture
    // where the hardware supports it.
    if session.canAddOutput(photoOutput) {
        session.addOutput(photoOutput)

        photoOutput.isHighResolutionCaptureEnabled = true
        photoOutput.isLivePhotoCaptureEnabled = photoOutput.isLivePhotoCaptureSupported
        livePhotoMode = photoOutput.isLivePhotoCaptureSupported ? .on : .off
    }
    else {
        print("Could not add photo output to the session")
        setupResult = .configurationFailed
        session.commitConfiguration()
        return
    }

    session.commitConfiguration()
}
/// Attempts to restart a session that was interrupted (e.g. by a phone or
/// FaceTime call); hides the resume button on success, alerts on failure.
@IBAction private func resumeInterruptedSession(_ resumeButton: UIButton) {
    sessionQueue.async { [unowned self] in
        /*
            The session might fail to start running, e.g., if a phone or FaceTime call is still
            using audio or video. A failure to start the session running will be communicated via
            a session runtime error notification. To avoid repeatedly failing to start the session
            running, we only try to restart the session running in the session runtime error handler
            if we aren't trying to resume the session running.
        */
        self.session.startRunning()
        self.isSessionRunning = self.session.isRunning

        if self.session.isRunning {
            // Resumed successfully: the button is no longer needed.
            DispatchQueue.main.async { [unowned self] in
                self.resumeButton.isHidden = true
            }
        }
        else {
            DispatchQueue.main.async { [unowned self] in
                let message = NSLocalizedString("Unable to resume", comment: "Alert message when unable to resume the session running")
                let alertController = UIAlertController(title: "AVCam", message: message, preferredStyle: .alert)
                alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), style: .cancel, handler: nil))
                self.present(alertController, animated: true, completion: nil)
            }
        }
    }
}
// Capture modes selectable from the UI; raw values match the segmented
// control's segment indices (Photo = 0, Movie = 1).
private enum CaptureMode: Int {
    case photo = 0
    case movie = 1
}

// Segmented control that switches between photo and movie capture.
@IBOutlet private weak var captureModeControl: UISegmentedControl!
/// Reconfigures the session when the user switches between Photo and Movie
/// capture modes. Session mutation happens on the session queue; UI updates
/// are dispatched back to the main queue.
@IBAction private func toggleCaptureMode(_ captureModeControl: UISegmentedControl) {
    if captureModeControl.selectedSegmentIndex == CaptureMode.photo.rawValue {
        recordButton.isEnabled = false

        sessionQueue.async { [unowned self] in
            /*
                Remove the AVCaptureMovieFileOutput from the session because movie recording is
                not supported with AVCaptureSessionPresetPhoto. Additionally, Live Photo
                capture is not supported when an AVCaptureMovieFileOutput is connected to the session.
            */
            self.session.beginConfiguration()
            // NOTE(review): movieFileOutput can be nil here if movie mode was
            // never entered — confirm removeOutput(_:) tolerates nil.
            self.session.removeOutput(self.movieFileOutput)
            self.session.sessionPreset = AVCaptureSessionPresetPhoto
            self.session.commitConfiguration()

            self.movieFileOutput = nil

            // Re-enable Live Photo capture, which is unavailable while a
            // movie file output is attached.
            if self.photoOutput.isLivePhotoCaptureSupported {
                self.photoOutput.isLivePhotoCaptureEnabled = true

                DispatchQueue.main.async {
                    self.livePhotoModeButton.isEnabled = true
                    self.livePhotoModeButton.isHidden = false
                }
            }
        }
    }
    else if captureModeControl.selectedSegmentIndex == CaptureMode.movie.rawValue {
        // Live Photo capture is not possible in movie mode.
        livePhotoModeButton.isHidden = true

        sessionQueue.async { [unowned self] in
            let movieFileOutput = AVCaptureMovieFileOutput()

            if self.session.canAddOutput(movieFileOutput) {
                self.session.beginConfiguration()
                self.session.addOutput(movieFileOutput)
                self.session.sessionPreset = AVCaptureSessionPresetHigh
                // Prefer automatic video stabilization when the connection supports it.
                if let connection = movieFileOutput.connection(withMediaType: AVMediaTypeVideo) {
                    if connection.isVideoStabilizationSupported {
                        connection.preferredVideoStabilizationMode = .auto
                    }
                }
                self.session.commitConfiguration()

                self.movieFileOutput = movieFileOutput

                DispatchQueue.main.async { [unowned self] in
                    self.recordButton.isEnabled = true
                }
            }
        }
    }
}
// MARK: Device Configuration
// Button that triggers changeCamera(_:) to switch between front and back cameras.
@IBOutlet private weak var cameraButton: UIButton!

// Label named for the camera-unavailable state; presumably shown while the
// session is interrupted — confirm against the observer code.
@IBOutlet private weak var cameraUnavailableLabel: UILabel!

// Discovery session listing the wide-angle and dual cameras consulted by changeCamera(_:).
private let videoDeviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDuoCamera], mediaType: AVMediaTypeVideo, position: .unspecified)!
/// Switches between the front- and back-facing cameras, preferring the dual
/// camera on the back and the wide-angle camera on the front. Falls back to
/// any device at the preferred position if the preferred type is unavailable.
@IBAction private func changeCamera(_ cameraButton: UIButton) {
    // Disable the controls until the switch completes on the session queue.
    cameraButton.isEnabled = false
    recordButton.isEnabled = false
    photoButton.isEnabled = false
    livePhotoModeButton.isEnabled = false
    captureModeControl.isEnabled = false

    sessionQueue.async { [unowned self] in
        let currentVideoDevice = self.videoDeviceInput.device
        let currentPosition = currentVideoDevice!.position

        let preferredPosition: AVCaptureDevicePosition
        let preferredDeviceType: AVCaptureDeviceType

        switch currentPosition {
            case .unspecified, .front:
                preferredPosition = .back
                preferredDeviceType = .builtInDuoCamera

            case .back:
                preferredPosition = .front
                preferredDeviceType = .builtInWideAngleCamera
        }

        let devices = self.videoDeviceDiscoverySession.devices!
        var newVideoDevice: AVCaptureDevice? = nil

        // First, look for a device with both the preferred position and device type. Otherwise, look for a device with only the preferred position.
        if let device = devices.first(where: { $0.position == preferredPosition && $0.deviceType == preferredDeviceType }) {
            newVideoDevice = device
        }
        else if let device = devices.first(where: { $0.position == preferredPosition }) {
            newVideoDevice = device
        }

        if let videoDevice = newVideoDevice {
            do {
                let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)

                self.session.beginConfiguration()

                // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
                self.session.removeInput(self.videoDeviceInput)

                if self.session.canAddInput(videoDeviceInput) {
                    // Move the subject-area-change observation to the new device.
                    NotificationCenter.default.removeObserver(self, name: Notification.Name("AVCaptureDeviceSubjectAreaDidChangeNotification"), object: currentVideoDevice!)

                    NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaDidChange), name: Notification.Name("AVCaptureDeviceSubjectAreaDidChangeNotification"), object: videoDeviceInput.device)

                    self.session.addInput(videoDeviceInput)
                    self.videoDeviceInput = videoDeviceInput
                }
                else {
                    // Could not add the new input: restore the previous one.
                    self.session.addInput(self.videoDeviceInput);
                }

                if let connection = self.movieFileOutput?.connection(withMediaType: AVMediaTypeVideo) {
                    if connection.isVideoStabilizationSupported {
                        connection.preferredVideoStabilizationMode = .auto
                    }
                }

                /*
                    Set Live Photo capture enabled if it is supported. When changing cameras, the
                    `isLivePhotoCaptureEnabled` property of the AVCapturePhotoOutput gets set to NO when
                    a video device is disconnected from the session. After the new video device is
                    added to the session, re-enable Live Photo capture on the AVCapturePhotoOutput if it is supported.
                */
                self.photoOutput.isLivePhotoCaptureEnabled = self.photoOutput.isLivePhotoCaptureSupported;

                self.session.commitConfiguration()
            }
            catch {
                // Fixed spelling of the log message ("occured" -> "occurred").
                print("Error occurred while creating video device input: \(error)")
            }
        }

        // Re-enable the controls on the main queue once the switch finishes.
        DispatchQueue.main.async { [unowned self] in
            self.cameraButton.isEnabled = true
            self.recordButton.isEnabled = self.movieFileOutput != nil
            self.photoButton.isEnabled = true
            self.livePhotoModeButton.isEnabled = true
            self.captureModeControl.isEnabled = true
        }
    }
}
/// Tap-to-focus gesture handler: focuses and exposes once at the tapped point.
@IBAction private func focusAndExposeTap(_ gestureRecognizer: UITapGestureRecognizer) {
    // Convert the tap's layer-space location into a capture-device point of interest.
    let layerPoint = gestureRecognizer.location(in: gestureRecognizer.view)
    let devicePoint = self.previewView.videoPreviewLayer.captureDevicePointOfInterest(for: layerPoint)
    // One-shot focus/expose at the point, with subject-area monitoring to recenter later.
    focus(with: .autoFocus, exposureMode: .autoExpose, at: devicePoint, monitorSubjectAreaChange: true)
}
/// Applies focus and exposure settings at a device-space point on the session queue.
///
/// - Parameters:
///   - focusMode: Focus mode to apply if the device supports it.
///   - exposureMode: Exposure mode to apply if the device supports it.
///   - devicePoint: Point of interest in capture-device coordinates (0...1).
///   - monitorSubjectAreaChange: Whether subject-area-change monitoring should be on afterwards.
private func focus(with focusMode: AVCaptureFocusMode, exposureMode: AVCaptureExposureMode, at devicePoint: CGPoint, monitorSubjectAreaChange: Bool) {
    sessionQueue.async { [unowned self] in
        guard let device = self.videoDeviceInput.device else { return }
        do {
            try device.lockForConfiguration()
            // Setting a point of interest alone does nothing: the matching mode
            // must also be set to actually kick off a focus/exposure operation.
            if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(focusMode) {
                device.focusPointOfInterest = devicePoint
                device.focusMode = focusMode
            }
            if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) {
                device.exposurePointOfInterest = devicePoint
                device.exposureMode = exposureMode
            }
            device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
            device.unlockForConfiguration()
        }
        catch {
            print("Could not lock device for configuration: \(error)")
        }
    }
}
// MARK: Capturing Photos
// Output used for still and Live Photo capture.
private let photoOutput = AVCapturePhotoOutput()
// In-flight capture delegates keyed by their photo settings' uniqueID. The photo
// output keeps its delegate weakly, so this map holds each delegate alive until
// its capture completes (see capturePhoto(_:)).
private var inProgressPhotoCaptureDelegates = [Int64 : PhotoCaptureDelegate]()
// Shutter button.
@IBOutlet private weak var photoButton: UIButton!
// Captures a still photo (and, when Live Photo mode is on and supported, a
// Live Photo companion movie), then hands the result to a per-capture delegate.
@IBAction private func capturePhoto(_ photoButton: UIButton) {
/*
Retrieve the video preview layer's video orientation on the main queue before
entering the session queue. We do this to ensure UI elements are accessed on
the main thread and session configuration is done on the session queue.
*/
let videoPreviewLayerOrientation = previewView.videoPreviewLayer.connection.videoOrientation
sessionQueue.async {
// Update the photo output's connection to match the video orientation of the video preview layer.
if let photoOutputConnection = self.photoOutput.connection(withMediaType: AVMediaTypeVideo) {
photoOutputConnection.videoOrientation = videoPreviewLayerOrientation
}
// Capture a JPEG photo with flash set to auto and high resolution photo enabled.
let photoSettings = AVCapturePhotoSettings()
photoSettings.flashMode = .auto
photoSettings.isHighResolutionPhotoEnabled = true
// Request an embedded preview image in the first available pixel format, if any.
if photoSettings.availablePreviewPhotoPixelFormatTypes.count > 0 {
photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String : photoSettings.availablePreviewPhotoPixelFormatTypes.first!]
}
if self.livePhotoMode == .on && self.photoOutput.isLivePhotoCaptureSupported { // Live Photo capture is not supported in movie mode.
// The companion movie is recorded to a unique temporary path for this capture.
let livePhotoMovieFileName = NSUUID().uuidString
let livePhotoMovieFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((livePhotoMovieFileName as NSString).appendingPathExtension("mov")!)
photoSettings.livePhotoMovieFileURL = URL(fileURLWithPath: livePhotoMovieFilePath)
}
// Use a separate object for the photo capture delegate to isolate each capture life cycle.
let photoCaptureDelegate = PhotoCaptureDelegate(with: photoSettings, willCapturePhotoAnimation: {
// Flash the preview layer to give shutter feedback.
DispatchQueue.main.async { [unowned self] in
self.previewView.videoPreviewLayer.opacity = 0
UIView.animate(withDuration: 0.25) { [unowned self] in
self.previewView.videoPreviewLayer.opacity = 1
}
}
}, capturingLivePhoto: { capturing in
/*
Because Live Photo captures can overlap, we need to keep track of the
number of in progress Live Photo captures to ensure that the
Live Photo label stays visible during these captures.
*/
self.sessionQueue.async { [unowned self] in
if capturing {
self.inProgressLivePhotoCapturesCount += 1
}
else {
self.inProgressLivePhotoCapturesCount -= 1
}
// Snapshot the count on the session queue before hopping to the main queue.
let inProgressLivePhotoCapturesCount = self.inProgressLivePhotoCapturesCount
DispatchQueue.main.async { [unowned self] in
if inProgressLivePhotoCapturesCount > 0 {
self.capturingLivePhotoLabel.isHidden = false
}
else if inProgressLivePhotoCapturesCount == 0 {
self.capturingLivePhotoLabel.isHidden = true
}
else {
// A negative count means unbalanced begin/end callbacks.
print("Error: In progress live photo capture count is less than 0");
}
}
}
}, completed: { [unowned self] photoCaptureDelegate in
// When the capture is complete, remove a reference to the photo capture delegate so it can be deallocated.
self.sessionQueue.async { [unowned self] in
self.inProgressPhotoCaptureDelegates[photoCaptureDelegate.requestedPhotoSettings.uniqueID] = nil
}
}
)
/*
The Photo Output keeps a weak reference to the photo capture delegate so
we store it in an array to maintain a strong reference to this object
until the capture is completed.
*/
self.inProgressPhotoCaptureDelegates[photoCaptureDelegate.requestedPhotoSettings.uniqueID] = photoCaptureDelegate
self.photoOutput.capturePhoto(with: photoSettings, delegate: photoCaptureDelegate)
}
}
// Whether Live Photo capture is requested for still captures.
private enum LivePhotoMode {
case on
case off
}
// Current Live Photo mode; read and toggled on the session queue (see toggleLivePhotoMode(_:)).
private var livePhotoMode: LivePhotoMode = .off
// Button toggling Live Photo mode; hidden when Live Photo capture is unsupported.
@IBOutlet private weak var livePhotoModeButton: UIButton!
/// Flips Live Photo mode on/off and updates the button title.
@IBAction private func toggleLivePhotoMode(_ livePhotoModeButton: UIButton) {
    // livePhotoMode is owned by the session queue, so flip it there and then
    // hop back to the main queue to refresh the button title.
    sessionQueue.async { [unowned self] in
        self.livePhotoMode = (self.livePhotoMode == .on) ? .off : .on
        let updatedMode = self.livePhotoMode
        DispatchQueue.main.async { [unowned self] in
            let title = (updatedMode == .on)
                ? NSLocalizedString("Live Photo Mode: On", comment: "Live photo mode button on title")
                : NSLocalizedString("Live Photo Mode: Off", comment: "Live photo mode button off title")
            self.livePhotoModeButton.setTitle(title, for: [])
        }
    }
}
// Number of Live Photo captures currently in progress (captures can overlap);
// mutated only on the session queue.
private var inProgressLivePhotoCapturesCount = 0
// Label shown while at least one Live Photo capture is in progress.
@IBOutlet var capturingLivePhotoLabel: UILabel!
// MARK: Recording Movies
// Movie output; nil while the app is in photo-only capture mode.
private var movieFileOutput: AVCaptureMovieFileOutput? = nil
// Background task that keeps the app alive long enough to finish writing a recording.
private var backgroundRecordingID: UIBackgroundTaskIdentifier? = nil
// Record/Stop toggle button.
@IBOutlet private weak var recordButton: UIButton!
// Button offered when an interrupted session may be resumable.
@IBOutlet private weak var resumeButton: UIButton!
// Starts a movie recording to a temporary file if idle, or stops the
// in-progress recording. No-op when there is no movie output configured.
@IBAction private func toggleMovieRecording(_ recordButton: UIButton) {
guard let movieFileOutput = self.movieFileOutput else {
return
}
/*
Disable the Camera button until recording finishes, and disable
the Record button until recording starts or finishes.
See the AVCaptureFileOutputRecordingDelegate methods.
*/
cameraButton.isEnabled = false
recordButton.isEnabled = false
captureModeControl.isEnabled = false
/*
Retrieve the video preview layer's video orientation on the main queue
before entering the session queue. We do this to ensure UI elements are
accessed on the main thread and session configuration is done on the session queue.
*/
let videoPreviewLayerOrientation = previewView.videoPreviewLayer.connection.videoOrientation
sessionQueue.async { [unowned self] in
if !movieFileOutput.isRecording {
if UIDevice.current.isMultitaskingSupported {
/*
Setup background task.
This is needed because the `capture(_:, didFinishRecordingToOutputFileAt:, fromConnections:, error:)`
callback is not received until AVCam returns to the foreground unless you request background execution time.
This also ensures that there will be time to write the file to the photo library when AVCam is backgrounded.
To conclude this background execution, endBackgroundTask(_:) is called in
`capture(_:, didFinishRecordingToOutputFileAt:, fromConnections:, error:)` after the recorded file has been saved.
*/
self.backgroundRecordingID = UIApplication.shared.beginBackgroundTask(expirationHandler: nil)
}
// Update the orientation on the movie file output video connection before starting recording.
let movieFileOutputConnection = self.movieFileOutput?.connection(withMediaType: AVMediaTypeVideo)
movieFileOutputConnection?.videoOrientation = videoPreviewLayerOrientation
// Start recording to a temporary file.
let outputFileName = NSUUID().uuidString
let outputFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((outputFileName as NSString).appendingPathExtension("mov")!)
movieFileOutput.startRecording(toOutputFileURL: URL(fileURLWithPath: outputFilePath), recordingDelegate: self)
}
else {
movieFileOutput.stopRecording()
}
}
}
/// AVCaptureFileOutputRecordingDelegate: recording has begun.
func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    // Recording is underway; let the user stop it.
    DispatchQueue.main.async { [unowned self] in
        let stopTitle = NSLocalizedString("Stop", comment: "Recording button stop title")
        self.recordButton.setTitle(stopTitle, for: [])
        self.recordButton.isEnabled = true
    }
}
// AVCaptureFileOutputRecordingDelegate: recording finished (or failed).
// Saves the movie to the photo library, removes the temporary file, ends the
// background task, and re-enables the capture UI.
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
/*
Note that currentBackgroundRecordingID is used to end the background task
associated with this recording. This allows a new recording to be started,
associated with a new UIBackgroundTaskIdentifier, once the movie file output's
`isRecording` property is back to false which happens sometime after this method
returns.
Note: Since we use a unique file path for each recording, a new recording will
not overwrite a recording currently being saved.
*/
// Deletes the temporary movie file (if still present) and ends the background task.
func cleanup() {
let path = outputFileURL.path
if FileManager.default.fileExists(atPath: path) {
do {
try FileManager.default.removeItem(atPath: path)
}
catch {
print("Could not remove file at url: \(outputFileURL)")
}
}
if let currentBackgroundRecordingID = backgroundRecordingID {
// Reset the stored ID first so a new recording can begin a fresh task.
backgroundRecordingID = UIBackgroundTaskInvalid
if currentBackgroundRecordingID != UIBackgroundTaskInvalid {
UIApplication.shared.endBackgroundTask(currentBackgroundRecordingID)
}
}
}
var success = true
if error != nil {
// NOTE(review): `error` is an Optional here, so this interpolation prints "Optional(...)".
print("Movie file finishing error: \(error)")
// AVFoundation may still have written a playable file; trust its success flag.
success = (((error as NSError).userInfo[AVErrorRecordingSuccessfullyFinishedKey] as AnyObject).boolValue)!
}
if success {
// Check authorization status.
PHPhotoLibrary.requestAuthorization { status in
if status == .authorized {
// Save the movie file to the photo library and cleanup.
PHPhotoLibrary.shared().performChanges({
let options = PHAssetResourceCreationOptions()
// Move (not copy) the temporary file into the library.
options.shouldMoveFile = true
let creationRequest = PHAssetCreationRequest.forAsset()
creationRequest.addResource(with: .video, fileURL: outputFileURL, options: options)
}, completionHandler: { success, error in
if !success {
print("Could not save movie to photo library: \(error)")
}
cleanup()
}
)
}
else {
cleanup()
}
}
}
else {
cleanup()
}
// Enable the Camera and Record buttons to let the user switch camera and start another recording.
DispatchQueue.main.async { [unowned self] in
// Only enable the ability to change camera if the device has more than one camera.
self.cameraButton.isEnabled = self.videoDeviceDiscoverySession.uniqueDevicePositionsCount() > 1
self.recordButton.isEnabled = true
self.captureModeControl.isEnabled = true
self.recordButton.setTitle(NSLocalizedString("Record", comment: "Recording button record title"), for: [])
}
}
// MARK: KVO and Notifications
// KVO context distinguishing this class's "running" observation from any superclass observations.
private var sessionRunningObserveContext = 0
/// Registers the KVO and notification observers this controller needs.
/// Mirrored by removeObservers().
private func addObservers() {
    // Observe the session's running state via KVO (handled in observeValue(forKeyPath:...)).
    session.addObserver(self, forKeyPath: "running", options: .new, context: &sessionRunningObserveContext)
    let center = NotificationCenter.default
    // Refocus at the frame center when the camera reports a subject-area change.
    center.addObserver(self, selector: #selector(subjectAreaDidChange), name: Notification.Name("AVCaptureDeviceSubjectAreaDidChangeNotification"), object: videoDeviceInput.device)
    center.addObserver(self, selector: #selector(sessionRuntimeError), name: Notification.Name("AVCaptureSessionRuntimeErrorNotification"), object: session)
    /*
     A session can only run when the app is full screen. It will be interrupted
     in a multi-app layout, introduced in iOS 9, see also the documentation of
     AVCaptureSessionInterruptionReason. Add observers to handle these session
     interruptions and show a preview is paused message. See the documentation
     of AVCaptureSessionWasInterruptedNotification for other interruption reasons.
     */
    center.addObserver(self, selector: #selector(sessionWasInterrupted), name: Notification.Name("AVCaptureSessionWasInterruptedNotification"), object: session)
    center.addObserver(self, selector: #selector(sessionInterruptionEnded), name: Notification.Name("AVCaptureSessionInterruptionEndedNotification"), object: session)
}
// Tears down everything registered in addObservers().
private func removeObservers() {
// Removes every notification observation registered by this object.
NotificationCenter.default.removeObserver(self)
// KVO observations must be removed explicitly and individually.
session.removeObserver(self, forKeyPath: "running", context: &sessionRunningObserveContext)
}
/// KVO callback for the session's "running" key path: synchronizes the
/// capture controls with whether the session is actually running.
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
    // Forward observations that are not ours to the superclass.
    guard context == &sessionRunningObserveContext else {
        super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
        return
    }
    guard let isSessionRunning = (change?[.newKey] as AnyObject?)?.boolValue else { return }
    // Snapshot the photo output's Live Photo capabilities before hopping queues.
    let isLivePhotoCaptureSupported = photoOutput.isLivePhotoCaptureSupported
    let isLivePhotoCaptureEnabled = photoOutput.isLivePhotoCaptureEnabled
    DispatchQueue.main.async { [unowned self] in
        // Only enable the ability to change camera if the device has more than one camera.
        self.cameraButton.isEnabled = isSessionRunning && self.videoDeviceDiscoverySession.uniqueDevicePositionsCount() > 1
        self.recordButton.isEnabled = isSessionRunning && self.movieFileOutput != nil
        self.photoButton.isEnabled = isSessionRunning
        self.captureModeControl.isEnabled = isSessionRunning
        self.livePhotoModeButton.isEnabled = isSessionRunning && isLivePhotoCaptureEnabled
        self.livePhotoModeButton.isHidden = !(isSessionRunning && isLivePhotoCaptureSupported)
    }
}
/// Recenters focus/exposure when the camera reports a subject-area change.
func subjectAreaDidChange(notification: NSNotification) {
    // Continuous exposure without further monitoring avoids a notification feedback loop.
    let centerPoint = CGPoint(x: 0.5, y: 0.5)
    focus(with: .autoFocus, exposureMode: .continuousAutoExposure, at: centerPoint, monitorSubjectAreaChange: false)
}
/// Handles AVCaptureSessionRuntimeError: restarts automatically after a media
/// services reset (when the last start succeeded), otherwise offers the resume button.
func sessionRuntimeError(notification: NSNotification) {
    guard let errorValue = notification.userInfo?[AVCaptureSessionErrorKey] as? NSError else {
        return
    }
    let error = AVError(_nsError: errorValue)
    print("Capture session runtime error: \(error)")
    // Anything other than a media-services reset is left to the user to resume.
    guard error.code == .mediaServicesWereReset else {
        resumeButton.isHidden = false
        return
    }
    sessionQueue.async { [unowned self] in
        if self.isSessionRunning {
            // The last start succeeded, so try restarting transparently.
            self.session.startRunning()
            self.isSessionRunning = self.session.isRunning
        }
        else {
            DispatchQueue.main.async { [unowned self] in
                self.resumeButton.isHidden = false
            }
        }
    }
}
// Shows the resume button or the "camera unavailable" label depending on why
// the session was interrupted.
func sessionWasInterrupted(notification: NSNotification) {
/*
In some scenarios we want to enable the user to resume the session running.
For example, if music playback is initiated via control center while
using AVCam, then the user can let AVCam resume
the session running, which will stop music playback. Note that stopping
music playback in control center will not automatically resume the session
running. Also note that it is not always possible to resume, see `resumeInterruptedSession(_:)`.
*/
if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?, let reasonIntegerValue = userInfoValue.integerValue, let reason = AVCaptureSessionInterruptionReason(rawValue: reasonIntegerValue) {
print("Capture session was interrupted with reason \(reason)")
var showResumeButton = false
if reason == AVCaptureSessionInterruptionReason.audioDeviceInUseByAnotherClient || reason == AVCaptureSessionInterruptionReason.videoDeviceInUseByAnotherClient {
// Another client holds the device; the user may be able to reclaim it.
showResumeButton = true
}
else if reason == AVCaptureSessionInterruptionReason.videoDeviceNotAvailableWithMultipleForegroundApps {
// Simply fade-in a label to inform the user that the camera is unavailable.
cameraUnavailableLabel.alpha = 0
cameraUnavailableLabel.isHidden = false
UIView.animate(withDuration: 0.25) { [unowned self] in
self.cameraUnavailableLabel.alpha = 1
}
}
if showResumeButton {
// Simply fade-in a button to enable the user to try to resume the session running.
resumeButton.alpha = 0
resumeButton.isHidden = false
UIView.animate(withDuration: 0.25) { [unowned self] in
self.resumeButton.alpha = 1
}
}
}
}
/// Fades out whichever interruption indicators are visible once the
/// interruption ends, then hides them.
func sessionInterruptionEnded(notification: NSNotification) {
    print("Capture session interruption ended")
    if !resumeButton.isHidden {
        UIView.animate(withDuration: 0.25,
                       animations: { [unowned self] in self.resumeButton.alpha = 0 },
                       completion: { [unowned self] _ in self.resumeButton.isHidden = true })
    }
    if !cameraUnavailableLabel.isHidden {
        UIView.animate(withDuration: 0.25,
                       animations: { [unowned self] in self.cameraUnavailableLabel.alpha = 0 },
                       completion: { [unowned self] _ in self.cameraUnavailableLabel.isHidden = true })
    }
}
}
extension UIDeviceOrientation {
    /// Capture-video orientation matching this device orientation, or nil for
    /// orientations with no video equivalent (face up/down, unknown).
    var videoOrientation: AVCaptureVideoOrientation? {
        // Device landscape is mirrored relative to video landscape: left <-> right.
        let mapping: [UIDeviceOrientation: AVCaptureVideoOrientation] = [
            .portrait: .portrait,
            .portraitUpsideDown: .portraitUpsideDown,
            .landscapeLeft: .landscapeRight,
            .landscapeRight: .landscapeLeft
        ]
        return mapping[self]
    }
}
extension UIInterfaceOrientation {
    /// Capture-video orientation matching this interface orientation, or nil
    /// for .unknown. Unlike UIDeviceOrientation, landscape maps directly.
    var videoOrientation: AVCaptureVideoOrientation? {
        let mapping: [UIInterfaceOrientation: AVCaptureVideoOrientation] = [
            .portrait: .portrait,
            .portraitUpsideDown: .portraitUpsideDown,
            .landscapeLeft: .landscapeLeft,
            .landscapeRight: .landscapeRight
        ]
        return mapping[self]
    }
}
extension AVCaptureDeviceDiscoverySession {
    /// Number of distinct camera positions (front/back/unspecified) among the
    /// discovered devices — e.g. 2 on a device with both cameras.
    func uniqueDevicePositionsCount() -> Int {
        var uniquePositions = Set<AVCaptureDevicePosition>()
        for device in devices {
            uniquePositions.insert(device.position)
        }
        return uniquePositions.count
    }
}

View File

@ -0,0 +1,60 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>5.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSCameraUsageDescription</key>
<string>to take photos and video</string>
<key>NSMicrophoneUsageDescription</key>
<string>to record Live Photos and movies</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>to save photos and videos to your Photo Library</string>
<key>UILaunchStoryboardName</key>
<string>Launch Screen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UIRequiresFullScreen</key>
<true/>
<key>UIStatusBarHidden</key>
<true/>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
</dict>
</plist>

View File

@ -0,0 +1,119 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Photo capture delegate.
*/
import AVFoundation
import Photos
/// Manages the life cycle of a single photo capture: receives the
/// AVCapturePhotoCaptureDelegate callbacks for one request, saves the result
/// (and any Live Photo companion movie) to the photo library, and then calls
/// `completed` so the owner can release its strong reference.
class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
    /// The settings this capture was requested with; `uniqueID` identifies the capture.
    private(set) var requestedPhotoSettings: AVCapturePhotoSettings
    /// Invoked just before the photo is taken, to run a shutter animation.
    private let willCapturePhotoAnimation: () -> ()
    /// Invoked with `true` when Live Photo movie capture starts and `false` when it ends.
    private let capturingLivePhoto: (Bool) -> ()
    /// Invoked exactly once when the capture life cycle finishes, success or failure.
    private let completed: (PhotoCaptureDelegate) -> ()
    // JPEG data produced by the capture; nil until the sample buffer arrives.
    private var photoData: Data? = nil
    // Temporary file URL of the Live Photo companion movie, if one was recorded.
    private var livePhotoCompanionMovieURL: URL? = nil

    init(with requestedPhotoSettings: AVCapturePhotoSettings, willCapturePhotoAnimation: @escaping () -> (), capturingLivePhoto: @escaping (Bool) -> (), completed: @escaping (PhotoCaptureDelegate) -> ()) {
        self.requestedPhotoSettings = requestedPhotoSettings
        self.willCapturePhotoAnimation = willCapturePhotoAnimation
        self.capturingLivePhoto = capturingLivePhoto
        self.completed = completed
    }

    /// Removes the temporary Live Photo companion movie (if still present) and
    /// notifies the owner that this capture is finished.
    private func didFinish() {
        if let livePhotoCompanionMoviePath = livePhotoCompanionMovieURL?.path {
            if FileManager.default.fileExists(atPath: livePhotoCompanionMoviePath) {
                do {
                    try FileManager.default.removeItem(atPath: livePhotoCompanionMoviePath)
                }
                catch {
                    print("Could not remove file at url: \(livePhotoCompanionMoviePath)")
                }
            }
        }
        completed(self)
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, willBeginCaptureForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings) {
        // Nonzero movie dimensions mean a Live Photo movie will be recorded.
        if resolvedSettings.livePhotoMovieDimensions.width > 0 && resolvedSettings.livePhotoMovieDimensions.height > 0 {
            capturingLivePhoto(true)
        }
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, willCapturePhotoForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings) {
        willCapturePhotoAnimation()
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if let photoSampleBuffer = photoSampleBuffer {
            photoData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer)
        }
        else {
            // No sample buffer: report the error (may be nil if the framework gave no reason).
            print("Error capturing photo: \(String(describing: error))")
        }
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishRecordingLivePhotoMovieForEventualFileAt outputFileURL: URL, resolvedSettings: AVCaptureResolvedPhotoSettings) {
        capturingLivePhoto(false)
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingLivePhotoToMovieFileAt outputFileURL: URL, duration: CMTime, photoDisplay photoDisplayTime: CMTime, resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
        // Bind the error so the message prints the value, not "Optional(...)".
        if let error = error {
            print("Error processing live photo companion movie: \(error)")
            return
        }
        livePhotoCompanionMovieURL = outputFileURL
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishCaptureForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
        if let error = error {
            print("Error capturing photo: \(error)")
            didFinish()
            return
        }
        guard let photoData = photoData else {
            print("No photo data resource")
            didFinish()
            return
        }
        // Save the photo (plus any Live Photo companion movie) to the photo library.
        PHPhotoLibrary.requestAuthorization { [unowned self] status in
            if status == .authorized {
                PHPhotoLibrary.shared().performChanges({ [unowned self] in
                    let creationRequest = PHAssetCreationRequest.forAsset()
                    creationRequest.addResource(with: .photo, data: photoData, options: nil)
                    if let livePhotoCompanionMovieURL = self.livePhotoCompanionMovieURL {
                        let livePhotoCompanionMovieFileResourceOptions = PHAssetResourceCreationOptions()
                        // Move (not copy) the temporary movie into the library.
                        livePhotoCompanionMovieFileResourceOptions.shouldMoveFile = true
                        creationRequest.addResource(with: .pairedVideo, fileURL: livePhotoCompanionMovieURL, options: livePhotoCompanionMovieFileResourceOptions)
                    }
                }, completionHandler: { [unowned self] success, error in
                    if let error = error {
                        print("Error occurred while saving photo to photo library: \(error)")
                    }
                    self.didFinish()
                }
                )
            }
            else {
                self.didFinish()
            }
        }
    }
}

View File

@ -0,0 +1,31 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Application preview view.
*/
import UIKit
import AVFoundation
/// UIView whose backing layer is an AVCaptureVideoPreviewLayer, so a capture
/// session's preview renders directly into this view.
class PreviewView: UIView {
    // MARK: UIView
    // Back the view with a preview layer instead of a plain CALayer.
    override class var layerClass: AnyClass {
        return AVCaptureVideoPreviewLayer.self
    }

    /// The view's backing layer, typed as a video preview layer.
    var videoPreviewLayer: AVCaptureVideoPreviewLayer {
        return layer as! AVCaptureVideoPreviewLayer
    }

    /// Convenience accessor for the preview layer's capture session.
    var session: AVCaptureSession? {
        get { return videoPreviewLayer.session }
        set { videoPreviewLayer.session = newValue }
    }
}

View File

@ -0,0 +1,324 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
7A5BA9721CD2B5EE0091A264 /* ItemSelectionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7A5BA9711CD2B5EE0091A264 /* ItemSelectionViewController.swift */; };
7A921C841CD2858B00E7B04B /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7A921C831CD2858B00E7B04B /* AppDelegate.swift */; };
7A921C861CD2858B00E7B04B /* CameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7A921C851CD2858B00E7B04B /* CameraViewController.swift */; };
7A921C891CD2858B00E7B04B /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7A921C871CD2858B00E7B04B /* Main.storyboard */; };
7A921C8B1CD2858B00E7B04B /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7A921C8A1CD2858B00E7B04B /* Assets.xcassets */; };
7A921C8E1CD2858B00E7B04B /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7A921C8C1CD2858B00E7B04B /* LaunchScreen.storyboard */; };
7A921C961CD2861000E7B04B /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7A921C951CD2861000E7B04B /* PreviewView.swift */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
7A00104A1CD28B5500302C83 /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = "<group>"; };
7A5BA9711CD2B5EE0091A264 /* ItemSelectionViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ItemSelectionViewController.swift; sourceTree = "<group>"; };
7A921C801CD2858B00E7B04B /* AVCamBarcode.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = AVCamBarcode.app; sourceTree = BUILT_PRODUCTS_DIR; };
7A921C831CD2858B00E7B04B /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
7A921C851CD2858B00E7B04B /* CameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraViewController.swift; sourceTree = "<group>"; };
7A921C881CD2858B00E7B04B /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
7A921C8A1CD2858B00E7B04B /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
7A921C8D1CD2858B00E7B04B /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
7A921C8F1CD2858B00E7B04B /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
7A921C951CD2861000E7B04B /* PreviewView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
7A921C7D1CD2858B00E7B04B /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
7A921C771CD2858B00E7B04B = {
isa = PBXGroup;
children = (
7A00104A1CD28B5500302C83 /* README.md */,
7A921C821CD2858B00E7B04B /* AVCamBarcode */,
7A921C811CD2858B00E7B04B /* Products */,
);
sourceTree = "<group>";
};
7A921C811CD2858B00E7B04B /* Products */ = {
isa = PBXGroup;
children = (
7A921C801CD2858B00E7B04B /* AVCamBarcode.app */,
);
name = Products;
sourceTree = "<group>";
};
7A921C821CD2858B00E7B04B /* AVCamBarcode */ = {
isa = PBXGroup;
children = (
7A921C831CD2858B00E7B04B /* AppDelegate.swift */,
7A921C951CD2861000E7B04B /* PreviewView.swift */,
7A921C851CD2858B00E7B04B /* CameraViewController.swift */,
7A5BA9711CD2B5EE0091A264 /* ItemSelectionViewController.swift */,
7A921C871CD2858B00E7B04B /* Main.storyboard */,
7A921C8A1CD2858B00E7B04B /* Assets.xcassets */,
7A921C8C1CD2858B00E7B04B /* LaunchScreen.storyboard */,
7A921C8F1CD2858B00E7B04B /* Info.plist */,
);
path = AVCamBarcode;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
7A921C7F1CD2858B00E7B04B /* AVCamBarcode */ = {
isa = PBXNativeTarget;
buildConfigurationList = 7A921C921CD2858B00E7B04B /* Build configuration list for PBXNativeTarget "AVCamBarcode" */;
buildPhases = (
7A921C7C1CD2858B00E7B04B /* Sources */,
7A921C7D1CD2858B00E7B04B /* Frameworks */,
7A921C7E1CD2858B00E7B04B /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = AVCamBarcode;
productName = AVCamBarcode;
productReference = 7A921C801CD2858B00E7B04B /* AVCamBarcode.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
7A921C781CD2858B00E7B04B /* Project object */ = {
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 0800;
LastUpgradeCheck = 0800;
ORGANIZATIONNAME = "Apple, Inc.";
TargetAttributes = {
7A921C7F1CD2858B00E7B04B = {
CreatedOnToolsVersion = 8.0;
ProvisioningStyle = Automatic;
};
};
};
buildConfigurationList = 7A921C7B1CD2858B00E7B04B /* Build configuration list for PBXProject "AVCamBarcode" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 7A921C771CD2858B00E7B04B;
productRefGroup = 7A921C811CD2858B00E7B04B /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
7A921C7F1CD2858B00E7B04B /* AVCamBarcode */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
7A921C7E1CD2858B00E7B04B /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
7A921C8E1CD2858B00E7B04B /* LaunchScreen.storyboard in Resources */,
7A921C8B1CD2858B00E7B04B /* Assets.xcassets in Resources */,
7A921C891CD2858B00E7B04B /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
7A921C7C1CD2858B00E7B04B /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
7A5BA9721CD2B5EE0091A264 /* ItemSelectionViewController.swift in Sources */,
7A921C861CD2858B00E7B04B /* CameraViewController.swift in Sources */,
7A921C841CD2858B00E7B04B /* AppDelegate.swift in Sources */,
7A921C961CD2861000E7B04B /* PreviewView.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
7A921C871CD2858B00E7B04B /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
7A921C881CD2858B00E7B04B /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
7A921C8C1CD2858B00E7B04B /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
7A921C8D1CD2858B00E7B04B /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
7A921C901CD2858B00E7B04B /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPRESSION = lossless;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
7A921C911CD2858B00E7B04B /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPRESSION = "respect-asset-catalog";
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
7A921C931CD2858B00E7B04B /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
DEVELOPMENT_TEAM = "";
INFOPLIST_FILE = AVCamBarcode/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.AVCamBarcode";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = iphoneos;
SWIFT_OBJC_BRIDGING_HEADER = "";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 3.0;
};
name = Debug;
};
7A921C941CD2858B00E7B04B /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
DEVELOPMENT_TEAM = "";
INFOPLIST_FILE = AVCamBarcode/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.AVCamBarcode";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = iphoneos;
SWIFT_OBJC_BRIDGING_HEADER = "";
SWIFT_VERSION = 3.0;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
7A921C7B1CD2858B00E7B04B /* Build configuration list for PBXProject "AVCamBarcode" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7A921C901CD2858B00E7B04B /* Debug */,
7A921C911CD2858B00E7B04B /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
7A921C921CD2858B00E7B04B /* Build configuration list for PBXNativeTarget "AVCamBarcode" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7A921C931CD2858B00E7B04B /* Debug */,
7A921C941CD2858B00E7B04B /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 7A921C781CD2858B00E7B04B /* Project object */;
}

View File

@ -0,0 +1,14 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Application delegate.
*/
import UIKit
// Minimal application delegate: the storyboard drives all UI, so no launch
// logic is needed here.
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
// The app's main window, populated by UIKit from the main storyboard.
var window: UIWindow?
}

View File

@ -0,0 +1,108 @@
{
"images" : [
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "1x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "57x57",
"scale" : "1x"
},
{
"idiom" : "iphone",
"size" : "57x57",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "3x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "50x50",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "50x50",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "72x72",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "72x72",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "83.5x83.5",
"scale" : "2x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

View File

@ -0,0 +1,6 @@
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}

View File

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11019" systemVersion="15F24" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" initialViewController="01J-lp-oVM">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11056.3"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="Llm-lL-Icb"/>
<viewControllerLayoutGuide type="bottom" id="xb3-aO-Qok"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" white="0.0" alpha="1" colorSpace="calibratedWhite"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>

View File

@ -0,0 +1,185 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11129.12" systemVersion="15F24" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="pYE-xa-CpI">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11103.9"/>
<capability name="Constraints to layout margins" minToolsVersion="6.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--Camera View Controller-->
<scene sceneID="hlg-yS-qYF">
<objects>
<viewController id="pYE-xa-CpI" userLabel="Camera View Controller" customClass="CameraViewController" customModule="AVCamBarcode" customModuleProvider="target" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="pf6-pA-EQ2"/>
<viewControllerLayoutGuide type="bottom" id="30E-zC-PQO"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="GaW-eL-ee2">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="guD-bB-Q2w" customClass="PreviewView" customModule="AVCamBarcode" customModuleProvider="target">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</view>
<label hidden="YES" opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Camera Unavailable" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="nFV-EL-Pn1">
<fontDescription key="fontDescription" type="system" pointSize="24"/>
<color key="textColor" red="1" green="1" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
</label>
<slider opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="1" minValue="1" maxValue="1" translatesAutoresizingMaskIntoConstraints="NO" id="6Am-YQ-PDc">
<connections>
<action selector="zoomCameraWith:" destination="pYE-xa-CpI" eventType="valueChanged" id="fRL-Lx-e7U"/>
</connections>
</slider>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="4Ea-Lg-Xeq">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Metadata"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<segue destination="GLg-Kk-VL1" kind="presentation" identifier="SelectMetadataObjectTypes" id="mrU-EN-8wF"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="hro-jk-MAk">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="width" constant="90" id="0zu-de-BQm"/>
<constraint firstAttribute="height" constant="36" id="wvY-TG-C33"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Presets"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<segue destination="GLg-Kk-VL1" kind="presentation" identifier="SelectSessionPreset" id="lq3-bM-d5h"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="LVm-ag-Lfj">
<color key="backgroundColor" red="0.0" green="0.0" blue="0.0" alpha="0.29999999999999999" colorSpace="custom" customColorSpace="sRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="20"/>
<state key="normal" title="Camera"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="layer.cornerRadius">
<integer key="value" value="4"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
<connections>
<action selector="changeCamera" destination="pYE-xa-CpI" eventType="touchUpInside" id="N71-0l-W8k"/>
</connections>
</button>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="trailing" secondItem="guD-bB-Q2w" secondAttribute="trailing" id="1cL-8Q-X7G"/>
<constraint firstItem="nFV-EL-Pn1" firstAttribute="centerY" secondItem="GaW-eL-ee2" secondAttribute="centerY" id="4U2-KX-s1p"/>
<constraint firstItem="4Ea-Lg-Xeq" firstAttribute="width" secondItem="hro-jk-MAk" secondAttribute="width" id="7Qt-Vy-WcD"/>
<constraint firstItem="4Ea-Lg-Xeq" firstAttribute="height" secondItem="hro-jk-MAk" secondAttribute="height" id="83v-wj-3YX"/>
<constraint firstItem="LVm-ag-Lfj" firstAttribute="width" secondItem="hro-jk-MAk" secondAttribute="width" id="B8Y-z3-0un"/>
<constraint firstItem="6Am-YQ-PDc" firstAttribute="leading" secondItem="GaW-eL-ee2" secondAttribute="leadingMargin" id="ClA-FM-vHr"/>
<constraint firstItem="4Ea-Lg-Xeq" firstAttribute="centerY" secondItem="hro-jk-MAk" secondAttribute="centerY" id="GOM-rg-CZ5"/>
<constraint firstItem="guD-bB-Q2w" firstAttribute="leading" secondItem="GaW-eL-ee2" secondAttribute="leading" id="Mt7-Rd-2RD"/>
<constraint firstItem="LVm-ag-Lfj" firstAttribute="height" secondItem="hro-jk-MAk" secondAttribute="height" id="O6P-3v-cFP"/>
<constraint firstItem="6Am-YQ-PDc" firstAttribute="trailing" secondItem="GaW-eL-ee2" secondAttribute="trailingMargin" id="Pb7-LM-JPO"/>
<constraint firstItem="LVm-ag-Lfj" firstAttribute="leading" secondItem="hro-jk-MAk" secondAttribute="trailing" constant="20" id="VYm-7A-j8n"/>
<constraint firstItem="nFV-EL-Pn1" firstAttribute="centerX" secondItem="GaW-eL-ee2" secondAttribute="centerX" id="WN4-Uh-Ja1"/>
<constraint firstItem="LVm-ag-Lfj" firstAttribute="top" secondItem="6Am-YQ-PDc" secondAttribute="bottom" constant="8" id="aGW-ge-Xfh"/>
<constraint firstItem="30E-zC-PQO" firstAttribute="top" secondItem="guD-bB-Q2w" secondAttribute="bottom" id="bvU-g8-pba"/>
<constraint firstItem="LVm-ag-Lfj" firstAttribute="centerY" secondItem="hro-jk-MAk" secondAttribute="centerY" id="hcR-OA-jjb"/>
<constraint firstItem="hro-jk-MAk" firstAttribute="leading" secondItem="4Ea-Lg-Xeq" secondAttribute="trailing" constant="20" id="qM0-DK-HYJ"/>
<constraint firstItem="hro-jk-MAk" firstAttribute="centerX" secondItem="GaW-eL-ee2" secondAttribute="centerX" id="tL3-wg-lIX"/>
<constraint firstItem="guD-bB-Q2w" firstAttribute="top" secondItem="pf6-pA-EQ2" secondAttribute="bottom" id="xPs-b3-y3x"/>
<constraint firstItem="30E-zC-PQO" firstAttribute="top" secondItem="hro-jk-MAk" secondAttribute="bottom" constant="20" id="xdG-H2-P3i"/>
</constraints>
</view>
<nil key="simulatedStatusBarMetrics"/>
<connections>
<outlet property="cameraButton" destination="LVm-ag-Lfj" id="7Yg-nk-caj"/>
<outlet property="cameraUnavailableLabel" destination="nFV-EL-Pn1" id="xoc-xK-b7e"/>
<outlet property="metadataObjectTypesButton" destination="4Ea-Lg-Xeq" id="feH-Gs-m1J"/>
<outlet property="previewView" destination="guD-bB-Q2w" id="m5z-tC-RRN"/>
<outlet property="sessionPresetsButton" destination="hro-jk-MAk" id="i6d-EX-oQN"/>
<outlet property="zoomSlider" destination="6Am-YQ-PDc" id="VpL-Eh-Tpw"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="B6K-l6-dJ5" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="305" y="389"/>
</scene>
<!--Item Selection View Controller-->
<scene sceneID="fTa-Za-MkS">
<objects>
<tableViewController id="PBC-n5-MIm" userLabel="Item Selection View Controller" customClass="ItemSelectionViewController" customModule="AVCamBarcode" customModuleProvider="target" sceneMemberID="viewController">
<tableView key="view" clipsSubviews="YES" contentMode="scaleToFill" alwaysBounceVertical="YES" dataMode="prototypes" style="grouped" separatorStyle="default" rowHeight="44" sectionHeaderHeight="18" sectionFooterHeight="18" id="N68-f6-mTy">
<rect key="frame" x="0.0" y="64" width="375" height="603"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="0.93725490199999995" green="0.93725490199999995" blue="0.95686274510000002" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<prototypes>
<tableViewCell clipsSubviews="YES" contentMode="scaleToFill" selectionStyle="default" indentationWidth="10" reuseIdentifier="Item" textLabel="R6q-rR-Tno" style="IBUITableViewCellStyleDefault" id="PJ2-9Z-PLE">
<frame key="frameInset" minY="56" width="375" height="44"/>
<autoresizingMask key="autoresizingMask"/>
<tableViewCellContentView key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" tableViewCell="PJ2-9Z-PLE" id="qQK-uf-VyZ">
<frame key="frameInset" width="375" height="43"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<label opaque="NO" multipleTouchEnabled="YES" contentMode="left" text="Title" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" id="R6q-rR-Tno">
<frame key="frameInset" minX="15" width="345" height="43"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<fontDescription key="fontDescription" type="system" pointSize="16"/>
<color key="textColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
</label>
</subviews>
</tableViewCellContentView>
</tableViewCell>
</prototypes>
<sections/>
<connections>
<outlet property="dataSource" destination="PBC-n5-MIm" id="Npa-Tv-GhR"/>
<outlet property="delegate" destination="PBC-n5-MIm" id="0Gr-Y1-ACe"/>
</connections>
</tableView>
<navigationItem key="navigationItem" id="eh9-zC-GKS">
<barButtonItem key="rightBarButtonItem" style="done" systemItem="done" id="UQG-Fm-Ghg">
<connections>
<action selector="done" destination="PBC-n5-MIm" id="LdY-gh-NJi"/>
</connections>
</barButtonItem>
</navigationItem>
</tableViewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="fmz-Lz-F62" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="2062" y="389"/>
</scene>
<!--Navigation Controller-->
<scene sceneID="ZDG-M1-580">
<objects>
<navigationController automaticallyAdjustsScrollViewInsets="NO" id="GLg-Kk-VL1" sceneMemberID="viewController">
<toolbarItems/>
<simulatedStatusBarMetrics key="simulatedStatusBarMetrics" statusBarStyle="lightContent"/>
<navigationBar key="navigationBar" contentMode="scaleToFill" barStyle="black" translucent="NO" id="bLZ-a8-d7u">
<rect key="frame" x="0.0" y="0.0" width="320" height="44"/>
<autoresizingMask key="autoresizingMask"/>
</navigationBar>
<nil name="viewControllers"/>
<connections>
<segue destination="PBC-n5-MIm" kind="relationship" relationship="rootViewController" id="ACT-R5-6oB"/>
</connections>
</navigationController>
<placeholder placeholderIdentifier="IBFirstResponder" id="2L1-Rt-Rk0" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="1250" y="389"/>
</scene>
</scenes>
<inferredMetricsTieBreakers>
<segue reference="mrU-EN-8wF"/>
</inferredMetricsTieBreakers>
<color key="tintColor" red="1" green="1" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</document>

View File

@ -0,0 +1,816 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
View controller for camera interface.
*/
import UIKit
import AVFoundation
class CameraViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate, ItemSelectionViewControllerDelegate {
// MARK: View Controller Life Cycle
/// One-time setup: disables the controls until the session runs, wires the
/// preview view to the capture session, checks camera authorization, and
/// dispatches session configuration onto the session queue.
override func viewDidLoad() {
super.viewDidLoad()
// Disable UI. The UI is enabled if and only if the session starts running.
metadataObjectTypesButton.isEnabled = false
sessionPresetsButton.isEnabled = false
cameraButton.isEnabled = false
zoomSlider.isEnabled = false
// Add the open barcode gesture recognizer to the region of interest view.
previewView.addGestureRecognizer(openBarcodeURLGestureRecognizer)
// Set up the video preview view.
previewView.session = session
/*
Check video authorization status. Video access is required and audio
access is optional. If audio access is denied, audio is not recorded
during movie recording.
*/
switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) {
case .authorized:
// The user has previously granted access to the camera.
break
case .notDetermined:
/*
The user has not yet been presented with the option to grant
video access. We suspend the session queue to delay session
setup until the access request has completed.
*/
sessionQueue.suspend()
AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { [unowned self] granted in
if !granted {
self.setupResult = .notAuthorized
}
// Resume regardless of the answer; configureSession checks setupResult.
self.sessionQueue.resume()
})
default:
// The user has previously denied access.
setupResult = .notAuthorized
}
/*
Setup the capture session.
In general it is not safe to mutate an AVCaptureSession or any of its
inputs, outputs, or connections from multiple threads at the same time.
Why not do all of this on the main queue?
Because AVCaptureSession.startRunning() is a blocking call which can
take a long time. We dispatch session setup to the sessionQueue so
that the main queue isn't blocked, which keeps the UI responsive.
*/
sessionQueue.async { [unowned self] in
self.configureSession()
}
}
/// Starts the capture session (on the session queue) if configuration
/// succeeded; otherwise presents an alert explaining why capture is
/// unavailable (denied permission or a configuration failure).
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// setupResult is read on the session queue, where configureSession set it.
sessionQueue.async { [unowned self] in
switch self.setupResult {
case .success:
// Only setup observers and start the session running if setup succeeded.
self.addObservers()
self.session.startRunning()
self.isSessionRunning = self.session.isRunning
case .notAuthorized:
// UI work (alerts) must happen on the main queue.
DispatchQueue.main.async { [unowned self] in
let message = NSLocalizedString("AVCamBarcode doesn't have permission to use the camera, please change privacy settings", comment: "Alert message when the user has denied access to the camera")
let alertController = UIAlertController(title: "AVCamBarcode", message: message, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), style: .cancel, handler: nil))
// Offer a shortcut straight to this app's privacy settings.
alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Alert button to open Settings"), style: .`default`, handler: { action in
UIApplication.shared.open(URL(string: UIApplicationOpenSettingsURLString)!, options: [:], completionHandler: nil)
}))
self.present(alertController, animated: true, completion: nil)
}
case .configurationFailed:
DispatchQueue.main.async { [unowned self] in
let message = NSLocalizedString("Unable to capture media", comment: "Alert message when something goes wrong during capture session configuration")
let alertController = UIAlertController(title: "AVCamBarcode", message: message, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), style: .cancel, handler: nil))
self.present(alertController, animated: true, completion: nil)
}
}
}
}
/// Stops the session and removes observers (on the session queue) when the
/// view goes away, but only if setup had succeeded.
override func viewWillDisappear(_ animated: Bool) {
sessionQueue.async { [unowned self] in
if self.setupResult == .success {
self.session.stopRunning()
self.isSessionRunning = self.session.isRunning
// Observers were added in viewWillAppear; tear them down symmetrically.
self.removeObservers()
}
}
super.viewWillDisappear(animated)
}
/// Configures the destination item-selection screen for the two presentation
/// segues wired in the storyboard: "SelectMetadataObjectTypes" and
/// "SelectSessionPreset". Any other segue passes through untouched.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    switch segue.identifier {
    case "SelectMetadataObjectTypes"?:
        let navigationController = segue.destination as! UINavigationController
        let selectionController = navigationController.viewControllers[0] as! ItemSelectionViewController
        selectionController.title = NSLocalizedString("Metadata Object Types", comment: "The title when selecting metadata object types.")
        selectionController.delegate = self
        selectionController.identifier = metadataObjectTypeItemSelectionIdentifier
        selectionController.allItems = metadataOutput.availableMetadataObjectTypes as! [String]
        selectionController.selectedItems = metadataOutput.metadataObjectTypes as! [String]
        // Multiple barcode/metadata types may be active at once.
        selectionController.allowsMultipleSelection = true

    case "SelectSessionPreset"?:
        let navigationController = segue.destination as! UINavigationController
        let selectionController = navigationController.viewControllers[0] as! ItemSelectionViewController
        selectionController.title = NSLocalizedString("Session Presets", comment: "The title when selecting a session preset.")
        selectionController.delegate = self
        selectionController.identifier = sessionPresetItemSelectionIdentifier
        selectionController.allItems = availableSessionPresets()
        selectionController.selectedItems = [session.sessionPreset]
        // Exactly one session preset can be active.
        selectionController.allowsMultipleSelection = false

    default:
        break
    }
}
/// Rotation is disallowed while the user is actively dragging a corner of the
/// region of interest, so the drag geometry stays consistent.
override var shouldAutorotate: Bool {
// Do not allow rotation if the region of interest is being resized.
return !previewView.isResizingRegionOfInterest
}
/// Re-orients the video preview connection on rotation and remaps the region
/// of interest so that it stays anchored to the same part of the camera image.
override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) {
super.viewWillTransition(to: size, with: coordinator)
if let videoPreviewLayerConnection = previewView.videoPreviewLayer.connection {
let deviceOrientation = UIDevice.current.orientation
// Ignore orientations with no video mapping (e.g. face up/down return nil).
guard let newVideoOrientation = deviceOrientation.videoOrientation, deviceOrientation.isPortrait || deviceOrientation.isLandscape else {
return
}
// Capture the pre-rotation geometry before mutating the connection.
let oldSize = view.frame.size
let oldVideoOrientation = videoPreviewLayerConnection.videoOrientation
videoPreviewLayerConnection.videoOrientation = newVideoOrientation
/*
When we transition to the new size, we need to adjust the region
of interest's origin and size so that it stays anchored relative
to the camera.
*/
coordinator.animate(alongsideTransition: { [unowned self] context in
let oldRegionOfInterest = self.previewView.regionOfInterest
var newRegionOfInterest = CGRect()
// Each branch remaps the old rect for one specific (old, new)
// orientation pair; `oldSize` is the pre-rotation view size and
// `size` is the post-rotation size supplied by UIKit.
if oldVideoOrientation == .landscapeRight && newVideoOrientation == .landscapeLeft {
// 180-degree flip between landscape orientations: mirror x, keep size.
newRegionOfInterest.origin.x = oldSize.width - oldRegionOfInterest.origin.x - oldRegionOfInterest.size.width
newRegionOfInterest.origin.y = oldRegionOfInterest.origin.y
newRegionOfInterest.size.width = oldRegionOfInterest.size.width
newRegionOfInterest.size.height = oldRegionOfInterest.size.height
}
else if oldVideoOrientation == .landscapeRight && newVideoOrientation == .portrait {
// 90-degree rotation: x/y and width/height swap roles.
newRegionOfInterest.origin.x = size.width - oldRegionOfInterest.origin.y - oldRegionOfInterest.size.height
newRegionOfInterest.origin.y = oldRegionOfInterest.origin.x
newRegionOfInterest.size.width = oldRegionOfInterest.size.height
newRegionOfInterest.size.height = oldRegionOfInterest.size.width
}
else if oldVideoOrientation == .landscapeLeft && newVideoOrientation == .landscapeRight {
// 180-degree flip (other direction): same mirror as above.
newRegionOfInterest.origin.x = oldSize.width - oldRegionOfInterest.origin.x - oldRegionOfInterest.size.width
newRegionOfInterest.origin.y = oldRegionOfInterest.origin.y
newRegionOfInterest.size.width = oldRegionOfInterest.size.width
newRegionOfInterest.size.height = oldRegionOfInterest.size.height
}
else if oldVideoOrientation == .landscapeLeft && newVideoOrientation == .portrait {
// 90-degree rotation from the opposite landscape orientation.
newRegionOfInterest.origin.x = oldRegionOfInterest.origin.y
newRegionOfInterest.origin.y = oldSize.width - oldRegionOfInterest.origin.x - oldRegionOfInterest.size.width
newRegionOfInterest.size.width = oldRegionOfInterest.size.height
newRegionOfInterest.size.height = oldRegionOfInterest.size.width
}
else if oldVideoOrientation == .portrait && newVideoOrientation == .landscapeRight {
// Portrait to landscape: inverse of the landscapeRight->portrait mapping.
newRegionOfInterest.origin.x = oldRegionOfInterest.origin.y
newRegionOfInterest.origin.y = size.height - oldRegionOfInterest.origin.x - oldRegionOfInterest.size.width
newRegionOfInterest.size.width = oldRegionOfInterest.size.height
newRegionOfInterest.size.height = oldRegionOfInterest.size.width
}
else if oldVideoOrientation == .portrait && newVideoOrientation == .landscapeLeft {
// Portrait to the other landscape orientation.
newRegionOfInterest.origin.x = oldSize.height - oldRegionOfInterest.origin.y - oldRegionOfInterest.size.height
newRegionOfInterest.origin.y = oldRegionOfInterest.origin.x
newRegionOfInterest.size.width = oldRegionOfInterest.size.height
newRegionOfInterest.size.height = oldRegionOfInterest.size.width
}
self.previewView.setRegionOfInterestWithProposedRegionOfInterest(newRegionOfInterest)
},
completion: { [unowned self] context in
// Push the final region of interest to the metadata output on the session queue.
self.sessionQueue.async {
self.metadataOutput.rectOfInterest = self.previewView.videoPreviewLayer.metadataOutputRectOfInterest(for: self.previewView.regionOfInterest)
}
// Remove the old metadata object overlays.
self.removeMetadataObjectOverlayLayers()
}
)
}
}
// MARK: Session Management
/// Outcome of capture-session configuration; drives the alerts shown when the
/// view appears and whether the session may be started/stopped.
private enum SessionSetupResult {
// Configuration succeeded; the session can run.
case success
// Camera permission was not granted (the Settings alert path above).
case notAuthorized
// An input or output could not be added to the session.
case configurationFailed
}
// The capture session this controller drives; only mutated on `sessionQueue`.
private let session = AVCaptureSession()
// Mirrors session.isRunning as last observed from the session queue / KVO.
private var isSessionRunning = false
private let sessionQueue = DispatchQueue(label: "session queue", attributes: [], target: nil) // Communicate with the session and other session objects on this queue.
// Written during configureSession(); checked before starting/stopping the session.
private var setupResult: SessionSetupResult = .success
// The currently-installed camera input (swapped in changeCamera()).
var videoDeviceInput: AVCaptureDeviceInput!
@IBOutlet private var previewView: PreviewView!
// Call this on the session queue.
/// Configures the capture session: picks the best available camera, adds it
/// as an input, and adds the metadata output. On any failure, records
/// `.configurationFailed` in `setupResult` and commits before returning.
private func configureSession() {
if self.setupResult != .success {
return
}
// Batch all changes into a single configuration transaction.
session.beginConfiguration()
// Add video input.
do {
var defaultVideoDevice: AVCaptureDevice?
// Choose the back dual camera if available, otherwise default to a wide angle camera.
if let dualCameraDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInDuoCamera, mediaType: AVMediaTypeVideo, position: .back) {
defaultVideoDevice = dualCameraDevice
}
else if let backCameraDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .back) {
// If the back dual camera is not available, default to the back wide angle camera.
defaultVideoDevice = backCameraDevice
}
else if let frontCameraDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .front) {
// In some cases where users break their phones, the back wide angle camera is not available. In this case, we should default to the front wide angle camera.
defaultVideoDevice = frontCameraDevice
}
// NOTE(review): `defaultVideoDevice` may still be nil here; the Swift 3
// initializer accepts it and the failure surfaces in the catch below.
let videoDeviceInput = try AVCaptureDeviceInput(device: defaultVideoDevice)
if session.canAddInput(videoDeviceInput) {
session.addInput(videoDeviceInput)
self.videoDeviceInput = videoDeviceInput
DispatchQueue.main.async {
/*
Why are we dispatching this to the main queue?
Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
can only be manipulated on the main thread.
Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
on the AVCaptureVideoPreviewLayers connection with other session manipulation.
Use the status bar orientation as the initial video orientation. Subsequent orientation changes are
handled by CameraViewController.viewWillTransition(to:with:).
*/
let statusBarOrientation = UIApplication.shared.statusBarOrientation
var initialVideoOrientation: AVCaptureVideoOrientation = .portrait
if statusBarOrientation != .unknown {
if let videoOrientation = statusBarOrientation.videoOrientation {
initialVideoOrientation = videoOrientation
}
}
self.previewView.videoPreviewLayer.connection.videoOrientation = initialVideoOrientation
}
}
else {
print("Could not add video device input to the session")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
}
catch {
print("Could not create video device input: \(error)")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
// Add metadata output.
if session.canAddOutput(metadataOutput) {
session.addOutput(metadataOutput)
// Set this view controller as the delegate for metadata objects.
metadataOutput.setMetadataObjectsDelegate(self, queue: metadataObjectsQueue)
metadataOutput.metadataObjectTypes = metadataOutput.availableMetadataObjectTypes // Use all metadata object types by default.
// Zero for now; the real rect is derived later from the preview view's
// region of interest (see the KVO handler in observeValue).
metadataOutput.rectOfInterest = CGRect.zero
}
else {
print("Could not add metadata output to the session")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
session.commitConfiguration()
}
// Output that delivers detected metadata objects (barcodes, faces) to this controller.
private let metadataOutput = AVCaptureMetadataOutput()
// Serial queue on which metadata callbacks are delivered (see configureSession()).
private let metadataObjectsQueue = DispatchQueue(label: "metadata objects queue", attributes: [], target: nil)
@IBOutlet private var sessionPresetsButton: UIButton!
/// Returns the subset of known session presets that the current capture
/// session accepts (via `canSetSessionPreset(_:)`), preserving their order.
private func availableSessionPresets() -> [String] {
    let allSessionPresets = [AVCaptureSessionPresetPhoto,
                             AVCaptureSessionPresetLow,
                             AVCaptureSessionPresetMedium,
                             AVCaptureSessionPresetHigh,
                             AVCaptureSessionPreset352x288,
                             AVCaptureSessionPreset640x480,
                             AVCaptureSessionPreset1280x720,
                             AVCaptureSessionPresetiFrame960x540,
                             AVCaptureSessionPresetiFrame1280x720,
                             AVCaptureSessionPreset1920x1080,
                             AVCaptureSessionPreset3840x2160]
    // Keep only the presets the session can actually adopt.
    return allSessionPresets.filter { session.canSetSessionPreset($0) }
}
// MARK: Device Configuration
@IBOutlet private var cameraButton: UIButton!
@IBOutlet private var cameraUnavailableLabel: UILabel!
// Discovers wide-angle and duo cameras at any position; used when switching
// cameras and to decide whether the switch button should be enabled.
private let videoDeviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDuoCamera], mediaType: AVMediaTypeVideo, position: .unspecified)!
/// Switches between the front and back cameras.
///
/// UI controls are disabled immediately (this IBAction runs on the main
/// queue) and re-enabled once the switch completes. All capture-session
/// reconfiguration runs on `sessionQueue`, per the rule stated on that
/// queue's declaration that the session is only touched there.
@IBAction private func changeCamera() {
    metadataObjectTypesButton.isEnabled = false
    sessionPresetsButton.isEnabled = false
    cameraButton.isEnabled = false
    zoomSlider.isEnabled = false

    // Remove the metadata overlay layers, if any.
    removeMetadataObjectOverlayLayers()

    // BUGFIX: this work was previously dispatched to the main queue, which
    // blocked the UI during reconfiguration and raced with other session
    // work that this class performs on the session queue.
    sessionQueue.async { [unowned self] in
        let currentVideoDevice = self.videoDeviceInput.device
        let currentPosition = currentVideoDevice!.position

        // Target the opposite position; prefer the duo camera on the back.
        let preferredPosition: AVCaptureDevicePosition
        let preferredDeviceType: AVCaptureDeviceType
        switch currentPosition {
        case .unspecified, .front:
            preferredPosition = .back
            preferredDeviceType = .builtInDuoCamera
        case .back:
            preferredPosition = .front
            preferredDeviceType = .builtInWideAngleCamera
        }

        let devices = self.videoDeviceDiscoverySession.devices!
        var newVideoDevice: AVCaptureDevice? = nil

        // First, look for a device with both the preferred position and device type. Otherwise, look for a device with only the preferred position.
        if let device = devices.filter({ $0.position == preferredPosition && $0.deviceType == preferredDeviceType }).first {
            newVideoDevice = device
        }
        else if let device = devices.filter({ $0.position == preferredPosition }).first {
            newVideoDevice = device
        }

        if let videoDevice = newVideoDevice {
            do {
                let videoDeviceInput = try AVCaptureDeviceInput.init(device: videoDevice)

                self.session.beginConfiguration()

                // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
                self.session.removeInput(self.videoDeviceInput)

                /*
                    When changing devices, a session preset that may be supported
                    on one device may not be supported by another. To allow the
                    user to successfully switch devices, we must save the previous
                    session preset, set the default session preset (High), and
                    attempt to restore it after the new video device has been
                    added. For example, the 4K session preset is only supported
                    by the back device on the iPhone 6s and iPhone 6s Plus. As a
                    result, the session will not let us add a video device that
                    does not support the current session preset.
                */
                let previousSessionPreset = self.session.sessionPreset
                self.session.sessionPreset = AVCaptureSessionPresetHigh

                if self.session.canAddInput(videoDeviceInput) {
                    self.session.addInput(videoDeviceInput)
                    self.videoDeviceInput = videoDeviceInput
                }
                else {
                    // Fall back to the previous input if the new one is rejected.
                    self.session.addInput(self.videoDeviceInput)
                }

                // Restore the previous session preset if we can.
                if self.session.canSetSessionPreset(previousSessionPreset) {
                    self.session.sessionPreset = previousSessionPreset
                }

                self.session.commitConfiguration()
            }
            catch {
                print("Error occured while creating video device input: \(error)")
            }
        }

        // Re-enable the controls on the main queue once the switch is done.
        DispatchQueue.main.async { [unowned self] in
            self.metadataObjectTypesButton.isEnabled = true
            self.sessionPresetsButton.isEnabled = true
            self.cameraButton.isEnabled = self.videoDeviceDiscoverySession.uniqueDevicePositionsCount() > 1
            self.zoomSlider.isEnabled = true
            self.zoomSlider.maximumValue = Float(min(self.videoDeviceInput.device.activeFormat.videoMaxZoomFactor, CGFloat(8.0)))
            self.zoomSlider.value = Float(self.videoDeviceInput.device.videoZoomFactor)
        }
    }
}
@IBOutlet private var zoomSlider: UISlider!
/// Applies the slider's current value as the capture device's zoom factor.
@IBAction private func zoomCamera(with zoomSlider: UISlider) {
do {
// The device must be locked before its configuration may be changed.
try videoDeviceInput.device.lockForConfiguration()
videoDeviceInput.device.videoZoomFactor = CGFloat(zoomSlider.value)
videoDeviceInput.device.unlockForConfiguration()
}
catch {
print("Could not lock for configuration: \(error)")
}
}
// MARK: KVO and Notifications
// Unique KVO context tokens; their addresses identify our registrations in observeValue.
private var sessionRunningObserveContext = 0
private var previewViewRegionOfInterestObserveContext = 0
/// Installs the KVO and notification observers that removeObservers() tears down.
private func addObservers() {
// Track session.isRunning to keep the UI controls in sync.
session.addObserver(self, forKeyPath: "running", options: .new, context: &sessionRunningObserveContext)
/*
Observe the previewView's regionOfInterest to update the AVCaptureMetadataOutput's
rectOfInterest when the user finishes resizing the region of interest.
*/
previewView.addObserver(self, forKeyPath: "regionOfInterest", options: .new, context: &previewViewRegionOfInterestObserveContext)
NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError), name: Notification.Name("AVCaptureSessionRuntimeErrorNotification"), object: session)
/*
A session can only run when the app is full screen. It will be interrupted
in a multi-app layout, introduced in iOS 9, see also the documentation of
AVCaptureSessionInterruptionReason. Add observers to handle these session
interruptions and show a preview is paused message. See the documentation
of AVCaptureSessionWasInterruptedNotification for other interruption reasons.
*/
NotificationCenter.default.addObserver(self, selector: #selector(sessionWasInterrupted), name: Notification.Name("AVCaptureSessionWasInterruptedNotification"), object: session)
NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnded), name: Notification.Name("AVCaptureSessionInterruptionEndedNotification"), object: session)
}
/// Removes every observer registered in addObservers(): all NotificationCenter
/// registrations plus the two KVO registrations.
private func removeObservers() {
NotificationCenter.default.removeObserver(self)
session.removeObserver(self, forKeyPath: "running", context: &sessionRunningObserveContext)
previewView.removeObserver(self, forKeyPath: "regionOfInterest", context: &previewViewRegionOfInterestObserveContext)
}
/// Dispatches the two KVO registrations installed in addObservers():
/// session running state drives the UI controls; region-of-interest changes
/// are forwarded to the metadata output's rectOfInterest.
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
let newValue = change?[.newKey] as AnyObject?
if context == &sessionRunningObserveContext {
guard let isSessionRunning = newValue?.boolValue else { return }
// UI updates must happen on the main queue.
DispatchQueue.main.async { [unowned self] in
self.metadataObjectTypesButton.isEnabled = isSessionRunning
self.sessionPresetsButton.isEnabled = isSessionRunning
self.cameraButton.isEnabled = isSessionRunning && self.videoDeviceDiscoverySession.uniqueDevicePositionsCount() > 1
self.zoomSlider.isEnabled = isSessionRunning
self.zoomSlider.maximumValue = Float(min(self.videoDeviceInput.device.activeFormat.videoMaxZoomFactor, CGFloat(8.0)))
self.zoomSlider.value = Float(self.videoDeviceInput.device.videoZoomFactor)
/*
After the session stops running, remove the metadata object overlays,
if any, so that if the view appears again, the previously displayed
metadata object overlays are removed.
*/
if !isSessionRunning {
self.removeMetadataObjectOverlayLayers()
}
}
}
else if context == &previewViewRegionOfInterestObserveContext {
guard let regionOfInterest = newValue?.cgRectValue else { return }
// Update the AVCaptureMetadataOutput with the new region of interest.
sessionQueue.async {
// Translate the preview view's region of interest to the metadata output's coordinate system.
self.metadataOutput.rectOfInterest = self.previewView.videoPreviewLayer.metadataOutputRectOfInterest(for: regionOfInterest)
// Ensure we are not drawing old metadata object overlays.
DispatchQueue.main.async { [unowned self] in
self.removeMetadataObjectOverlayLayers()
}
}
}
else {
super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
}
}
/// Handles AVCaptureSessionRuntimeErrorNotification (registered in addObservers()).
func sessionRuntimeError(notification: NSNotification) {
guard let errorValue = notification.userInfo?[AVCaptureSessionErrorKey] as? NSError else { return }
let error = AVError(_nsError: errorValue)
print("Capture session runtime error: \(error)")
/*
Automatically try to restart the session running if media services were
reset and the last start running succeeded. Otherwise, enable the user
to try to resume the session running.
*/
if error.code == .mediaServicesWereReset {
sessionQueue.async { [unowned self] in
// Only restart if the session was running before the error.
if self.isSessionRunning {
self.session.startRunning()
self.isSessionRunning = self.session.isRunning
}
}
}
}
/// Handles AVCaptureSessionWasInterruptedNotification (registered in
/// addObservers()); shows the "camera unavailable" label when another
/// foreground app has claimed the camera.
func sessionWasInterrupted(notification: NSNotification) {
/*
In some scenarios we want to enable the user to resume the session running.
For example, if music playback is initiated via control center while
using AVCamBarcode, then the user can let AVCamBarcode resume
the session running, which will stop music playback. Note that stopping
music playback in control center will not automatically resume the session
running. Also note that it is not always possible to resume, see `resumeInterruptedSession(_:)`.
*/
if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?, let reasonIntegerValue = userInfoValue.integerValue, let reason = AVCaptureSessionInterruptionReason(rawValue: reasonIntegerValue) {
print("Capture session was interrupted with reason \(reason)")
if reason == AVCaptureSessionInterruptionReason.videoDeviceNotAvailableWithMultipleForegroundApps {
// Simply fade-in a label to inform the user that the camera is unavailable.
self.cameraUnavailableLabel.isHidden = false
self.cameraUnavailableLabel.alpha = 0
UIView.animate(withDuration: 0.25) {
self.cameraUnavailableLabel.alpha = 1
}
}
}
}
/// Handles AVCaptureSessionInterruptionEndedNotification (registered in
/// addObservers()): fades out and then hides the "camera unavailable" label
/// that sessionWasInterrupted(notification:) may have shown.
func sessionInterruptionEnded(notification: NSNotification) {
    print("Capture session interruption ended")
    // BUGFIX: the check was inverted (`isHidden` instead of `!isHidden`), so
    // the fade-out ran only when the label was already hidden (a no-op) and a
    // visible label was never dismissed.
    if !cameraUnavailableLabel.isHidden {
        UIView.animate(withDuration: 0.25,
            animations: { [unowned self] in
                self.cameraUnavailableLabel.alpha = 0
            }, completion: { [unowned self] finished in
                self.cameraUnavailableLabel.isHidden = true
            }
        )
    }
}
// MARK: Drawing Metadata Object Overlay Layers
@IBOutlet private var metadataObjectTypesButton: UIButton!
/// Shape layer that also remembers the metadata object it visualizes, so a
/// tap on the overlay can be traced back to (e.g.) a barcode's payload.
private class MetadataObjectLayer: CAShapeLayer {
// The (preview-layer-transformed) metadata object this layer draws.
var metadataObject: AVMetadataObject?
}
/**
A dispatch semaphore is used for drawing metadata object overlays so that
only one group of metadata object overlays is drawn at a time.
*/
private let metadataObjectsOverlayLayersDrawingSemaphore = DispatchSemaphore(value: 1)
// Overlay layers currently installed on the video preview layer.
private var metadataObjectOverlayLayers = [MetadataObjectLayer]()
/// Builds an overlay layer for one detected metadata object: a corner-traced
/// outline (plus the decoded string, if any) for machine-readable codes, or a
/// bounding-box rectangle for faces.
private func createMetadataObjectOverlayWithMetadataObject(_ metadataObject: AVMetadataObject) -> MetadataObjectLayer {
// Transform the metadata object so the bounds are updated to reflect those of the video preview layer.
let transformedMetadataObject = previewView.videoPreviewLayer.transformedMetadataObject(for: metadataObject)
// Create the initial metadata object overlay layer that can be used for either machine readable codes or faces.
let metadataObjectOverlayLayer = MetadataObjectLayer()
metadataObjectOverlayLayer.metadataObject = transformedMetadataObject
metadataObjectOverlayLayer.lineJoin = kCALineJoinRound
metadataObjectOverlayLayer.lineWidth = 7.0
// Tint-derived colors: translucent stroke, fainter fill.
metadataObjectOverlayLayer.strokeColor = view.tintColor.withAlphaComponent(0.7).cgColor
metadataObjectOverlayLayer.fillColor = view.tintColor.withAlphaComponent(0.3).cgColor
if transformedMetadataObject is AVMetadataMachineReadableCodeObject {
let barcodeMetadataObject = transformedMetadataObject as! AVMetadataMachineReadableCodeObject
let barcodeOverlayPath = barcodeOverlayPathWithCorners(barcodeMetadataObject.corners as! [CFDictionary])
metadataObjectOverlayLayer.path = barcodeOverlayPath
// If the metadata object has a string value, display it.
if barcodeMetadataObject.stringValue.characters.count > 0 {
let barcodeOverlayBoundingBox = barcodeOverlayPath.boundingBox
// Centered text layer sized to the barcode outline's bounding box.
let textLayer = CATextLayer()
textLayer.alignmentMode = kCAAlignmentCenter
textLayer.bounds = CGRect(x: 0.0, y: 0.0, width: barcodeOverlayBoundingBox.size.width, height: barcodeOverlayBoundingBox.size.height)
textLayer.contentsScale = UIScreen.main.scale
textLayer.font = UIFont.boldSystemFont(ofSize: 19).fontName as CFString
textLayer.position = CGPoint(x: barcodeOverlayBoundingBox.midX, y: barcodeOverlayBoundingBox.midY)
// White text with a black outline (negative stroke width strokes and fills).
textLayer.string = NSAttributedString(string: barcodeMetadataObject.stringValue, attributes: [
NSFontAttributeName : UIFont.boldSystemFont(ofSize: 19),
kCTForegroundColorAttributeName as String : UIColor.white.cgColor,
kCTStrokeWidthAttributeName as String : -5.0,
kCTStrokeColorAttributeName as String : UIColor.black.cgColor])
textLayer.isWrapped = true
// Invert the effect of transform of the video preview so the text is orientated with the interface orientation.
textLayer.transform = CATransform3DInvert(CATransform3DMakeAffineTransform(previewView.transform))
metadataObjectOverlayLayer.addSublayer(textLayer)
}
}
else if transformedMetadataObject is AVMetadataFaceObject {
metadataObjectOverlayLayer.path = CGPath(rect: transformedMetadataObject!.bounds, transform: nil)
}
return metadataObjectOverlayLayer
}
/// Builds a closed path through the barcode's corner points.
///
/// - Parameter corners: Corner points encoded as CFDictionary values,
///   decoded via `CGPoint(dictionaryRepresentation:)`.
/// - Returns: The closed outline, or whatever was accumulated so far if the
///   array is empty or a corner fails to decode (matching the original's
///   early-return behavior).
private func barcodeOverlayPathWithCorners(_ corners: [CFDictionary]) -> CGMutablePath {
    let path = CGMutablePath()
    guard let firstCornerDictionary = corners.first,
          let firstCorner = CGPoint(dictionaryRepresentation: firstCornerDictionary) else { return path }

    path.move(to: firstCorner, transform: .identity)

    // Skip the first corner here: the previous loop iterated over every
    // corner, adding a redundant zero-length segment back to the start point.
    for cornerDictionary in corners.dropFirst() {
        guard let corner = CGPoint(dictionaryRepresentation: cornerDictionary) else { return path }
        path.addLine(to: corner)
    }
    path.closeSubpath()
    return path
}
// One-shot timer (scheduled in addMetadataObjectOverlayLayersToVideoPreviewView)
// that clears stale overlays one second after they are drawn.
private var removeMetadataObjectOverlayLayersTimer: Timer?
/// Removes all installed overlay layers and cancels the pending auto-removal
/// timer. Call sites in this file invoke it from the main queue (it touches CALayers).
@objc private func removeMetadataObjectOverlayLayers() {
for sublayer in metadataObjectOverlayLayers {
sublayer.removeFromSuperlayer()
}
metadataObjectOverlayLayers = []
removeMetadataObjectOverlayLayersTimer?.invalidate()
removeMetadataObjectOverlayLayersTimer = nil
}
/// Installs the given overlay layers on the video preview layer and schedules
/// their automatic removal one second later.
private func addMetadataObjectOverlayLayersToVideoPreviewView(_ metadataObjectOverlayLayers: [MetadataObjectLayer]) {
// Add the metadata object overlays as sublayers of the video preview layer. We disable actions to allow for fast drawing.
CATransaction.begin()
CATransaction.setDisableActions(true)
for metadataObjectOverlayLayer in metadataObjectOverlayLayers {
previewView.videoPreviewLayer.addSublayer(metadataObjectOverlayLayer)
}
CATransaction.commit()
// Save the new metadata object overlays.
self.metadataObjectOverlayLayers = metadataObjectOverlayLayers
// Create a timer to destroy the metadata object overlays.
removeMetadataObjectOverlayLayersTimer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(removeMetadataObjectOverlayLayers), userInfo: nil, repeats: false)
}
// Tap recognizer wired to openBarcodeURL(with:).
private lazy var openBarcodeURLGestureRecognizer: UITapGestureRecognizer = {
UITapGestureRecognizer(target: self, action: #selector(CameraViewController.openBarcodeURL(with:)))
}()
/// If the tap landed inside a drawn barcode overlay whose payload is an
/// openable URL, open that URL.
@objc private func openBarcodeURL(with openBarcodeURLGestureRecognizer: UITapGestureRecognizer) {
for metadataObjectOverlayLayer in metadataObjectOverlayLayers {
// Hit-test the tap location against this overlay's outline path.
if metadataObjectOverlayLayer.path!.contains(openBarcodeURLGestureRecognizer.location(in: previewView), using: .winding, transform: .identity) {
if let barcodeMetadataObject = metadataObjectOverlayLayer.metadataObject as? AVMetadataMachineReadableCodeObject {
if barcodeMetadataObject.stringValue != nil {
if let url = URL(string: barcodeMetadataObject.stringValue), UIApplication.shared.canOpenURL(url) {
UIApplication.shared.open(url, options: [:], completionHandler: nil)
}
}
}
}
}
}
// MARK: AVCaptureMetadataOutputObjectsDelegate
/// Delivered on `metadataObjectsQueue` (see configureSession()); replaces the
/// on-screen overlays with one layer per newly detected metadata object.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
// wait() is used to drop new notifications if old ones are still processing, to avoid queueing up a bunch of stale data.
if metadataObjectsOverlayLayersDrawingSemaphore.wait(timeout: DispatchTime.now()) == .success {
// Layer work must happen on the main queue.
DispatchQueue.main.async { [unowned self] in
self.removeMetadataObjectOverlayLayers()
var metadataObjectOverlayLayers = [MetadataObjectLayer]()
for metadataObject in metadataObjects as! [AVMetadataObject] {
let metadataObjectOverlayLayer = self.createMetadataObjectOverlayWithMetadataObject(metadataObject)
metadataObjectOverlayLayers.append(metadataObjectOverlayLayer)
}
self.addMetadataObjectOverlayLayersToVideoPreviewView(metadataObjectOverlayLayers)
self.metadataObjectsOverlayLayersDrawingSemaphore.signal()
}
}
}
// MARK: ItemSelectionViewControllerDelegate
// Identifiers distinguishing the two item-selection screens (see prepare(for:sender:)).
let metadataObjectTypeItemSelectionIdentifier = "MetadataObjectTypes"
let sessionPresetItemSelectionIdentifier = "SessionPreset"
/// Applies the user's choices from an ItemSelectionViewController: either the
/// metadata object types to detect, or the session preset. Session/output
/// mutation happens on the session queue.
func itemSelectionViewController(_ itemSelectionViewController: ItemSelectionViewController, didFinishSelectingItems selectedItems: [String]) {
let identifier = itemSelectionViewController.identifier
if identifier == metadataObjectTypeItemSelectionIdentifier {
sessionQueue.async { [unowned self] in
self.metadataOutput.metadataObjectTypes = selectedItems
}
}
else if identifier == sessionPresetItemSelectionIdentifier {
sessionQueue.async { [unowned self] in
self.session.sessionPreset = selectedItems.first
}
}
}
}
extension AVCaptureDeviceDiscoverySession
{
    /// Number of distinct device positions (front/back/unspecified) among the
    /// discovered devices; callers use it to decide whether the camera-switch
    /// button should be enabled.
    func uniqueDevicePositionsCount() -> Int {
        // A Set de-duplicates in O(n) instead of the previous O(n^2)
        // Array.contains scan per device.
        var uniqueDevicePositions = Set<AVCaptureDevicePosition>()
        for device in devices {
            uniqueDevicePositions.insert(device.position)
        }
        return uniqueDevicePositions.count
    }
}
extension UIDeviceOrientation {
/// Maps a physical device orientation to the capture video orientation, or
/// nil when no mapping exists (faceUp, faceDown, unknown).
/// Note the deliberate cross-mapping: device .landscapeLeft maps to video
/// .landscapeRight and vice versa (device and camera orientations are
/// defined in opposite senses - see the parallel UIInterfaceOrientation
/// extension, which maps landscape straight through).
var videoOrientation: AVCaptureVideoOrientation? {
switch self {
case .portrait: return .portrait
case .portraitUpsideDown: return .portraitUpsideDown
case .landscapeLeft: return .landscapeRight
case .landscapeRight: return .landscapeLeft
default: return nil
}
}
}
extension UIInterfaceOrientation {
/// Maps an interface orientation to the capture video orientation, or nil
/// for .unknown. Unlike UIDeviceOrientation, interface orientation maps
/// straight through (landscape sides already agree with video orientation).
var videoOrientation: AVCaptureVideoOrientation? {
switch self {
case .portrait: return .portrait
case .portraitUpsideDown: return .portraitUpsideDown
case .landscapeLeft: return .landscapeLeft
case .landscapeRight: return .landscapeRight
default: return nil
}
}
}

View File

@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSCameraUsageDescription</key>
<string>to scan barcodes and faces</string>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
</array>
<key>UIRequiresFullScreen</key>
<true/>
<key>UIStatusBarHidden</key>
<true/>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
</dict>
</plist>

View File

@ -0,0 +1,89 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this samples licensing information
Abstract:
View controller for selecting items.
*/
import UIKit
/// Adopted by the presenting controller to receive the final selection when
/// the item-selection screen's Done button dismisses it.
protocol ItemSelectionViewControllerDelegate: class {
func itemSelectionViewController(_ itemSelectionViewController: ItemSelectionViewController, didFinishSelectingItems selectedItems: [String])
}
/// Table-based picker for a list of string items, supporting single or
/// multiple selection; reports the final selection through its delegate.
class ItemSelectionViewController: UITableViewController {
    /// Receives the final selection when the user taps Done.
    weak var delegate: ItemSelectionViewControllerDelegate?

    /// Opaque identifier the presenter uses to tell multiple selection
    /// screens apart in the delegate callback.
    var identifier = ""

    /// Every selectable item; one table row per item.
    var allItems = [String]()

    /// The subset of `allItems` currently selected.
    var selectedItems = [String]()

    /// When false, tapping a row replaces the previous single selection.
    var allowsMultipleSelection = false

    @IBAction private func done() {
        // Notify the delegate that selecting items is finished.
        delegate?.itemSelectionViewController(self, didFinishSelectingItems: selectedItems)
        // Dismiss the view controller.
        dismiss(animated: true, completion: nil)
    }

    // MARK: UITableViewDataSource

    override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let item = allItems[indexPath.row]
        let cell = tableView.dequeueReusableCell(withIdentifier: "Item", for: indexPath)
        cell.tintColor = UIColor.black
        cell.textLabel?.text = item
        // A checkmark marks rows whose item is currently selected.
        cell.accessoryType = selectedItems.contains(item) ? .checkmark : .none
        return cell
    }

    override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return allItems.count
    }

    // MARK: - UITableViewDelegate

    override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        if allowsMultipleSelection {
            // Toggle membership of the tapped item.
            let item = allItems[indexPath.row]
            if selectedItems.contains(item) {
                selectedItems = selectedItems.filter({ $0 != item })
            }
            else {
                selectedItems.append(item)
            }
            tableView.deselectRow(at: indexPath, animated: true)
            tableView.reloadRows(at: [indexPath], with: .automatic)
        }
        else {
            // Single selection: reload both the new row and the previously
            // selected one so the checkmark moves. BUGFIX: the previous code
            // force-unwrapped `allItems.index(of: selectedItems[0])!`, which
            // crashed if a stale selection was no longer present in allItems.
            var indexPathsToReload = [indexPath]
            if let previousItem = selectedItems.first, let previousRow = allItems.index(of: previousItem) {
                indexPathsToReload.append(IndexPath(row: previousRow, section: 0))
            }
            selectedItems = [allItems[indexPath.row]]
            // Deselect the selected row & reload the table view cells for the old and new items to swap checkmarks.
            tableView.deselectRow(at: indexPath, animated: true)
            tableView.reloadRows(at: indexPathsToReload, with: .automatic)
        }
    }
}

View File

@ -0,0 +1,449 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this samples licensing information
Abstract:
Application preview view.
*/
import UIKit
import AVFoundation
class PreviewView: UIView, UIGestureRecognizerDelegate {
// MARK: Types
/// The resize-handle corner of the region of interest currently being
/// dragged, or `.none` when no corner is active.
private enum ControlCorner {
case none
case topLeft
case topRight
case bottomLeft
case bottomRight
}
// MARK: Initialization
// Both the programmatic and storyboard initialization paths funnel into
// commonInit() for shared layer/gesture setup.
override init(frame: CGRect) {
super.init(frame: frame)
commonInit()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
commonInit()
}
/// Shared setup for both initializers: installs the dimming mask, the region
/// of interest outline, the four corner resize controls, and the resize
/// gesture recognizer.
private func commonInit() {
    // Dimming overlay covering everything outside the region of interest.
    // The even-odd fill rule lets one path cut a clear window out of the mask.
    maskLayer.fillRule = kCAFillRuleEvenOdd
    maskLayer.fillColor = UIColor.black.cgColor
    maskLayer.opacity = 0.6
    layer.addSublayer(maskLayer)

    // Yellow outline traced around the region of interest itself.
    regionOfInterestOutline.path = UIBezierPath(rect: regionOfInterest).cgPath
    regionOfInterestOutline.fillColor = UIColor.clear.cgColor
    regionOfInterestOutline.strokeColor = UIColor.yellow.cgColor
    layer.addSublayer(regionOfInterestOutline)

    // The four white circular resize handles share identical geometry, so
    // configure them in one pass; the order matches the original sublayer order.
    let controlRect = CGRect(x: 0, y: 0, width: regionOfInterestControlDiameter, height: regionOfInterestControlDiameter)
    for control in [topLeftControl, topRightControl, bottomLeftControl, bottomRightControl] {
        control.path = UIBezierPath(ovalIn: controlRect).cgPath
        control.fillColor = UIColor.white.cgColor
        layer.addSublayer(control)
    }

    /*
    Add the region of interest gesture recognizer to the region of interest
    view so that the region of interest can be resized and moved. If you
    would like to have a fixed region of interest that cannot be resized
    or moved, do not add the following gesture recognizer. You will simply
    need to set the region of interest once in
    `observeValue(forKeyPath:, of:, change:, context:)`.
    */
    resizeRegionOfInterestGestureRecognizer.delegate = self
    addGestureRecognizer(resizeRegionOfInterestGestureRecognizer)
}
deinit {
// Balance the KVO registration installed by the `session` setter.
session?.removeObserver(self, forKeyPath: "running", context: &sessionRunningObserveContext)
}
// MARK: AV capture properties

/// The view's backing layer, typed as `AVCaptureVideoPreviewLayer`.
/// The force cast is safe because `layerClass` returns that type.
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
return layer as! AVCaptureVideoPreviewLayer
}
/**
 The capture session displayed by the preview layer. Setting this property
 also manages a KVO observation of the session's `running` key path so the
 view can (re)initialize its region of interest when the session starts.
*/
var session: AVCaptureSession? {
    get {
        return videoPreviewLayer.session
    }
    set {
        /*
         Always stop observing the previous session before swapping it out.
         The original code only removed the observer when the new value was
         nil, leaking the observation (and risking KVO crashes on dealloc)
         whenever one non-nil session replaced another.
        */
        session?.removeObserver(self, forKeyPath: "running", context: &sessionRunningObserveContext)
        if let newValue = newValue {
            newValue.addObserver(self, forKeyPath: "running", options: .new, context: &sessionRunningObserveContext)
        }
        videoPreviewLayer.session = newValue
    }
}
// MARK: Region of Interest

/// Maximum distance (in points) from a corner at which a touch still
/// "grabs" that corner control; also used as padding in the gesture
/// delegate's touch filtering.
private let regionOfInterestCornerTouchThreshold: CGFloat = 50

/**
 The minimum region of interest's size cannot be smaller than the corner
 touch threshold so as to avoid control selection conflicts when a user
 tries to resize the region of interest.
*/
private var minimumRegionOfInterestSize: CGFloat {
return regionOfInterestCornerTouchThreshold
}

/// Diameter (in points) of the white corner-control dots.
private let regionOfInterestControlDiameter: CGFloat = 12.0

/// Radius of the corner-control dots; used to center each dot on its corner.
private var regionOfInterestControlRadius: CGFloat {
return regionOfInterestControlDiameter / 2.0
}

/// Dims everything outside the region of interest (even-odd fill rule;
/// its path is rebuilt in `layoutSubviews()`).
private let maskLayer = CAShapeLayer()

/// Yellow outline drawn around the region of interest.
private let regionOfInterestOutline = CAShapeLayer()

/**
 Saves a reference to the control corner that the user is using to resize
 the region of interest in `resizeRegionOfInterestWithGestureRecognizer()`.
 `.none` means the whole region is being moved rather than resized.
*/
private var currentControlCorner: ControlCorner = .none

/// White dot on the top left of the region of interest.
private let topLeftControl = CAShapeLayer()

/// White dot on the top right of the region of interest.
private let topRightControl = CAShapeLayer()

/// White dot on the bottom left of the region of interest.
private let bottomLeftControl = CAShapeLayer()

/// White dot on the bottom right of the region of interest.
private let bottomRightControl = CAShapeLayer()

/**
 This property is set only in `setRegionOfInterestWithProposedRegionOfInterest()`.
 When a user is resizing the region of interest in `resizeRegionOfInterestWithGestureRecognizer()`,
 the KVO notification will be triggered when the resizing is finished.
 Starts as `CGRect.null` until the first region is set (see `observeValue`).
*/
private(set) var regionOfInterest = CGRect.null
/**
 Updates the region of interest with a proposed region of interest, ensuring
 the new region of interest is within the bounds of the video preview. When
 a new region of interest is set, the region of interest is redrawn
 (via `setNeedsLayout()` / `layoutSubviews()`).
*/
func setRegionOfInterestWithProposedRegionOfInterest(_ proposedRegionOfInterest: CGRect)
{
    // We standardize to ensure we have positive widths and heights with an origin at the top left.
    let videoPreviewRect = videoPreviewLayer.rectForMetadataOutputRect(ofInterest: CGRect(x: 0, y: 0, width: 1, height: 1)).standardized

    /*
     Intersect the video preview view with the view's frame to only get
     the visible portions of the video preview view.
    */
    let visibleVideoPreviewRect = videoPreviewRect.intersection(frame)

    let oldRegionOfInterest = regionOfInterest
    var newRegionOfInterest = proposedRegionOfInterest.standardized

    // Move the region of interest in bounds (only when translating, not resizing).
    if currentControlCorner == .none {
        var xOffset: CGFloat = 0
        var yOffset: CGFloat = 0

        // Shift right/down if the origin fell outside the visible preview.
        if !visibleVideoPreviewRect.contains(newRegionOfInterest.origin) {
            xOffset = max(visibleVideoPreviewRect.minX - newRegionOfInterest.minX, CGFloat(0))
            yOffset = max(visibleVideoPreviewRect.minY - newRegionOfInterest.minY, CGFloat(0))
        }

        /*
         Shift left/up if the region's far corner fell outside the visible
         preview. Bug fix: the original tested whether the visible rect
         contains its own max corner — always false, since CGRect.contains
         excludes the max edge — so this clamp ran unconditionally. The
         intended check is against the region of interest's max corner.
        */
        if !visibleVideoPreviewRect.contains(CGPoint(x: newRegionOfInterest.maxX, y: newRegionOfInterest.maxY)) {
            xOffset = min(visibleVideoPreviewRect.maxX - newRegionOfInterest.maxX, xOffset)
            yOffset = min(visibleVideoPreviewRect.maxY - newRegionOfInterest.maxY, yOffset)
        }

        newRegionOfInterest = newRegionOfInterest.offsetBy(dx: xOffset, dy: yOffset)
    }

    // Clamp the size when the region of interest is being resized.
    newRegionOfInterest = visibleVideoPreviewRect.intersection(newRegionOfInterest)

    // Enforce a minimum width, anchoring the edge opposite the dragged corner.
    if proposedRegionOfInterest.size.width < minimumRegionOfInterestSize {
        switch currentControlCorner {
        case .topLeft, .bottomLeft:
            // Left edge is being dragged: keep the right edge fixed.
            newRegionOfInterest.origin.x = oldRegionOfInterest.origin.x + oldRegionOfInterest.size.width - minimumRegionOfInterestSize
            newRegionOfInterest.size.width = minimumRegionOfInterestSize
        case .topRight:
            // Right edge is being dragged: keep the left edge fixed.
            newRegionOfInterest.origin.x = oldRegionOfInterest.origin.x
            newRegionOfInterest.size.width = minimumRegionOfInterestSize
        default:
            newRegionOfInterest.origin = oldRegionOfInterest.origin
            newRegionOfInterest.size.width = minimumRegionOfInterestSize
        }
    }

    // Enforce a minimum height, anchoring the edge opposite the dragged corner.
    if proposedRegionOfInterest.size.height < minimumRegionOfInterestSize {
        switch currentControlCorner {
        case .topLeft, .topRight:
            // Top edge is being dragged: keep the bottom edge fixed.
            newRegionOfInterest.origin.y = oldRegionOfInterest.origin.y + oldRegionOfInterest.size.height - minimumRegionOfInterestSize
            newRegionOfInterest.size.height = minimumRegionOfInterestSize
        case .bottomLeft:
            // Bottom edge is being dragged: keep the top edge fixed.
            newRegionOfInterest.origin.y = oldRegionOfInterest.origin.y
            newRegionOfInterest.size.height = minimumRegionOfInterestSize
        default:
            newRegionOfInterest.origin = oldRegionOfInterest.origin
            newRegionOfInterest.size.height = minimumRegionOfInterestSize
        }
    }

    regionOfInterest = newRegionOfInterest
    setNeedsLayout()
}
/// `true` while the user is actively dragging the resize gesture.
var isResizingRegionOfInterest: Bool {
return resizeRegionOfInterestGestureRecognizer.state == .changed
}

/// Pan recognizer that moves/resizes the region of interest;
/// installed on the view in `commonInit()`.
private lazy var resizeRegionOfInterestGestureRecognizer: UIPanGestureRecognizer = {
UIPanGestureRecognizer(target: self, action: #selector(PreviewView.resizeRegionOfInterestWithGestureRecognizer(_:)))
}()
/**
 Pan-gesture handler that moves or resizes the region of interest.
 On `.began` it records which corner (if any) was grabbed; on `.changed`
 it either translates the whole region (`.none`) or resizes from the
 grabbed corner; on `.ended` it fires the `regionOfInterest` KVO
 notification and clears the corner reference.
*/
@objc func resizeRegionOfInterestWithGestureRecognizer(_ resizeRegionOfInterestGestureRecognizer: UIPanGestureRecognizer) {
let touchLocation = resizeRegionOfInterestGestureRecognizer.location(in: resizeRegionOfInterestGestureRecognizer.view)
let oldRegionOfInterest = regionOfInterest
switch resizeRegionOfInterestGestureRecognizer.state {
case .began:
// Manual KVO: the matching didChangeValue is sent on .ended so that
// observers see one notification per completed drag.
willChangeValue(forKey: "regionOfInterest")
/*
When the gesture begins, save the corner that is closest to
the resize region of interest gesture recognizer's touch location.
*/
currentControlCorner = cornerOfRect(oldRegionOfInterest, closestToPointWithinTouchThreshold: touchLocation)
case .changed:
var newRegionOfInterest = oldRegionOfInterest
switch currentControlCorner {
case .none:
// Update the new region of interest with the gesture recognizer's translation.
let translation = resizeRegionOfInterestGestureRecognizer.translation(in: resizeRegionOfInterestGestureRecognizer.view)
// Move the region of interest with the gesture recognizer's translation.
if regionOfInterest.contains(touchLocation) {
newRegionOfInterest.origin.x += translation.x
newRegionOfInterest.origin.y += translation.y
}
/*
If the touch location goes outside the preview layer,
we will only translate the region of interest in the
plane that is not out of bounds.
*/
let normalizedRect = CGRect(x: 0, y: 0, width: 1, height: 1)
if !normalizedRect.contains(videoPreviewLayer.captureDevicePointOfInterest(for: touchLocation)) {
if touchLocation.x < regionOfInterest.minX || touchLocation.x > regionOfInterest.maxX {
newRegionOfInterest.origin.y += translation.y
}
else if touchLocation.y < regionOfInterest.minY || touchLocation.y > regionOfInterest.maxY {
newRegionOfInterest.origin.x += translation.x
}
}
/*
Set the translation to be zero so that the new gesture
recognizer's translation is in respect to the region of
interest's new position.
*/
resizeRegionOfInterestGestureRecognizer.setTranslation(CGPoint.zero, in: resizeRegionOfInterestGestureRecognizer.view)
case .topLeft:
// Anchor the bottom-right corner; top-left tracks the touch.
newRegionOfInterest = CGRect(x: touchLocation.x,
y: touchLocation.y,
width: oldRegionOfInterest.size.width + oldRegionOfInterest.origin.x - touchLocation.x,
height: oldRegionOfInterest.size.height + oldRegionOfInterest.origin.y - touchLocation.y)
case .topRight:
// Anchor the bottom-left corner. NOTE: `newRegionOfInterest` still
// equals `oldRegionOfInterest` here (assigned just above), so these
// reads are equivalent to reading `oldRegionOfInterest`.
newRegionOfInterest = CGRect(x: newRegionOfInterest.origin.x,
y: touchLocation.y,
width: touchLocation.x - newRegionOfInterest.origin.x,
height: oldRegionOfInterest.size.height + newRegionOfInterest.origin.y - touchLocation.y)
case .bottomLeft:
// Anchor the top-right corner.
newRegionOfInterest = CGRect(x: touchLocation.x,
y: oldRegionOfInterest.origin.y,
width: oldRegionOfInterest.size.width + oldRegionOfInterest.origin.x - touchLocation.x,
height: touchLocation.y - oldRegionOfInterest.origin.y)
case .bottomRight:
// Anchor the top-left corner.
newRegionOfInterest = CGRect(x: oldRegionOfInterest.origin.x,
y: oldRegionOfInterest.origin.y,
width: touchLocation.x - oldRegionOfInterest.origin.x,
height: touchLocation.y - oldRegionOfInterest.origin.y)
}
// Update the region of interest with a valid CGRect.
setRegionOfInterestWithProposedRegionOfInterest(newRegionOfInterest)
case .ended:
// Completes the manual KVO pair opened in .began.
didChangeValue(forKey: "regionOfInterest")
/*
Reset the current corner reference to none now that the resize
gesture recognizer has ended.
*/
currentControlCorner = .none
default:
return
}
}
/// Returns the control corner of `rect` nearest to `point`, or `.none`
/// when even the nearest corner lies beyond the corner touch threshold.
private func cornerOfRect(_ rect: CGRect, closestToPointWithinTouchThreshold point: CGPoint) -> ControlCorner {
    // The four candidate corners, paired with their positions.
    let candidates: [(ControlCorner, CGPoint)] = [
        (.topLeft, rect.origin),
        (.topRight, CGPoint(x: rect.maxX, y: rect.minY)),
        (.bottomLeft, CGPoint(x: rect.minX, y: rect.maxY)),
        (.bottomRight, CGPoint(x: rect.maxX, y: rect.maxY))
    ]

    // Euclidean distance from the touch point to a corner.
    func distance(to cornerPoint: CGPoint) -> CGFloat {
        let dx = point.x - cornerPoint.x
        let dy = point.y - cornerPoint.y
        return sqrt((dx * dx) + (dy * dy))
    }

    // Pick the nearest candidate; ties resolve to the earlier entry,
    // matching the original strict-less-than scan order.
    guard let nearest = candidates.min(by: { distance(to: $0.1) < distance(to: $1.1) }) else {
        return .none
    }
    return distance(to: nearest.1) <= regionOfInterestCornerTouchThreshold ? nearest.0 : .none
}
// MARK: KVO

/// Context token distinguishing this view's `running` observation from
/// any superclass observations.
var sessionRunningObserveContext = 0

/**
 Responds to the session's `running` key path (registered in the `session`
 setter). On the main queue: lazily seeds an initial region of interest,
 and re-applies the current one when the session starts running.
*/
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
if context == &sessionRunningObserveContext {
let newValue = change?[.newKey] as AnyObject?
guard let isSessionRunning = newValue?.boolValue else { return }
DispatchQueue.main.async { [unowned self] in
/*
If the region of interest view's region of interest has not
been initialized yet, let's set an initial region of interest
that is 80% of the shortest side by 25% of the longest side
and centered in the root view.
*/
if self.regionOfInterest.isNull {
let width = min(self.frame.width, self.frame.height) * 0.8
let height = max(self.frame.width, self.frame.height) * 0.25
let newRegionOfInterest = self.frame.insetBy(dx: self.frame.midX - width / 2.0, dy: self.frame.midY - height / 2.0)
self.setRegionOfInterestWithProposedRegionOfInterest(newRegionOfInterest)
}
/*
When the session starts running, re-apply the current region of
interest so it is clamped and redrawn against the live video
preview. (The comment originally here was a copy-paste duplicate
of the one above.)
*/
if isSessionRunning {
self.setRegionOfInterestWithProposedRegionOfInterest(self.regionOfInterest)
}
}
}
else {
super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
}
}
// MARK: UIView

/// Back this view with an `AVCaptureVideoPreviewLayer` so `layer` hosts the
/// capture session's preview directly (see `videoPreviewLayer`).
override class var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}
/// Repositions the mask, outline, and corner-control sublayers to match
/// the current region of interest.
override func layoutSubviews() {
    super.layoutSubviews()

    // Disable CoreAnimation actions so the sublayers jump immediately to
    // their new positions instead of animating.
    CATransaction.begin()
    CATransaction.setDisableActions(true)
    defer { CATransaction.commit() }

    // Mask path: the full view rect plus the region of interest; the
    // even-odd fill rule punches a transparent hole over the region.
    let path = UIBezierPath(rect: CGRect(x: 0, y: 0, width: frame.size.width, height: frame.size.height))
    path.append(UIBezierPath(rect: regionOfInterest))
    path.usesEvenOddFillRule = true
    maskLayer.path = path.cgPath

    regionOfInterestOutline.path = CGPath(rect: regionOfInterest, transform: nil)

    // Center each corner-control dot on its corner of the region of interest.
    let radius = regionOfInterestControlRadius
    topLeftControl.position = CGPoint(x: regionOfInterest.minX - radius, y: regionOfInterest.minY - radius)
    topRightControl.position = CGPoint(x: regionOfInterest.maxX - radius, y: regionOfInterest.minY - radius)
    bottomLeftControl.position = CGPoint(x: regionOfInterest.minX - radius, y: regionOfInterest.maxY - radius)
    bottomRightControl.position = CGPoint(x: regionOfInterest.maxX - radius, y: regionOfInterest.maxY - radius)
}
// MARK: UIGestureRecognizerDelegate

/// Filters touches for the resize recognizer: only touches inside the
/// region of interest (padded by the corner touch threshold) are accepted.
/// All other recognizers receive every touch.
func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldReceive touch: UITouch) -> Bool {
    guard gestureRecognizer == resizeRegionOfInterestGestureRecognizer else { return true }

    let touchLocation = touch.location(in: gestureRecognizer.view)
    let paddedRegionOfInterest = regionOfInterest.insetBy(dx: -regionOfInterestCornerTouchThreshold,
                                                          dy: -regionOfInterestCornerTouchThreshold)
    return paddedRegionOfInterest.contains(touchLocation)
}
/// Allows the resize recognizer to run alongside other recognizers only
/// when the touch is not grabbing a corner control; every other recognizer
/// pairing is exclusive.
func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
    guard gestureRecognizer == resizeRegionOfInterestGestureRecognizer else { return false }

    let touchLocation = gestureRecognizer.location(in: gestureRecognizer.view)
    return cornerOfRect(regionOfInterest, closestToPointWithinTouchThreshold: touchLocation) == .none
}
}

42
AVCamBarcode/LICENSE.txt Normal file
View File

@ -0,0 +1,42 @@
Sample code project: AVCamBarcode: Using AVFoundation to Detect Barcodes and Faces
Version: 1.1
IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Copyright (C) 2016 Apple Inc. All Rights Reserved.

15
AVCamBarcode/README.md Normal file
View File

@ -0,0 +1,15 @@
# AVCamBarcode: Using AVFoundation to Detect Barcodes and Faces
AVCamBarcode demonstrates how to use the AVFoundation capture API to detect barcodes and faces.
## Requirements
### Build
Xcode 8.0, iOS 10.0 SDK
### Runtime
iOS 10.0 or later
Copyright (C) 2016 Apple Inc. All rights reserved.

View File

@ -0,0 +1,42 @@
Sample code project: AVFoundationExporter: Exporting and Transcoding Movies
Version: 3.0
IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Copyright (C) 2016 Apple Inc. All Rights Reserved.

View File

@ -0,0 +1,237 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
1E1D58BC1368D74F00D93743 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1E1D58BB1368D74F00D93743 /* Foundation.framework */; };
1E1D58BF1368D74F00D93743 /* AVFoundationExporter.m in Sources */ = {isa = PBXBuildFile; fileRef = 1E1D58BE1368D74F00D93743 /* AVFoundationExporter.m */; };
1E1D58CF1368D7E600D93743 /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1E1D58CD1368D7E600D93743 /* CoreMedia.framework */; };
1E1D58D01368D7E600D93743 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1E1D58CE1368D7E600D93743 /* AVFoundation.framework */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
1E1D58B51368D74F00D93743 /* CopyFiles */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = /usr/share/man/man1/;
dstSubfolderSpec = 0;
files = (
);
runOnlyForDeploymentPostprocessing = 1;
};
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
1E1D58B71368D74F00D93743 /* AVFoundationExporter */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.executable"; includeInIndex = 0; path = AVFoundationExporter; sourceTree = BUILT_PRODUCTS_DIR; };
1E1D58BB1368D74F00D93743 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; };
1E1D58BE1368D74F00D93743 /* AVFoundationExporter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AVFoundationExporter.m; sourceTree = "<group>"; };
1E1D58CD1368D7E600D93743 /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = "<group>"; };
1E1D58CE1368D7E600D93743 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = "<group>"; };
3EAA11C51B1B895500EC0006 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
1E1D58B41368D74F00D93743 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
1E1D58BC1368D74F00D93743 /* Foundation.framework in Frameworks */,
1E1D58CF1368D7E600D93743 /* CoreMedia.framework in Frameworks */,
1E1D58D01368D7E600D93743 /* AVFoundation.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
1E1D58AC1368D74F00D93743 = {
isa = PBXGroup;
children = (
3EAA11C51B1B895500EC0006 /* README.md */,
1E1D58BD1368D74F00D93743 /* AVFoundationExporter */,
1E1D58BA1368D74F00D93743 /* Frameworks */,
1E1D58B81368D74F00D93743 /* Products */,
);
sourceTree = "<group>";
};
1E1D58B81368D74F00D93743 /* Products */ = {
isa = PBXGroup;
children = (
1E1D58B71368D74F00D93743 /* AVFoundationExporter */,
);
name = Products;
sourceTree = "<group>";
};
1E1D58BA1368D74F00D93743 /* Frameworks */ = {
isa = PBXGroup;
children = (
1E1D58BB1368D74F00D93743 /* Foundation.framework */,
1E1D58CD1368D7E600D93743 /* CoreMedia.framework */,
1E1D58CE1368D7E600D93743 /* AVFoundation.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
1E1D58BD1368D74F00D93743 /* AVFoundationExporter */ = {
isa = PBXGroup;
children = (
1E1D58BE1368D74F00D93743 /* AVFoundationExporter.m */,
);
path = AVFoundationExporter;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
1E1D58B61368D74F00D93743 /* AVFoundationExporter */ = {
isa = PBXNativeTarget;
buildConfigurationList = 1E1D58C61368D74F00D93743 /* Build configuration list for PBXNativeTarget "AVFoundationExporter" */;
buildPhases = (
1E1D58B31368D74F00D93743 /* Sources */,
1E1D58B41368D74F00D93743 /* Frameworks */,
1E1D58B51368D74F00D93743 /* CopyFiles */,
);
buildRules = (
);
dependencies = (
);
name = AVFoundationExporter;
productName = AVFoundationExporter;
productReference = 1E1D58B71368D74F00D93743 /* AVFoundationExporter */;
productType = "com.apple.product-type.tool";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
1E1D58AE1368D74F00D93743 /* Project object */ = {
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 0700;
LastUpgradeCheck = 0700;
ORGANIZATIONNAME = "Apple, Inc";
};
buildConfigurationList = 1E1D58B11368D74F00D93743 /* Build configuration list for PBXProject "AVFoundationExporter" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
);
mainGroup = 1E1D58AC1368D74F00D93743;
productRefGroup = 1E1D58B81368D74F00D93743 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
1E1D58B61368D74F00D93743 /* AVFoundationExporter */,
);
};
/* End PBXProject section */
/* Begin PBXSourcesBuildPhase section */
1E1D58B31368D74F00D93743 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
1E1D58BF1368D74F00D93743 /* AVFoundationExporter.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
1E1D58C41368D74F00D93743 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = "DEBUG=1";
GCC_SYMBOLS_PRIVATE_EXTERN = NO;
GCC_VERSION = "";
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_MISSING_PROTOTYPES = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.7;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = macosx;
};
name = Debug;
};
1E1D58C51368D74F00D93743 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_VERSION = "";
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_MISSING_PROTOTYPES = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.7;
SDKROOT = macosx;
};
name = Release;
};
1E1D58C71368D74F00D93743 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ENABLE_MODULES = YES;
COPY_PHASE_STRIP = NO;
DEFINES_MODULE = NO;
GCC_DYNAMIC_NO_PIC = NO;
GCC_ENABLE_OBJC_EXCEPTIONS = YES;
GCC_PRECOMPILE_PREFIX_HEADER = NO;
GCC_PREFIX_HEADER = "";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = "";
};
name = Debug;
};
1E1D58C81368D74F00D93743 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ENABLE_MODULES = YES;
COPY_PHASE_STRIP = YES;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEFINES_MODULE = NO;
GCC_ENABLE_OBJC_EXCEPTIONS = YES;
GCC_PRECOMPILE_PREFIX_HEADER = NO;
GCC_PREFIX_HEADER = "";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = "";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
1E1D58B11368D74F00D93743 /* Build configuration list for PBXProject "AVFoundationExporter" */ = {
isa = XCConfigurationList;
buildConfigurations = (
1E1D58C41368D74F00D93743 /* Debug */,
1E1D58C51368D74F00D93743 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
1E1D58C61368D74F00D93743 /* Build configuration list for PBXNativeTarget "AVFoundationExporter" */ = {
isa = XCConfigurationList;
buildConfigurations = (
1E1D58C71368D74F00D93743 /* Debug */,
1E1D58C81368D74F00D93743 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 1E1D58AE1368D74F00D93743 /* Project object */;
}

View File

@ -0,0 +1,534 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
This file shows an example of using the export and metadata functions in AVFoundation as a part of a command line tool for simple exports.
*/
@import Foundation;
@import AVFoundation;
// ---------------------------------------------------------------------------
// Convenience Functions
// ---------------------------------------------------------------------------
static void printNSString(NSString *string);
static void printArgs(int argc, const char **argv);
// ---------------------------------------------------------------------------
// AAPLExporter Class Interface
// ---------------------------------------------------------------------------
// Command-line export driver: parses arguments, then exports/transcodes a
// movie and can list presets, tracks, and metadata.
@interface AAPLExporter: NSObject {
NSString *programName;
NSString *exportType;
NSString *preset;
NSString *sourcePath;
NSString *destinationPath;
NSString *fileType;
NSNumber *progress;
NSNumber *startSeconds;
NSNumber *durationSeconds;
BOOL showProgress;
BOOL verbose;
BOOL exportFailed;
BOOL exportComplete;
BOOL listTracks;
BOOL listMetadata;
BOOL removePreExistingFiles;
}
// Values parsed from the command line (see -initWithArgs:argv:environ:).
@property (copy) NSString *programName;
@property (copy) NSString *exportType;
@property (copy) NSString *preset;
@property (copy) NSString *sourcePath;
@property (copy) NSString *destinationPath;
@property (copy) NSString *fileType;
// Export-session state: fractional progress and trim window in seconds.
@property (strong) NSNumber *progress;
@property (strong) NSNumber *startSeconds;
@property (strong) NSNumber *durationSeconds;
@property (getter=isVerbose) BOOL verbose;
@property BOOL showProgress;
@property BOOL exportFailed;
@property BOOL exportComplete;
@property BOOL listTracks;
@property BOOL listMetadata;
@property BOOL removePreExistingFiles;
// Designated initializer; returns nil on invalid/insufficient arguments.
- (id)initWithArgs:(int)argc argv:(const char **)argv environ:(const char **)environ;
- (void)printUsage;
// Performs the export/listing work; returns a process exit status.
- (int)run;
- (NSArray *)addNewMetadata:(NSArray *)sourceMetadataList presetName:(NSString *)presetName;
+ (void)doListPresets;
- (void)doListTracks:(NSString *)assetPath;
- (void)doListMetadata:(NSString *)assetPath;
@end
// ---------------------------------------------------------------------------
// AAPLExporter Class Implementation
// ---------------------------------------------------------------------------
@implementation AAPLExporter
@synthesize programName, exportType, preset;
@synthesize sourcePath, destinationPath, progress, fileType;
@synthesize startSeconds, durationSeconds;
@synthesize verbose, showProgress, exportComplete, exportFailed;
@synthesize listTracks, listMetadata;
@synthesize removePreExistingFiles;
// Parses the command line into the exporter's properties.
// Returns nil (after printing usage) when required arguments are missing or
// an unknown flag is seen. The environ parameter is accepted but unused.
-(id) initWithArgs: (int) argc argv: (const char **) argv environ: (const char **) environ
{
self = [super init];
if (self == nil) {
return nil;
}
printArgs(argc,argv);
// Track which required pieces were supplied while scanning the flags.
BOOL gotpreset = NO;
BOOL gotsource = NO;
BOOL gotout = NO;
BOOL parseOK = NO;
BOOL listPresets = NO;
// argv[0] is the program name; consume it before scanning flags.
[self setProgramName:[NSString stringWithUTF8String: *argv++]];
argc--;
// Scan "-flag [value]" pairs until a non-flag argument or the end.
// NOTE(review): flags that take a value read *argv++ without checking
// argc first, so a trailing valueless flag (e.g. "-source" as the last
// argument) dereferences the argv terminator — verify/guard upstream.
while ( argc > 0 && **argv == '-' )
{
// Skip the leading '-' to get the bare flag name.
const char* args = &(*argv)[1];
argc--;
argv++;
if ( ! strcmp ( args, "source" ) )
{
[self setSourcePath: [NSString stringWithUTF8String: *argv++] ];
gotsource = YES;
argc--;
}
else if (( ! strcmp ( args, "dest" )) || ( ! strcmp ( args, "destination" )) )
{
[self setDestinationPath: [NSString stringWithUTF8String: *argv++]];
gotout = YES;
argc--;
}
else if ( ! strcmp ( args, "preset" ) )
{
[self setPreset: [NSString stringWithUTF8String: *argv++]];
gotpreset = YES;
argc--;
}
else if ( ! strcmp ( args, "replace" ) )
{
[self setRemovePreExistingFiles: YES];
}
else if ( ! strcmp ( args, "filetype" ) )
{
[self setFileType: [NSString stringWithUTF8String: *argv++]];
argc--;
}
else if ( ! strcmp ( args, "verbose" ) )
{
[self setVerbose:YES];
}
else if ( ! strcmp ( args, "progress" ) )
{
[self setShowProgress: YES];
}
else if ( ! strcmp ( args, "start" ) )
{
// Trim-window start, parsed as float seconds.
[self setStartSeconds: [NSNumber numberWithFloat:[[NSString stringWithUTF8String: *argv++] floatValue]]];
argc--;
}
else if ( ! strcmp ( args, "duration" ) )
{
// Trim-window duration, parsed as float seconds.
[self setDurationSeconds: [NSNumber numberWithFloat:[[NSString stringWithUTF8String: *argv++] floatValue]]];
argc--;
}
else if ( ! strcmp ( args, "listpresets" ) )
{
listPresets = YES;
parseOK = YES;
}
else if ( ! strcmp ( args, "listtracks" ) )
{
[self setListTracks: YES];
parseOK = YES;
}
else if ( ! strcmp ( args, "listmetadata" ) )
{
[self setListMetadata: YES];
parseOK = YES;
}
else if ( ! strcmp ( args, "help" ) )
{
// NOTE(review): prints usage but keeps parsing instead of returning;
// confirm whether "-help" should terminate initialization.
[self printUsage];
}
else {
printf("Invalid input parameter: %s\n", args );
[self printUsage];
return nil;
}
}
// Reset run-state before any export begins.
[self setProgress: [NSNumber numberWithFloat:(float)0.0]];
[self setExportFailed: NO];
[self setExportComplete: NO];
if (listPresets) {
[AAPLExporter doListPresets];
}
if ([self isVerbose]) {
printNSString([NSString stringWithFormat:@"Running: %@\n", [self programName]]);
}
// There must be a source and either a preset and output (the normal case) or parseOK set for a listing
if ((gotsource == NO) || ((parseOK == NO) && ((gotpreset == NO) || (gotout == NO)))) {
[self printUsage];
return nil;
}
return self;
}
// Prints the command line usage summary, including all recognized parameters
// and two sample invocations.
-(void) printUsage
{
    printf("AVFoundationExporter - usage:\n");
    printf("	./AVFoundationExporter [-parameter <value> ...]\n");
    printf("	parameters are all preceded by a -<parameterName>.  The order of the parameters is unimportant.\n");
    printf("	Required parameters are  -preset <presetName> -source <sourceFileURL> -dest <outputFileURL>\n");
    printf("	Source and destination URL strings cannot contain spaces.\n");
    printf("	Available parameters are:\n");
    printf("		-preset <preset name>.  The preset name eg: AVAssetExportPreset640x480 AVAssetExportPresetAppleM4VWiFi.  Use -listpresets to see a full list.\n");
    printf("		-destination (or -dest) <outputFileURL>\n");
    printf("		-source <sourceMovieURL>\n");
    // BUGFIX: this line previously had no trailing newline, running it into the next line of output.
    printf("		-replace   If there is a preexisting file at the destination location, remove it before exporting.\n");
    printf("		-filetype <file type string> The file type (eg com.apple.m4v-video) for the output file.  If not specified, the first supported type will be used.\n");
    printf("		-start <start time>  time in seconds (decimal are OK).  Removes the startClip time from the beginning of the movie before exporting.\n");
    printf("		-duration <duration>  time in seconds (decimal are OK).  Trims the movie to this duration before exporting.  \n");
    printf("	Also available are some setup options:\n");
    printf("		-verbose  Print more information about the execution.\n");
    printf("		-progress  Show progress information.\n");
    // BUGFIX: the -listpresets description was a copy-paste of the -listtracks
    // description; it actually lists the available export preset names.
    printf("		-listpresets  Lists the preset names available for AVFoundation export. \n");
    printf("		-listtracks  For sourceMovieURL sources only, lists the tracks in the source movie before the export.   \n");
    printf("			Always lists the tracks in the destination asset at the end of the export.\n");
    printf("		-listmetadata  Lists the metadata in the source movie before the export.   \n");
    printf("			Also lists the metadata in the destination asset at the end of the export.\n");
    printf("	Sample export lines:\n");
    printf("	./AVFoundationExporter -dest /tmp/testOut.m4v -replace -preset AVAssetExportPresetAppleM4ViPod -listmetadata -source /path/to/myTestMovie.m4v\n");
    printf("	./AVFoundationExporter -destination /tmp/testOut.mov -preset AVAssetExportPreset640x480 -listmetadata -listtracks -source /path/to/myTestMovie.mov\n");
}
// Converts a duration in seconds into a dispatch_time_t offset from now,
// suitable for dispatch_semaphore_wait and friends.
static dispatch_time_t getDispatchTimeFromSeconds(float seconds) {
    // Truncate to whole milliseconds first, then scale to nanoseconds.
    long long wholeMilliseconds = (long long)(seconds * 1000.0);
    return dispatch_time( DISPATCH_TIME_NOW, wholeMilliseconds * 1000000LL );
}
// Runs the configured work: optional pre-export listings of the source,
// then (when both a destination and a preset were supplied) an
// AVAssetExportSession export, waiting — with optional once-a-second
// progress output — until the session finishes, and finally the optional
// post-export listings of the destination.
// Returns nonzero on success, 0 on failure.
- (int)run
{
    NSURL *sourceURL = nil;
    AVAssetExportSession *avsession = nil;
    NSURL *destinationURL = nil;
    BOOL success = YES;
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    NSParameterAssert( [self sourcePath] != nil );
    // Optional pre-export listings of the source asset.
    if ([self listTracks] && [self sourcePath]) {
        [self doListTracks:[self sourcePath]];
    }
    if ([self listMetadata] && [self sourcePath]) {
        [self doListMetadata:[self sourcePath]];
    }
    // Without both a destination and a preset there is nothing to export.
    if ([self destinationPath] == nil) {
        NSLog(@"No output path specified, only listing tracks and/or metadata, export was not performed.");
        goto bail;
    }
    if ([self preset] == nil) {
        NSLog(@"No preset specified, only listing tracks and/or metadata, export was not performed.");
        goto bail;
    }
    if ( [self isVerbose] && [self sourcePath] ) {
        printNSString([NSString stringWithFormat:@"all av asset presets:%@", [AVAssetExportSession allExportPresets]]);
    }
    if ([self sourcePath] != nil) {
        sourceURL = [[NSURL fileURLWithPath: [self sourcePath] isDirectory: NO] retain];
    }
    AVAsset *sourceAsset = nil;
    NSError* error = nil;
    destinationURL = [NSURL fileURLWithPath: [self destinationPath] isDirectory: NO];
    if ([self isVerbose]) {
        // BUGFIX: this message was previously logged before destinationURL was
        // created (so the destination always printed as null) and its text
        // mislabeled the destination as the source.
        printNSString([NSString stringWithFormat:@"AVAssetExport for preset:%@ from source:%@ to destination:%@", [self preset], [self sourcePath], [destinationURL path]]);
    }
    if ([self removePreExistingFiles] && [[NSFileManager defaultManager] fileExistsAtPath:[self destinationPath]]) {
        if ([self isVerbose]) {
            // BUGFIX: typo "re-existing" corrected to "pre-existing".
            printNSString([NSString stringWithFormat:@"Removing pre-existing destination file at:%@", destinationURL]);
        }
        [[NSFileManager defaultManager] removeItemAtURL:destinationURL error:&error];
    }
    sourceAsset = [[[AVURLAsset alloc] initWithURL:sourceURL options:nil] autorelease];
    if ([self isVerbose]) {
        printNSString([NSString stringWithFormat:@"Compatible av asset presets:%@", [AVAssetExportSession exportPresetsCompatibleWithAsset:sourceAsset]]);
    }
    // Configure the session: destination URL, output file type (first
    // supported type when none was requested), metadata, and time range.
    avsession = [[AVAssetExportSession alloc] initWithAsset:sourceAsset presetName:[self preset]];
    [avsession setOutputURL:destinationURL];
    if ([self fileType] != nil) {
        [avsession setOutputFileType:[self fileType]];
    }
    else {
        [avsession setOutputFileType:[[avsession supportedFileTypes] objectAtIndex:0]];
    }
    if ([self isVerbose]) {
        printNSString([NSString stringWithFormat:@"Created AVAssetExportSession: %p", avsession]);
        printNSString([NSString stringWithFormat:@"presetName:%@", [avsession presetName]]);
        printNSString([NSString stringWithFormat:@"source URL:%@", [sourceURL path]]);
        printNSString([NSString stringWithFormat:@"destination URL:%@", [[avsession outputURL] path]]);
        printNSString([NSString stringWithFormat:@"output file type:%@", [avsession outputFileType]]);
    }
    // Add a metadata item to indicate how the destination file was created.
    NSArray *sourceMetadataList = [avsession metadata];
    sourceMetadataList = [self addNewMetadata: sourceMetadataList presetName:[self preset]];
    [avsession setMetadata:sourceMetadataList];
    // Set up the time range; defaults cover the whole movie.
    CMTime startTime = kCMTimeZero;
    CMTime durationTime = kCMTimePositiveInfinity;
    if ([self startSeconds] != nil) {
        startTime = CMTimeMake([[self startSeconds] floatValue] * 1000, 1000);
    }
    if ([self durationSeconds] != nil) {
        durationTime = CMTimeMake([[self durationSeconds] floatValue] * 1000, 1000);
    }
    CMTimeRange exportTimeRange = CMTimeRangeMake(startTime, durationTime);
    [avsession setTimeRange:exportTimeRange];
    // start a fresh pool for the export.
    [pool drain];
    pool = [[NSAutoreleasePool alloc] init];
    // The completion handler signals this semaphore; the loop below either
    // blocks until completion, or wakes once a second to report progress.
    dispatch_semaphore_t sessionWaitSemaphore = dispatch_semaphore_create( 0 );
    void (^completionHandler)(void) = ^(void)
    {
        dispatch_semaphore_signal(sessionWaitSemaphore);
    };
    // do it.
    [avsession exportAsynchronouslyWithCompletionHandler:completionHandler];
    do {
        dispatch_time_t dispatchTime = DISPATCH_TIME_FOREVER;  // if we dont want progress, we will wait until it finishes.
        if ([self showProgress]) {
            dispatchTime = getDispatchTimeFromSeconds((float)1.0);
            printNSString([NSString stringWithFormat:@"AVAssetExport running  progress=%3.2f%%", [avsession progress]*100]);
        }
        dispatch_semaphore_wait(sessionWaitSemaphore, dispatchTime);
    } while( [avsession status] < AVAssetExportSessionStatusCompleted );
    if ([self showProgress]) {
        printNSString([NSString stringWithFormat:@"AVAssetExport finished progress=%3.2f", [avsession progress]*100]);
    }
    // BUGFIX: report the real outcome. Previously `success` stayed YES even
    // when the session failed or was cancelled, so the process exit code lied.
    success = ([avsession status] == AVAssetExportSessionStatusCompleted);
    if (!success && [avsession error] != nil) {
        printNSString([NSString stringWithFormat:@"Export failed with error: %@", [avsession error]]);
    }
    [avsession release];
    avsession = nil;
    // Optional post-export listings of the destination asset.
    if ([self listMetadata] && [self destinationPath]) {
        [self doListMetadata:[self destinationPath]];
    }
    if ([self listTracks] && [self destinationPath]) {
        [self doListTracks:[self destinationPath]];
    }
    printNSString([NSString stringWithFormat:@"Finished export of %@ to %@ using preset:%@ success=%s\n", [self sourcePath], [self destinationPath], [self preset], (success ? "YES" : "NO")]);
bail:
    [sourceURL release];
    [pool drain];
    return success;
}
// Builds a few new comment metadata items (one per keySpace) describing how
// the destination file was created, and appends them to the metadata that was
// in the original source. Depending on the output file format, not all of
// these items will be valid and not all of them will come through to the
// destination.
- (NSArray *) addNewMetadata: (NSArray *)sourceMetadataList presetName:(NSString *)presetName
{
    // One timestamp shared by all three items.
    NSString *timestamp = [NSDateFormatter localizedStringFromDate:[NSDate date] dateStyle:NSDateFormatterMediumStyle timeStyle: NSDateFormatterShortStyle];
    // Local factory for a comment item in a given keySpace.
    AVMutableMetadataItem *(^makeCommentItem)(NSString *, NSString *, NSString *) =
        ^AVMutableMetadataItem *(NSString *keySpace, NSString *key, NSString *format) {
            AVMutableMetadataItem *item = [[[AVMutableMetadataItem alloc] init] autorelease];
            [item setKeySpace:keySpace];
            [item setKey:key];
            [item setValue:[NSString stringWithFormat:format, presetName, timestamp]];
            return item;
        };
    NSArray *newMetadata = [NSArray arrayWithObjects:
        makeCommentItem(AVMetadataKeySpaceQuickTimeUserData, AVMetadataQuickTimeUserDataKeyComment,
            @"QuickTime userdata: Exported to preset %@ using AVFoundationExporter at: %@"),
        makeCommentItem(AVMetadataKeySpaceQuickTimeMetadata, AVMetadataQuickTimeMetadataKeyComment,
            @"QuickTime metadata: Exported to preset %@ using AVFoundationExporter at: %@"),
        makeCommentItem(AVMetadataKeySpaceiTunes, AVMetadataiTunesMetadataKeyUserComment,
            @"iTunes metadata: Exported to preset %@ using AVFoundationExporter at: %@"),
        nil];
    return (sourceMetadataList == nil) ? newMetadata : [sourceMetadataList arrayByAddingObjectsFromArray:newMetadata];
}
// A simple listing of the presets available for AVFoundation export,
// grouped by category. Pairs of (heading, preset array) drive the output.
+ (void) doListPresets
{
    NSArray *sections = [NSArray arrayWithObjects:
        @"  QuickTime movie presets:",
        [NSArray arrayWithObjects:AVAssetExportPreset640x480, AVAssetExportPreset960x540, AVAssetExportPreset1280x720, AVAssetExportPreset1920x1080, nil],
        @"  Audio only preset:",
        [NSArray arrayWithObjects:AVAssetExportPresetAppleM4A, nil],
        @"  Apple device presets:",
        [NSArray arrayWithObjects:AVAssetExportPresetAppleM4VCellular, AVAssetExportPresetAppleM4ViPod, AVAssetExportPresetAppleM4V480pSD, AVAssetExportPresetAppleM4VAppleTV, AVAssetExportPresetAppleM4VWiFi, AVAssetExportPresetAppleM4V720pHD, nil],
        @"  Interim format (QuickTime movie) preset:",
        [NSArray arrayWithObjects:AVAssetExportPresetAppleProRes422LPCM, nil],
        @"  Passthrough preset:",
        [NSArray arrayWithObjects:AVAssetExportPresetPassthrough, nil],
        nil];
    printNSString(@"");
    printNSString(@"Presets available for AVFoundation export:");
    for (NSUInteger sectionIndex = 0; sectionIndex + 1 < [sections count]; sectionIndex += 2) {
        printNSString([sections objectAtIndex:sectionIndex]);
        for (NSString *presetName in [sections objectAtIndex:sectionIndex + 1]) {
            printNSString([NSString stringWithFormat:@"    %@", presetName]);
        }
    }
    printNSString(@"");
}
// A simple listing of the tracks in the asset at assetPath: one line per
// track with its index, trackID, media type, and enabled/self-contained flags.
- (void)doListTracks:(NSString *)assetPath
{
    NSURL *sourceURL = [NSURL fileURLWithPath: assetPath isDirectory: NO];
    if (sourceURL) {
        AVURLAsset *sourceAsset = [[[AVURLAsset alloc] initWithURL:sourceURL options:nil] autorelease];
        printNSString([NSString stringWithFormat:@"Listing tracks for asset from url:%@", [sourceURL path]]);
        NSInteger index = 0;
        for (AVAssetTrack *track in [sourceAsset tracks]) {
            // FIX: dropped the redundant retain/release pair around each track —
            // fast enumeration keeps the collection alive for the loop body.
            // FIX: cast NSInteger to long for the %ld varargs conversion so the
            // format is correct on both 32- and 64-bit builds.
            printNSString([ NSString stringWithFormat:@"  Track index:%ld, trackID:%d, mediaType:%@, enabled:%d, isSelfContained:%d", (long)index, [track trackID], [track mediaType], [track isEnabled], [track isSelfContained] ] );
            index++;
        }
    }
}
enum {
    // Maximum number of characters of a metadata value printed by
    // -doListMetadata: before the value is truncated with "...".
    kMaxMetadataValueLength = 80,
};
// Prints every metadata item in the asset at assetPath, grouped by metadata
// format. Long values are truncated to kMaxMetadataValueLength characters.
- (void)doListMetadata:(NSString *)assetPath
{
    // A simple listing of the metadata in the asset provided
    NSURL *sourceURL = [NSURL fileURLWithPath: assetPath isDirectory: NO];
    if (sourceURL) {
        AVURLAsset *sourceAsset = [[[AVURLAsset alloc] initWithURL:sourceURL options:nil] autorelease];
        NSLog(@"Listing metadata for asset from url:%@", [sourceURL path]);
        for (NSString *format in [sourceAsset availableMetadataFormats]) {
            NSLog(@"Metadata for format:%@", format);
            for (AVMetadataItem *item in [sourceAsset metadataForFormat:format]) {
                NSObject *key = [item key];
                NSString *itemValue = [[item value] description];
                // Truncate very long values so the listing stays readable.
                if ([itemValue length] > kMaxMetadataValueLength) {
                    itemValue = [NSString stringWithFormat:@"%@ ...", [itemValue substringToIndex:kMaxMetadataValueLength-4]];
                }
                if ([key isKindOfClass: [NSNumber class]]) {
                    // Numeric keys are rendered as four-character codes: the low
                    // four bytes of the long are reversed into a printable
                    // MacRoman string.
                    // NOTE(review): this byte ordering assumes a little-endian
                    // host where the code occupies the low 4 bytes of the long —
                    // confirm before relying on it on other architectures.
                    NSInteger longValue = [(NSNumber *)key longValue];
                    char *charSource = (char *)&longValue;
                    char charValue[5] = {0};
                    charValue[0] = charSource[3];
                    charValue[1] = charSource[2];
                    charValue[2] = charSource[1];
                    charValue[3] = charSource[0];
                    NSString *stringKey = [[[NSString alloc] initWithBytes: charValue length:4 encoding:NSMacOSRomanStringEncoding] autorelease];
                    printNSString([NSString stringWithFormat:@"  metadata item key:%@ (%ld), keySpace:%@ commonKey:%@ value:%@", stringKey, longValue, [item keySpace], [item commonKey], itemValue]);
                }
                else {
                    printNSString([NSString stringWithFormat:@"  metadata item key:%@, keySpace:%@ commonKey:%@ value:%@", [item key], [item keySpace], [item commonKey], itemValue]);
                }
            }
        }
    }
}
@end
// ---------------------------------------------------------------------------
// main
// ---------------------------------------------------------------------------
// Entry point: parse arguments into an AAPLExporter and run it.
// A nil exporter means argument parsing failed (usage was already printed).
int main (int argc, const char * argv[], const char* environ[])
{
    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
    AAPLExporter* exportObj = [[AAPLExporter alloc] initWithArgs:argc argv:argv environ:environ];
    BOOL success = (exportObj != nil) ? [exportObj run] : NO;
    [exportObj release];
    [pool release];
    // Shell convention: 0 for success, nonzero for failure.
    return success ? 0 : -1;
}
// ---------------------------------------------------------------------------
// printNSString
// ---------------------------------------------------------------------------
// Writes the UTF-8 representation of string to stdout, followed by a newline.
static void printNSString(NSString *string)
{
    const char *utf8Bytes = [string cStringUsingEncoding:NSUTF8StringEncoding];
    printf("%s\n", utf8Bytes);
}
// ---------------------------------------------------------------------------
// printArgs
// ---------------------------------------------------------------------------
// Echoes the launch arguments, space separated, on a single line.
static void printArgs(int argc, const char **argv)
{
    for (int argIndex = 0; argIndex < argc; argIndex++) {
        printf("%s ", argv[argIndex]);
    }
    printf("\n");
}

View File

@ -0,0 +1,39 @@
# AVFoundationExporter
## Description
Demonstrates use of AVFoundation export APIs with a simple command line utility. The command line application will list some information about the asset, transcode the asset in accord with one of the AVAssetExportSession presets, and demonstrates simple manipulation of the metadata that is exported with the source.
## Build Requirements
Xcode 8.0, macOS 10.12
## Runtime Requirements
OS X 10.11
## Structure
The main files associated with this project are:
Objective-C Version:
Source file: AVFoundationExporter.m
Project bundle: Objective-C/AVFoundationExporter.xcodeproj
Swift Version:
Source files: main.swift, ArgumentParsing.swift
Project bundle: Swift/AVFoundationExporter.xcodeproj
## Changes
Version 1.0
- First version.
Version 2.0
- Add Swift version.
Version 3.0
- Updated project for Swift 2.3.
Copyright (C) 2015, 2016 Apple Inc. All rights reserved.

View File

@ -0,0 +1,261 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
004967D71AE9751900B10C98 /* main.swift in Sources */ = {isa = PBXBuildFile; fileRef = 004967D61AE9751900B10C98 /* main.swift */; };
00599BBF1B1CFCC20093572A /* ArgumentParsing.swift in Sources */ = {isa = PBXBuildFile; fileRef = 00599BBE1B1CFCC20093572A /* ArgumentParsing.swift */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
004967CA1AE974A600B10C98 /* CopyFiles */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = /usr/share/man/man1/;
dstSubfolderSpec = 0;
files = (
);
runOnlyForDeploymentPostprocessing = 1;
};
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
004967CC1AE974A600B10C98 /* AVFoundationExporter */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.executable"; includeInIndex = 0; path = AVFoundationExporter; sourceTree = BUILT_PRODUCTS_DIR; };
004967D61AE9751900B10C98 /* main.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = main.swift; sourceTree = "<group>"; };
00599BBE1B1CFCC20093572A /* ArgumentParsing.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ArgumentParsing.swift; sourceTree = "<group>"; };
3EAA11C31B1B894900EC0006 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
004967C91AE974A600B10C98 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
004967C31AE974A600B10C98 = {
isa = PBXGroup;
children = (
3EAA11C31B1B894900EC0006 /* README.md */,
004967CE1AE974A600B10C98 /* AVFoundationExporter */,
004967CD1AE974A600B10C98 /* Products */,
);
sourceTree = "<group>";
};
004967CD1AE974A600B10C98 /* Products */ = {
isa = PBXGroup;
children = (
004967CC1AE974A600B10C98 /* AVFoundationExporter */,
);
name = Products;
sourceTree = "<group>";
};
004967CE1AE974A600B10C98 /* AVFoundationExporter */ = {
isa = PBXGroup;
children = (
004967D61AE9751900B10C98 /* main.swift */,
00599BBE1B1CFCC20093572A /* ArgumentParsing.swift */,
);
path = AVFoundationExporter;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
004967CB1AE974A600B10C98 /* AVFoundationExporter */ = {
isa = PBXNativeTarget;
buildConfigurationList = 004967D31AE974A600B10C98 /* Build configuration list for PBXNativeTarget "AVFoundationExporter" */;
buildPhases = (
004967C81AE974A600B10C98 /* Sources */,
004967C91AE974A600B10C98 /* Frameworks */,
004967CA1AE974A600B10C98 /* CopyFiles */,
);
buildRules = (
);
dependencies = (
);
name = AVFoundationExporter;
productName = AVFoundationExporter;
productReference = 004967CC1AE974A600B10C98 /* AVFoundationExporter */;
productType = "com.apple.product-type.tool";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
004967C41AE974A600B10C98 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0800;
TargetAttributes = {
004967CB1AE974A600B10C98 = {
CreatedOnToolsVersion = 6.3;
};
};
};
buildConfigurationList = 004967C71AE974A600B10C98 /* Build configuration list for PBXProject "AVFoundationExporter" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
);
mainGroup = 004967C31AE974A600B10C98;
productRefGroup = 004967CD1AE974A600B10C98 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
004967CB1AE974A600B10C98 /* AVFoundationExporter */,
);
};
/* End PBXProject section */
/* Begin PBXSourcesBuildPhase section */
004967C81AE974A600B10C98 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
00599BBF1B1CFCC20093572A /* ArgumentParsing.swift in Sources */,
004967D71AE9751900B10C98 /* main.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
004967D11AE974A600B10C98 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_SYMBOLS_PRIVATE_EXTERN = NO;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.10;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = macosx;
SWIFT_VERSION = 2.3;
};
name = Debug;
};
004967D21AE974A600B10C98 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.10;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = macosx;
SWIFT_VERSION = 2.3;
};
name = Release;
};
004967D41AE974A600B10C98 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
CLANG_ENABLE_MODULES = YES;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 2.3;
};
name = Debug;
};
004967D51AE974A600B10C98 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
CLANG_ENABLE_MODULES = YES;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
SWIFT_VERSION = 2.3;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
004967C71AE974A600B10C98 /* Build configuration list for PBXProject "AVFoundationExporter" */ = {
isa = XCConfigurationList;
buildConfigurations = (
004967D11AE974A600B10C98 /* Debug */,
004967D21AE974A600B10C98 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
004967D31AE974A600B10C98 /* Build configuration list for PBXNativeTarget "AVFoundationExporter" */ = {
isa = XCConfigurationList;
buildConfigurations = (
004967D41AE974A600B10C98 /* Debug */,
004967D51AE974A600B10C98 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 004967C41AE974A600B10C98 /* Project object */;
}

View File

@ -0,0 +1,246 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Parses command-line arguments and invokes the appropriate command
*/
import CoreMedia
import AVFoundation
// Use enums to enforce uniqueness of option labels.

/// Long-form command line option labels (used as "-<label> <value>").
/// The raw value is the literal label string expected on the command line.
enum LongLabel: String {
    case FileType = "filetype"
    case PresetName = "preset"
    case DeleteExistingFile = "replace"
    case LogEverything = "verbose"
    case TrimStartTime = "trim-start-time"
    case TrimEndTime = "trim-end-time"
    case FilterMetadata = "filter-metadata"
    case InjectMetadata = "inject-metadata"
}
/// Short (single-letter) command line option labels; each mirrors the
/// corresponding `LongLabel` case. Only a subset of options has a short form.
enum ShortLabel: String {
    case FileType = "f"
    case PresetName = "p"
    case DeleteExistingFile = "r"
    case LogEverything = "v"
}
/// Last path component of the running executable, used in the usage text.
let executableName = NSString(string: Process.arguments.first!).pathComponents.last!

/// Prints command line usage help covering both invocation forms
/// (export and list-presets) and every recognized option.
func usage() {
    print("Usage:")
    print("\t\(executableName) <source path> <dest path> [options]")
    print("\t\(executableName) list-presets [<source path>]")
    print("") // newline
    print("In the first form, \(executableName) performs an export of the file at <source path>, writing the result to a file at <dest path>. If no options are given, a passthrough export to a QuickTime Movie file is performed.")
    print("")
    print("In the second form, \(executableName) lists the available parameters to the -preset option. If <source path> is specified, only the presets compatible with the file at <source path> will be listed.")
    print("")
    print("Options for first form:")
    print("\t-f, -filetype <UTI>")
    print("\t\tThe file type (e.g. com.apple.m4v-video) for the output file")
    print("")
    print("\t-p, -preset <preset>")
    // BUGFIX: typo "commmand" corrected to "command" in the help text.
    print("\t\tThe preset name; use command list-presets to see available preset names")
    print("")
    print("\t-r, -replace YES")
    print("\t\tIf there is a pre-existing file at the destination location, remove it before exporting")
    print("")
    print("\t-v, -verbose YES")
    print("\t\tPrint more information about the execution")
    print("")
    print("\t-trim-start-time <seconds>")
    print("\t\tWhen specified, all media before the start time will be trimmed out")
    print("")
    print("\t-trim-end-time <seconds>")
    print("\t\tWhen specified, all media after the end time will be trimmed out")
    print("")
    print("\t-filter-metadata YES")
    print("\t\tFilter out privacy-sensitive metadata")
    print("")
    print("\t-inject-metadata YES")
    print("\t\tAdd simple metadata during export")
}
// Errors that can occur during argument parsing.

/// Failure modes of command line parsing, each with a human-readable message.
enum CommandLineError: ErrorType, CustomStringConvertible {
    case TooManyArguments
    case TooFewArguments(descriptionOfRequiredArguments: String)
    case InvalidArgument(reason: String)

    /// The message shown to the user when parsing fails.
    var description: String {
        switch self {
        case .TooManyArguments:
            return "Too many arguments"
        case .TooFewArguments(let requiredArgumentsDescription):
            return "Missing argument(s). Must specify \(requiredArgumentsDescription)."
        case .InvalidArgument(let explanation):
            return "Invalid argument. \(explanation)."
        }
    }
}
/// A set of convenience methods to use with our specific command line arguments.
/// NSUserDefaults parses "-key value" pairs off the command line, so these
/// wrappers look options up by their typed labels.
extension NSUserDefaults {
    /// String value for a long-form option, or nil when absent.
    func stringForLongLabel(longLabel: LongLabel) -> String? {
        return stringForKey(longLabel.rawValue)
    }
    /// String value for a short-form option, or nil when absent.
    func stringForShortLabel(shortLabel: ShortLabel) -> String? {
        return stringForKey(shortLabel.rawValue)
    }
    /// Boolean value for a long-form option (false when absent).
    func boolForLongLabel(longLabel: LongLabel) -> Bool {
        return boolForKey(longLabel.rawValue)
    }
    /// Boolean value for a short-form option (false when absent).
    func boolForShortLabel(shortLabel: ShortLabel) -> Bool {
        return boolForKey(shortLabel.rawValue)
    }
    /// Shared parser for time-valued options: converts an option's string
    /// value into a CMTime at a 600 timescale.
    /// FIX: this logic was duplicated verbatim in timeForLongLabel and
    /// timeForShortLabel; both now delegate here.
    /// - Throws: `CommandLineError.InvalidArgument` when the value is not numeric.
    private func timeForOptionString(timeAsString: String?) throws -> CMTime? {
        guard let timeAsString = timeAsString else {
            return nil
        }
        guard let timeAsSeconds = Float64(timeAsString) else {
            throw CommandLineError.InvalidArgument(reason: "Non-numeric time \"\(timeAsString)\".")
        }
        return CMTimeMakeWithSeconds(timeAsSeconds, 600)
    }
    /// CMTime value for a long-form option, or nil when absent.
    func timeForLongLabel(longLabel: LongLabel) throws -> CMTime? {
        return try timeForOptionString(stringForLongLabel(longLabel))
    }
    /// CMTime value for a short-form option, or nil when absent.
    func timeForShortLabel(shortLabel: ShortLabel) throws -> CMTime? {
        return try timeForOptionString(stringForShortLabel(shortLabel))
    }
}
// Lists all presets, or only the presets compatible with the file at the
// given path when one is supplied.
func listPresets(sourcePath: String? = nil) {
    let presetNames: [String]
    if let actualSourcePath = sourcePath {
        print("Presets compatible with \(actualSourcePath):.")
        let asset = AVAsset(URL: NSURL(fileURLWithPath: actualSourcePath))
        presetNames = AVAssetExportSession.exportPresetsCompatibleWithAsset(asset)
    }
    else {
        print("Available presets:")
        presetNames = AVAssetExportSession.allExportPresets()
    }
    // One preset per line, each indented with a tab.
    print("\t\(presetNames.joinWithSeparator("\n\t"))")
}
/// The main function that handles all of the command line argument parsing.
///
/// Dispatches on the first argument: prints usage for help requests, lists
/// export presets for "list-presets", and otherwise treats the first two
/// arguments as source and destination paths for an export. Calls `exit`
/// directly for usage requests and on any error.
func actOnCommandLineArguments() {
    let arguments = Process.arguments
    // arguments[0] is the executable path itself; the command starts at [1].
    let firstArgumentAfterExecutablePath: String? = (arguments.count >= 2) ? arguments[1] : nil
    if arguments.contains("-help") || arguments.contains("-h") {
        usage()
        exit(0)
    }
    do {
        switch firstArgumentAfterExecutablePath {
        case nil, "help"?:
            usage()
            exit(0)
        case "list-presets"?:
            // An optional third argument restricts the listing to presets
            // compatible with that source file.
            if arguments.count == 3 {
                listPresets(arguments[2])
            }
            else if arguments.count > 3 {
                throw CommandLineError.TooManyArguments
            }
            else {
                listPresets()
            }
        default:
            // Export form: <source path> <dest path> [options].
            guard arguments.count >= 3 else {
                throw CommandLineError.TooFewArguments(descriptionOfRequiredArguments: "source and dest paths")
            }
            let sourceURL = NSURL(fileURLWithPath: arguments[1])
            let destinationURL = NSURL(fileURLWithPath: arguments[2])
            var exporter = Exporter(sourceURL: sourceURL, destinationURL: destinationURL)
            // Labeled options (e.g. "-preset <name>") are read through
            // NSUserDefaults, which parses "-key value" pairs from argv.
            let options = NSUserDefaults.standardUserDefaults()
            if let fileType = options.stringForLongLabel(.FileType) ?? options.stringForShortLabel(.FileType) {
                exporter.destinationFileType = fileType
            }
            if let presetName = options.stringForLongLabel(.PresetName) ?? options.stringForShortLabel(.PresetName) {
                exporter.presetName = presetName
            }
            exporter.deleteExistingFile = options.boolForLongLabel(.DeleteExistingFile) || options.boolForShortLabel(.DeleteExistingFile)
            exporter.isVerbose = options.boolForLongLabel(.LogEverything) || options.boolForShortLabel(.LogEverything)
            let trimStartTime = try options.timeForLongLabel(.TrimStartTime)
            let trimEndTime = try options.timeForLongLabel(.TrimEndTime)
            // Translate the optional trim bounds into a CMTimeRange; nil
            // means no trimming at all.
            switch (trimStartTime, trimEndTime) {
            case (nil, nil):
                exporter.timeRange = nil
            case (let realStartTime?, nil):
                exporter.timeRange = CMTimeRange(start: realStartTime, duration: kCMTimePositiveInfinity)
            case (nil, let realEndTime?):
                exporter.timeRange = CMTimeRangeFromTimeToTime(kCMTimeZero, realEndTime)
            case (let realStartTime?, let realEndTime?):
                exporter.timeRange = CMTimeRangeFromTimeToTime(realStartTime, realEndTime)
            }
            exporter.filterMetadata = options.boolForLongLabel(.FilterMetadata)
            exporter.injectMetadata = options.boolForLongLabel(.InjectMetadata)
            try exporter.export()
        }
    }
    catch let error as CommandLineError {
        print("error parsing arguments: \(error).")
        print("") // newline
        usage()
        exit(1)
    }
    catch let error as NSError {
        // Build the most descriptive message available from the NSError.
        let highLevelFailure = error.localizedDescription
        var errorOutput = highLevelFailure
        if let detailedFailure = error.localizedRecoverySuggestion ?? error.localizedFailureReason {
            errorOutput += ": \(detailedFailure)"
        }
        print("error: \(errorOutput).")
        exit(1)
    }
}

View File

@ -0,0 +1,228 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Demonstrates how to use AVAssetExportSession to export and transcode media files
*/
import AVFoundation
/*
Perform all of the argument parsing / set up. The interesting AV exporting
code is done in the `Exporter` type.
*/
actOnCommandLineArguments()
/// The type that performs all of the asset exporting.
/// The type that performs all of the asset exporting.
struct Exporter {
    // MARK: Properties

    /// Location of the media file to read from.
    let sourceURL: NSURL

    /// Location the exported file is written to.
    let destinationURL: NSURL

    /// Container type to write. Defaults to a QuickTime movie.
    var destinationFileType = AVFileTypeQuickTimeMovie

    /// Export preset name; the passthrough preset copies samples without transcoding.
    var presetName = AVAssetExportPresetPassthrough

    /// Optional trim range; when nil the entire asset is exported.
    var timeRange: CMTimeRange?

    /// When true, metadata is filtered for sharing via `AVMetadataItemFilter`.
    var filterMetadata = false

    /// When true, comment metadata items describing this export are appended.
    var injectMetadata = false

    /// When true, a pre-existing file at `destinationURL` is removed before exporting.
    /// Note: the `deleteExistingFile(_:)` method below shares this name.
    var deleteExistingFile = false

    /// When true, progress and file descriptions are printed to stdout.
    var isVerbose = false

    // MARK: Initialization

    init(sourceURL: NSURL, destinationURL: NSURL) {
        self.sourceURL = sourceURL
        self.destinationURL = destinationURL
    }

    /// Runs the export synchronously; throws the export session's error on failure.
    func export() throws {
        let asset = AVURLAsset(URL: sourceURL)
        printVerbose("Exporting \"\(sourceURL)\" to \"\(destinationURL)\" (file type \(destinationFileType)), using preset \(presetName).")
        // Set up export session.
        let exportSession = try setUpExportSession(asset, destinationURL: destinationURL)
        // AVAssetExportSession will not overwrite existing files.
        try deleteExistingFile(destinationURL)
        describeSourceFile(asset)
        // Kick off the asynchronous export operation; a dispatch group lets this
        // synchronous method block (while polling progress) until it finishes.
        let group = dispatch_group_create()
        dispatch_group_enter(group)
        exportSession.exportAsynchronouslyWithCompletionHandler {
            dispatch_group_leave(group)
        }
        waitForExportToFinish(exportSession, group: group)
        switch exportSession.status {
        case .Failed:
            // `error` is non-nil when in the "failed" status.
            throw exportSession.error!
        case .Completed:
            describeDestFile(destinationURL)
            printVerbose("Export completed successfully.")
        default:
            // E.g. `.Cancelled`. The previous implementation printed
            // "completed successfully" here too, which was misleading.
            printVerbose("Export finished without completing (status \(exportSession.status.rawValue)).")
        }
    }

    /// Creates and configures the `AVAssetExportSession` from this struct's settings.
    /// Throws `CommandLineError.InvalidArgument` when `presetName` is not a valid preset.
    func setUpExportSession(asset: AVAsset, destinationURL: NSURL) throws -> AVAssetExportSession {
        guard let exportSession = AVAssetExportSession(asset: asset, presetName: presetName) else {
            throw CommandLineError.InvalidArgument(reason: "Invalid preset \(presetName).")
        }
        // Set required properties.
        exportSession.outputURL = destinationURL
        exportSession.outputFileType = destinationFileType
        if let timeRange = timeRange {
            exportSession.timeRange = timeRange
            printVerbose("Trimming to time range \(CMTimeRangeCopyDescription(nil, timeRange)!).")
        }
        if filterMetadata {
            printVerbose("Filtering metadata.")
            exportSession.metadataItemFilter = AVMetadataItemFilter.metadataItemFilterForSharing()
        }
        if injectMetadata {
            printVerbose("Injecting metadata")
            let now = NSDate()
            let currentDate = NSDateFormatter.localizedStringFromDate(now, dateStyle: .MediumStyle, timeStyle: .ShortStyle)
            // Build one comment item for each of the three metadata namespaces.
            let userDataCommentItem = AVMutableMetadataItem()
            userDataCommentItem.identifier = AVMetadataIdentifierQuickTimeUserDataComment
            userDataCommentItem.value = "QuickTime userdata: Exported to preset \(presetName) using AVFoundationExporter at: \(currentDate)."
            let metadataCommentItem = AVMutableMetadataItem()
            metadataCommentItem.identifier = AVMetadataIdentifierQuickTimeMetadataComment
            metadataCommentItem.value = "QuickTime metadata: Exported to preset \(presetName) using AVFoundationExporter at: \(currentDate)."
            let iTunesCommentItem = AVMutableMetadataItem()
            iTunesCommentItem.identifier = AVMetadataIdentifieriTunesMetadataUserComment
            iTunesCommentItem.value = "iTunes metadata: Exported to preset \(presetName) using AVFoundationExporter at: \(currentDate)."
            /*
                To avoid replacing metadata from the asset:
                1. Fetch existing metadata from the asset.
                2. Combine it with the new metadata.
                3. Set the result on the export session.
            */
            exportSession.metadata = asset.metadata + [
                userDataCommentItem,
                metadataCommentItem,
                iTunesCommentItem
            ]
        }
        return exportSession
    }

    /// Removes any file already at `destinationURL`, but only when the
    /// `deleteExistingFile` option is set and the file actually exists.
    func deleteExistingFile(destinationURL: NSURL) throws {
        let fileManager = NSFileManager()
        if let destinationPath = destinationURL.path {
            if deleteExistingFile && fileManager.fileExistsAtPath(destinationPath) {
                printVerbose("Removing pre-existing file at destination path \"\(destinationPath)\".")
                try fileManager.removeItemAtURL(destinationURL)
            }
        }
    }

    /// Prints the source asset's tracks and metadata (verbose mode only).
    func describeSourceFile(asset: AVAsset) {
        guard isVerbose else { return }
        printVerbose("Tracks in source file:")
        let trackDescriptions = trackDescriptionsForAsset(asset)
        let tracksDescription = trackDescriptions.joinWithSeparator("\n\t")
        printVerbose("\t\(tracksDescription)")
        printVerbose("Metadata in source file:")
        let metadataDescriptions = metadataDescriptionsForAsset(asset)
        let metadataDescription = metadataDescriptions.joinWithSeparator("\n\t")
        printVerbose("\t\(metadataDescription)")
    }

    // Periodically polls & prints export session progress while waiting for the export to finish.
    func waitForExportToFinish(exportSession: AVAssetExportSession, group: dispatch_group_t) {
        // Poll every 500 ms while the session is still waiting/exporting.
        while exportSession.status == .Waiting || exportSession.status == .Exporting {
            printVerbose("Progress: \(exportSession.progress * 100.0)%.")
            dispatch_group_wait(group, dispatch_time(DISPATCH_TIME_NOW, Int64(500 * NSEC_PER_MSEC)))
        }
        printVerbose("Progress: \(exportSession.progress * 100.0)%.")
    }

    /// Prints the exported file's tracks and metadata (verbose mode only).
    func describeDestFile(destinationURL: NSURL) {
        guard isVerbose else { return }
        let destinationAsset = AVAsset(URL:destinationURL)
        printVerbose("Tracks in written file:")
        let trackDescriptions = trackDescriptionsForAsset(destinationAsset)
        let tracksDescription = trackDescriptions.joinWithSeparator("\n\t")
        printVerbose("\t\(tracksDescription)")
        printVerbose("Metadata in written file:")
        let metadataDescriptions = metadataDescriptionsForAsset(destinationAsset)
        let metadataDescription = metadataDescriptions.joinWithSeparator("\n\t")
        printVerbose("\t\(metadataDescription)")
    }

    /// Returns one human-readable description line per track in `asset`.
    func trackDescriptionsForAsset(asset: AVAsset) -> [String] {
        return asset.tracks.map { track in
            let enabledString = track.enabled ? "YES" : "NO"
            let selfContainedString = track.selfContained ? "YES" : "NO"
            let formatDescriptions = track.formatDescriptions as! [CMFormatDescriptionRef]
            let formatStrings = formatDescriptions.map { formatDescription -> String in
                let mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription)
                let mediaSubTypeString = NSFileTypeForHFSTypeCode(mediaSubType)
                return "'\(track.mediaType)'/\(mediaSubTypeString)"
            }
            // Fall back to just the media type when a track has no format descriptions.
            let formatString = !formatStrings.isEmpty ? formatStrings.joinWithSeparator(", ") : "'\(track.mediaType)'"
            return "Track ID \(track.trackID): \(formatString), data length: \(track.totalSampleDataLength), enabled: \(enabledString), self-contained: \(selfContainedString)"
        }
    }

    /// Returns one human-readable description line per metadata item in `asset`.
    func metadataDescriptionsForAsset(asset: AVAsset) -> [String] {
        return asset.metadata.map { item in
            let identifier = item.identifier ?? "<no identifier>"
            let value = item.value?.description ?? "<no value>"
            return "metadata item \(identifier): \(value)"
        }
    }

    /// Prints `string` only when verbose output is enabled.
    func printVerbose(string: String) {
        if isVerbose {
            print(string)
        }
    }
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 156 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 172 KiB

View File

@ -0,0 +1,22 @@
[
{
"title" : "Video File",
"mediaResourceName" : "ElephantSeals.mov",
"thumbnailResourceName" : "LocalVideoThumb.png"
},
{
"title" : "Audio File",
"mediaResourceName" : "Drums.m4a",
"thumbnailResourceName" : "LocalAudioThumb.png"
},
{
"title" : "HTTP Live Stream",
"mediaURL" : "https://devimages.apple.com.edgekey.net/samplecode/avfoundationMedia/AVFoundationQueuePlayer_HLS2/master.m3u8",
"thumbnailResourceName" : "HLSThumb.png"
},
{
"title" : "Progressive Download",
"mediaURL" : "https://devimages.apple.com.edgekey.net/samplecode/avfoundationMedia/AVFoundationQueuePlayer_Progressive.mov",
"thumbnailResourceName" : "ProgressiveThumb.png"
}
]

Binary file not shown.

After

Width:  |  Height:  |  Size: 168 KiB

View File

@ -0,0 +1,42 @@
Sample code project: AVFoundationQueuePlayer-iOS: Using a Mixture of Local File Based Assets and HTTP Live Streaming Assets with AVFoundation
Version: 2.0
IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Copyright (C) 2016 Apple Inc. All Rights Reserved.

View File

@ -0,0 +1,383 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
D2385B461AF5181400DC8ADE /* AAPLAppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = D2385B451AF5181400DC8ADE /* AAPLAppDelegate.m */; };
D2385B4B1AF5181400DC8ADE /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = D2385B491AF5181400DC8ADE /* Main.storyboard */; };
D2385B4D1AF5181400DC8ADE /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = D2385B4C1AF5181400DC8ADE /* Images.xcassets */; };
D265BBB71B1792720005C539 /* AAPLPlayerViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = D265BBB61B1792720005C539 /* AAPLPlayerViewController.m */; };
D274898E1B1CC58A0020D82A /* AAPLPlayerView.m in Sources */ = {isa = PBXBuildFile; fileRef = D274898B1B1CC58A0020D82A /* AAPLPlayerView.m */; };
D274898F1B1CC58A0020D82A /* AAPLQueuedItemCollectionViewCell.m in Sources */ = {isa = PBXBuildFile; fileRef = D274898D1B1CC58A0020D82A /* AAPLQueuedItemCollectionViewCell.m */; };
D27489941B1CC6E00020D82A /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = D27489901B1CC6E00020D82A /* Localizable.strings */; };
D27489951B1CC6E00020D82A /* Localizable.stringsdict in Resources */ = {isa = PBXBuildFile; fileRef = D27489921B1CC6E00020D82A /* Localizable.stringsdict */; };
D27755701B0D0A4100C9D649 /* MediaManifest.json in Resources */ = {isa = PBXBuildFile; fileRef = D277556F1B0D0A4100C9D649 /* MediaManifest.json */; };
D27755771B0D0A5400C9D649 /* HLSThumb.png in Resources */ = {isa = PBXBuildFile; fileRef = D27755731B0D0A5400C9D649 /* HLSThumb.png */; };
D27755781B0D0A5400C9D649 /* LocalAudioThumb.png in Resources */ = {isa = PBXBuildFile; fileRef = D27755741B0D0A5400C9D649 /* LocalAudioThumb.png */; };
D27755791B0D0A5400C9D649 /* LocalVideoThumb.png in Resources */ = {isa = PBXBuildFile; fileRef = D27755751B0D0A5400C9D649 /* LocalVideoThumb.png */; };
D277557A1B0D0A5400C9D649 /* ProgressiveThumb.png in Resources */ = {isa = PBXBuildFile; fileRef = D27755761B0D0A5400C9D649 /* ProgressiveThumb.png */; };
D2C148E71B0FA816004F41DA /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = D2C148E61B0FA816004F41DA /* main.m */; };
D2C148EC1B0FAD79004F41DA /* Drums.m4a in Resources */ = {isa = PBXBuildFile; fileRef = D2C148EB1B0FAD79004F41DA /* Drums.m4a */; };
D2C148EE1B0FAD83004F41DA /* ElephantSeals.mov in Resources */ = {isa = PBXBuildFile; fileRef = D2C148ED1B0FAD83004F41DA /* ElephantSeals.mov */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
D2385B421AF5181400DC8ADE /* AVFoundationQueuePlayer-ObjC.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "AVFoundationQueuePlayer-ObjC.app"; sourceTree = BUILT_PRODUCTS_DIR; };
D2385B451AF5181400DC8ADE /* AAPLAppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AAPLAppDelegate.m; sourceTree = "<group>"; };
D2385B4A1AF5181400DC8ADE /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
D2385B4C1AF5181400DC8ADE /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = "<group>"; };
D2385B511AF5181400DC8ADE /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
D265BBB61B1792720005C539 /* AAPLPlayerViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AAPLPlayerViewController.m; sourceTree = "<group>"; };
D27328DB1B1E22FD004EE77D /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = "<group>"; };
D274898A1B1CC58A0020D82A /* AAPLPlayerView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AAPLPlayerView.h; sourceTree = "<group>"; };
D274898B1B1CC58A0020D82A /* AAPLPlayerView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AAPLPlayerView.m; sourceTree = "<group>"; };
D274898C1B1CC58A0020D82A /* AAPLQueuedItemCollectionViewCell.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AAPLQueuedItemCollectionViewCell.h; sourceTree = "<group>"; };
D274898D1B1CC58A0020D82A /* AAPLQueuedItemCollectionViewCell.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AAPLQueuedItemCollectionViewCell.m; sourceTree = "<group>"; };
D27489911B1CC6E00020D82A /* en */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = en; path = en.lproj/Localizable.strings; sourceTree = "<group>"; };
D27489931B1CC6E00020D82A /* en */ = {isa = PBXFileReference; lastKnownFileType = text.xml; name = en; path = en.lproj/Localizable.stringsdict; sourceTree = "<group>"; };
D277556F1B0D0A4100C9D649 /* MediaManifest.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; name = MediaManifest.json; path = ../Common/MediaManifest.json; sourceTree = SOURCE_ROOT; };
D27755731B0D0A5400C9D649 /* HLSThumb.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; name = HLSThumb.png; path = ../Common/HLSThumb.png; sourceTree = SOURCE_ROOT; };
D27755741B0D0A5400C9D649 /* LocalAudioThumb.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; name = LocalAudioThumb.png; path = ../Common/LocalAudioThumb.png; sourceTree = SOURCE_ROOT; };
D27755751B0D0A5400C9D649 /* LocalVideoThumb.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; name = LocalVideoThumb.png; path = ../Common/LocalVideoThumb.png; sourceTree = SOURCE_ROOT; };
D27755761B0D0A5400C9D649 /* ProgressiveThumb.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; name = ProgressiveThumb.png; path = ../Common/ProgressiveThumb.png; sourceTree = SOURCE_ROOT; };
D2C148E61B0FA816004F41DA /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
D2C148E91B0FAAE1004F41DA /* AAPLAppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AAPLAppDelegate.h; sourceTree = "<group>"; };
D2C148EA1B0FAAF2004F41DA /* AAPLPlayerViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AAPLPlayerViewController.h; sourceTree = "<group>"; };
D2C148EB1B0FAD79004F41DA /* Drums.m4a */ = {isa = PBXFileReference; lastKnownFileType = file; name = Drums.m4a; path = ../Common/Drums.m4a; sourceTree = SOURCE_ROOT; };
D2C148ED1B0FAD83004F41DA /* ElephantSeals.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; name = ElephantSeals.mov; path = ../Common/ElephantSeals.mov; sourceTree = SOURCE_ROOT; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
D2385B3F1AF5181400DC8ADE /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
D2385B391AF5181400DC8ADE = {
isa = PBXGroup;
children = (
D27328DB1B1E22FD004EE77D /* README.md */,
D2385B441AF5181400DC8ADE /* AVFoundationQueuePlayer-iOS */,
D2385B431AF5181400DC8ADE /* Products */,
);
sourceTree = "<group>";
};
D2385B431AF5181400DC8ADE /* Products */ = {
isa = PBXGroup;
children = (
D2385B421AF5181400DC8ADE /* AVFoundationQueuePlayer-ObjC.app */,
);
name = Products;
sourceTree = "<group>";
};
D2385B441AF5181400DC8ADE /* AVFoundationQueuePlayer-iOS */ = {
isa = PBXGroup;
children = (
D2C148E91B0FAAE1004F41DA /* AAPLAppDelegate.h */,
D2385B451AF5181400DC8ADE /* AAPLAppDelegate.m */,
D2C148EA1B0FAAF2004F41DA /* AAPLPlayerViewController.h */,
D265BBB61B1792720005C539 /* AAPLPlayerViewController.m */,
D274898A1B1CC58A0020D82A /* AAPLPlayerView.h */,
D274898B1B1CC58A0020D82A /* AAPLPlayerView.m */,
D274898C1B1CC58A0020D82A /* AAPLQueuedItemCollectionViewCell.h */,
D274898D1B1CC58A0020D82A /* AAPLQueuedItemCollectionViewCell.m */,
D2385B491AF5181400DC8ADE /* Main.storyboard */,
D2385B4C1AF5181400DC8ADE /* Images.xcassets */,
D2385B511AF5181400DC8ADE /* Info.plist */,
D27489901B1CC6E00020D82A /* Localizable.strings */,
D27489921B1CC6E00020D82A /* Localizable.stringsdict */,
D2C148E81B0FA81E004F41DA /* Supporting Files */,
D28778461B011B1900E31BDD /* Common */,
);
path = "AVFoundationQueuePlayer-iOS";
sourceTree = "<group>";
};
D28778461B011B1900E31BDD /* Common */ = {
isa = PBXGroup;
children = (
D2C148ED1B0FAD83004F41DA /* ElephantSeals.mov */,
D2C148EB1B0FAD79004F41DA /* Drums.m4a */,
D277556F1B0D0A4100C9D649 /* MediaManifest.json */,
D28778471B011B2800E31BDD /* thumbnails */,
);
name = Common;
sourceTree = "<group>";
};
D28778471B011B2800E31BDD /* thumbnails */ = {
isa = PBXGroup;
children = (
D27755731B0D0A5400C9D649 /* HLSThumb.png */,
D27755741B0D0A5400C9D649 /* LocalAudioThumb.png */,
D27755751B0D0A5400C9D649 /* LocalVideoThumb.png */,
D27755761B0D0A5400C9D649 /* ProgressiveThumb.png */,
);
name = thumbnails;
sourceTree = "<group>";
};
D2C148E81B0FA81E004F41DA /* Supporting Files */ = {
isa = PBXGroup;
children = (
D2C148E61B0FA816004F41DA /* main.m */,
);
name = "Supporting Files";
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
D2385B411AF5181400DC8ADE /* AVFoundationQueuePlayer-iOS */ = {
isa = PBXNativeTarget;
buildConfigurationList = D2385B5F1AF5181400DC8ADE /* Build configuration list for PBXNativeTarget "AVFoundationQueuePlayer-iOS" */;
buildPhases = (
D2385B3E1AF5181400DC8ADE /* Sources */,
D2385B3F1AF5181400DC8ADE /* Frameworks */,
D2385B401AF5181400DC8ADE /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = "AVFoundationQueuePlayer-iOS";
productName = "AVFoundationQueuePlayer-iOS";
productReference = D2385B421AF5181400DC8ADE /* AVFoundationQueuePlayer-ObjC.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
D2385B3A1AF5181400DC8ADE /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0700;
ORGANIZATIONNAME = "Apple Inc.";
TargetAttributes = {
D2385B411AF5181400DC8ADE = {
CreatedOnToolsVersion = 7.0;
};
};
};
buildConfigurationList = D2385B3D1AF5181400DC8ADE /* Build configuration list for PBXProject "AVFoundationQueuePlayer-iOS" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = D2385B391AF5181400DC8ADE;
productRefGroup = D2385B431AF5181400DC8ADE /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
D2385B411AF5181400DC8ADE /* AVFoundationQueuePlayer-iOS */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
D2385B401AF5181400DC8ADE /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
D2C148EC1B0FAD79004F41DA /* Drums.m4a in Resources */,
D2C148EE1B0FAD83004F41DA /* ElephantSeals.mov in Resources */,
D27489941B1CC6E00020D82A /* Localizable.strings in Resources */,
D277557A1B0D0A5400C9D649 /* ProgressiveThumb.png in Resources */,
D2385B4B1AF5181400DC8ADE /* Main.storyboard in Resources */,
D27489951B1CC6E00020D82A /* Localizable.stringsdict in Resources */,
D27755771B0D0A5400C9D649 /* HLSThumb.png in Resources */,
D27755701B0D0A4100C9D649 /* MediaManifest.json in Resources */,
D27755791B0D0A5400C9D649 /* LocalVideoThumb.png in Resources */,
D27755781B0D0A5400C9D649 /* LocalAudioThumb.png in Resources */,
D2385B4D1AF5181400DC8ADE /* Images.xcassets in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
D2385B3E1AF5181400DC8ADE /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
D274898F1B1CC58A0020D82A /* AAPLQueuedItemCollectionViewCell.m in Sources */,
D2385B461AF5181400DC8ADE /* AAPLAppDelegate.m in Sources */,
D274898E1B1CC58A0020D82A /* AAPLPlayerView.m in Sources */,
D265BBB71B1792720005C539 /* AAPLPlayerViewController.m in Sources */,
D2C148E71B0FA816004F41DA /* main.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
D2385B491AF5181400DC8ADE /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
D2385B4A1AF5181400DC8ADE /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
D27489901B1CC6E00020D82A /* Localizable.strings */ = {
isa = PBXVariantGroup;
children = (
D27489911B1CC6E00020D82A /* en */,
);
name = Localizable.strings;
sourceTree = "<group>";
};
D27489921B1CC6E00020D82A /* Localizable.stringsdict */ = {
isa = PBXVariantGroup;
children = (
D27489931B1CC6E00020D82A /* en */,
);
name = Localizable.stringsdict;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
D2385B5D1AF5181400DC8ADE /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_SYMBOLS_PRIVATE_EXTERN = NO;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
D2385B5E1AF5181400DC8ADE /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
D2385B601AF5181400DC8ADE /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_IDENTITY = "iPhone Developer";
INFOPLIST_FILE = "AVFoundationQueuePlayer-iOS/Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_NAME = "AVFoundationQueuePlayer-ObjC";
PROVISIONING_PROFILE = "";
};
name = Debug;
};
D2385B611AF5181400DC8ADE /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_IDENTITY = "iPhone Developer";
INFOPLIST_FILE = "AVFoundationQueuePlayer-iOS/Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_NAME = "AVFoundationQueuePlayer-ObjC";
PROVISIONING_PROFILE = "";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
D2385B3D1AF5181400DC8ADE /* Build configuration list for PBXProject "AVFoundationQueuePlayer-iOS" */ = {
isa = XCConfigurationList;
buildConfigurations = (
D2385B5D1AF5181400DC8ADE /* Debug */,
D2385B5E1AF5181400DC8ADE /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
D2385B5F1AF5181400DC8ADE /* Build configuration list for PBXNativeTarget "AVFoundationQueuePlayer-iOS" */ = {
isa = XCConfigurationList;
buildConfigurations = (
D2385B601AF5181400DC8ADE /* Debug */,
D2385B611AF5181400DC8ADE /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = D2385B3A1AF5181400DC8ADE /* Project object */;
}

View File

@ -0,0 +1,16 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Application delegate.
*/
@import UIKit;

// Minimal application delegate for the sample.
@interface AAPLAppDelegate : UIResponder <UIApplicationDelegate>

// The app's main window.
@property (strong, nonatomic) UIWindow *window;

@end

View File

@ -0,0 +1,12 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Application delegate.
*/
#import "AAPLAppDelegate.h"

// All UIApplicationDelegate methods are optional; this sample needs no custom
// launch or lifecycle handling, so the implementation is intentionally empty.
@implementation AAPLAppDelegate
@end

View File

@ -0,0 +1,16 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
View containing an AVPlayerLayer.
*/
@import UIKit;

// Forward declarations so this header does not need to import AVFoundation.
// AVPlayerLayer was previously undeclared here, making the header
// non-self-contained for importers that had not imported AVFoundation first.
@class AVPlayer;
@class AVPlayerLayer;

@interface AAPLPlayerView : UIView

// The player whose output this view displays.
@property AVPlayer *player;

// The view's layer, typed as AVPlayerLayer.
@property (readonly) AVPlayerLayer *playerLayer;

@end

View File

@ -0,0 +1,34 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
View containing an AVPlayerLayer.
*/
@import Foundation;
@import AVFoundation;
#import "AAPLPlayerView.h"
@implementation AAPLPlayerView

// Back this view with an AVPlayerLayer instead of a plain CALayer.
+ (Class)layerClass {
    return [AVPlayerLayer class];
}

// The view's backing layer, typed as the AVPlayerLayer that +layerClass guarantees.
- (AVPlayerLayer *)playerLayer {
    return (AVPlayerLayer *)self.layer;
}

// The player property simply forwards to the backing layer's player.
- (AVPlayer *)player {
    return self.playerLayer.player;
}

- (void)setPlayer:(AVPlayer *)player {
    self.playerLayer.player = player;
}

@end

View File

@ -0,0 +1,32 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
View controller containing a player view and basic playback controls.
*/
@import UIKit;
// CMTime is a struct and cannot be forward-declared; it was previously
// referenced here with only UIKit imported, leaving the header non-self-contained.
@import CoreMedia;

@class AVQueuePlayer;
@class AAPLPlayerView;

@interface AAPLPlayerViewController : UIViewController

// The queue player driving playback.
@property (readonly) AVQueuePlayer *player;

/*
@{
    NSURL(asset URL) : @{
        NSString(title) : NSString,
        NSString(thumbnail) : UIImage
    }
}
*/
@property NSMutableDictionary *loadedAssets;

// Current playback position; setting it seeks the player.
@property CMTime currentTime;

// Duration of the current item.
@property (readonly) CMTime duration;

// Playback rate of the player.
@property float rate;

@end

View File

@ -0,0 +1,491 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
View controller containing a player view and basic playback controls.
*/
@import Foundation;
@import AVFoundation;
@import CoreMedia.CMTime;
#import "AAPLPlayerViewController.h"
#import "AAPLPlayerView.h"
#import "AAPLQueuedItemCollectionViewCell.h"
// Private properties (class extension).
@interface AAPLPlayerViewController ()
{
    // Backing storage for the lazily-created `player` property.
    AVQueuePlayer *_player;
    AVURLAsset *_asset;
    /*
    A token obtained from calling `player`'s `addPeriodicTimeObserverForInterval(_:queue:usingBlock:)`
    method.
    */
    id<NSObject> _timeObserverToken;
    AVPlayerItem *_playerItem;
}
// The player view's backing AVPlayerLayer.
@property (readonly) AVPlayerLayer *playerLayer;
// Maps asset URLs to their title/thumbnail information.
@property NSMutableDictionary *assetTitlesAndThumbnailsByURL;
// Formatter to provide formatted value for seconds displayed in `startTimeLabel` and `durationLabel`.
@property (readonly) NSDateComponentsFormatter *timeRemainingFormatter;
// Playback-control outlets wired up in the storyboard.
@property (weak) IBOutlet UISlider *timeSlider;
@property (weak) IBOutlet UILabel *startTimeLabel;
@property (weak) IBOutlet UILabel *durationLabel;
@property (weak) IBOutlet UIButton *rewindButton;
@property (weak) IBOutlet UIButton *playPauseButton;
@property (weak) IBOutlet UIButton *fastForwardButton;
@property (weak) IBOutlet UIButton *clearButton;
@property (weak) IBOutlet UICollectionView *collectionView;
@property (weak) IBOutlet UILabel *queueLabel;
@property (weak) IBOutlet AAPLPlayerView *playerView;
@end
@implementation AAPLPlayerViewController
// MARK: - View Controller
/*
KVO context used to differentiate KVO callbacks for this class versus other
classes in its class hierarchy.
*/
static int AAPLPlayerViewControllerKVOContext = 0;
// Registers KVO observers, attaches the player to the view's layer, starts
// loading the asset manifest, and installs a periodic time observer.
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    /*
    Update the UI when these player properties change.
    Use the context parameter to distinguish KVO for our particular observers and not
    those destined for a subclass that also happens to be observing these properties.
    */
    [self addObserver:self forKeyPath:@"player.currentItem.duration" options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial context:&AAPLPlayerViewControllerKVOContext];
    [self addObserver:self forKeyPath:@"player.rate" options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial context:&AAPLPlayerViewControllerKVOContext];
    [self addObserver:self forKeyPath:@"player.currentItem.status" options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial context:&AAPLPlayerViewControllerKVOContext];
    [self addObserver:self forKeyPath:@"player.currentItem" options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial context:&AAPLPlayerViewControllerKVOContext];
    // Route the player's video output into this controller's player view.
    self.playerView.playerLayer.player = self.player;
    /*
    Read the list of assets we'll be using from a JSON file.
    */
    [self asynchronouslyLoadURLAssetsWithManifestURL:[[NSBundle mainBundle] URLForResource:@"MediaManifest" withExtension:@"json"]];
    // Use a weak self variable to avoid a retain cycle in the block.
    AAPLPlayerViewController __weak *weakSelf = self;
    // Update the slider and elapsed-time label once per second of playback;
    // the returned token is kept so the observer can be removed on disappear.
    _timeObserverToken = [self.player addPeriodicTimeObserverForInterval:CMTimeMake(1, 1) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
        double timeElapsed = CMTimeGetSeconds(time);
        weakSelf.timeSlider.value = timeElapsed;
        weakSelf.startTimeLabel.text = [weakSelf createTimeString: timeElapsed];
    }];
}
// Tear down exactly what -viewWillAppear: set up: the periodic time observer,
// playback, and the four KVO registrations (removing an unregistered observer
// would raise, so this must stay symmetric with the add calls).
- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    if (_timeObserverToken) {
        [self.player removeTimeObserver:_timeObserverToken];
        // Clear the token so a second disappearance doesn't double-remove.
        _timeObserverToken = nil;
    }
    [self.player pause];
    [self removeObserver:self forKeyPath:@"player.currentItem.duration" context:&AAPLPlayerViewControllerKVOContext];
    [self removeObserver:self forKeyPath:@"player.rate" context:&AAPLPlayerViewControllerKVOContext];
    [self removeObserver:self forKeyPath:@"player.currentItem.status" context:&AAPLPlayerViewControllerKVOContext];
    [self removeObserver:self forKeyPath:@"player.currentItem" context:&AAPLPlayerViewControllerKVOContext];
}
// MARK: - Properties
// Asset keys whose values must load (and test) successfully before an asset
// is considered usable for playback.
+ (NSArray *)assetKeysRequiredToPlay {
    NSArray *requiredKeys = [NSArray arrayWithObjects:@"playable", @"hasProtectedContent", nil];
    return requiredKeys;
}
// Lazily creates the AVQueuePlayer that backs this controller on first access.
- (AVQueuePlayer *)player {
    if (_player == nil) {
        _player = [AVQueuePlayer new];
    }
    return _player;
}
// Current playhead position, forwarded from the player.
- (CMTime)currentTime {
    return [self.player currentTime];
}
// Seek with zero tolerance so the playhead lands exactly on the requested time.
- (void)setCurrentTime:(CMTime)newCurrentTime {
    [self.player seekToTime:newCurrentTime
            toleranceBefore:kCMTimeZero
             toleranceAfter:kCMTimeZero];
}
// Duration of the current item, or kCMTimeZero when nothing is queued.
- (CMTime)duration {
    AVPlayerItem *currentItem = self.player.currentItem;
    if (currentItem != nil) {
        return currentItem.duration;
    }
    return kCMTimeZero;
}
// Playback rate, forwarded from the player (0.0 = paused, 1.0 = normal speed).
- (float)rate {
    return [self.player rate];
}
// Forwards the requested rate to the player; setting a non-zero rate starts playback.
- (void)setRate:(float)newRate {
    [self.player setRate:newRate];
}
// Convenience accessor for the player view's backing AVPlayerLayer.
- (AVPlayerLayer *)playerLayer {
    return [self.playerView playerLayer];
}
// Builds a zero-padded minutes:seconds formatter; note this allocates a fresh
// formatter on every access (used by -createTimeString:).
- (NSDateComponentsFormatter *)timeRemainingFormatter {
    NSDateComponentsFormatter *componentsFormatter = [[NSDateComponentsFormatter alloc] init];
    componentsFormatter.allowedUnits = NSCalendarUnitMinute | NSCalendarUnitSecond;
    componentsFormatter.zeroFormattingBehavior = NSDateComponentsFormatterZeroFormattingBehaviorPad;
    return componentsFormatter;
}
// MARK: - Asset Loading
/*
    Prepare an AVAsset for use on a background thread. When the minimum set
    of properties we require (`assetKeysRequiredToPlay`) are loaded then add
    the asset to the `assetTitlesAndThumbnails` dictionary. We'll use that
    dictionary to populate the "Add Item" button popover.
*/
- (void)asynchronouslyLoadURLAsset:(AVURLAsset *)asset title:(NSString *)title thumbnailResourceName:(NSString *)thumbnailResourceName {
    /*
        Using AVAsset now runs the risk of blocking the current thread (the
        main UI thread) whilst I/O happens to populate the properties. It's
        prudent to defer our work until the properties we need have been loaded.
    */
    [asset loadValuesAsynchronouslyForKeys:AAPLPlayerViewController.assetKeysRequiredToPlay completionHandler:^{
        /*
            The asset invokes its completion handler on an arbitrary queue.
            To avoid multiple threads using our internal state at the same time
            we'll elect to use the main thread at all times, let's dispatch
            our handler to the main queue.
        */
        dispatch_async(dispatch_get_main_queue(), ^{
            /*
                This method is called when the `AVAsset` for our URL has
                completed the loading of the values of the specified array
                of keys.
            */
            /*
                Test whether the values of each of the keys we need have been
                successfully loaded.
            */
            for (NSString *key in self.class.assetKeysRequiredToPlay) {
                NSError *error = nil;
                if ([asset statusOfValueForKey:key error:&error] == AVKeyValueStatusFailed) {
                    NSString *stringFormat = NSLocalizedString(@"error.asset_%@_key_%@_failed.description", @"Can't use this AVAsset because one of it's keys failed to load");
                    NSString *message = [NSString localizedStringWithFormat:stringFormat, title, key];
                    [self handleErrorWithMessage:message error:error];
                    return;
                }
            }
            // We can't play this asset.
            if (!asset.playable || asset.hasProtectedContent) {
                NSString *stringFormat = NSLocalizedString(@"error.asset_%@_not_playable.description", @"Can't use this AVAsset because it isn't playable or has protected content");
                NSString *message = [NSString localizedStringWithFormat:stringFormat, title];
                [self handleErrorWithMessage:message error:nil];
                return;
            }
            /*
                We can play this asset. Create a new AVPlayerItem and make it
                our player's current item.
            */
            if (!self.loadedAssets) {
                self.loadedAssets = [NSMutableDictionary dictionary];
            }
            self.loadedAssets[title] = asset;

            NSString *path = [[NSBundle mainBundle] pathForResource:[thumbnailResourceName stringByDeletingPathExtension] ofType:[thumbnailResourceName pathExtension]];
            UIImage *thumbnail = [[UIImage alloc] initWithContentsOfFile:path];
            // Guard against a missing or unreadable thumbnail resource: inserting
            // a nil value into the dictionary literal below would raise
            // NSInvalidArgumentException and crash. Fall back to an empty image.
            if (thumbnail == nil) {
                thumbnail = [[UIImage alloc] init];
            }
            if (!self.assetTitlesAndThumbnailsByURL) {
                self.assetTitlesAndThumbnailsByURL = [NSMutableDictionary dictionary];
            }
            self.assetTitlesAndThumbnailsByURL[asset.URL] = @{ @"title" : title, @"thumbnail" : thumbnail };
        });
    }];
}
/*
    Read the asset URLs, titles and thumbnail resource names from a JSON manifest
    file - then load each asset.
*/
- (void)asynchronouslyLoadURLAssetsWithManifestURL:(NSURL *)jsonURL
{
    NSArray *assetsArray = nil;
    NSData *jsonData = [[NSData alloc] initWithContentsOfURL:jsonURL];
    if (jsonData) {
        assetsArray = (NSArray *)[NSJSONSerialization JSONObjectWithData:jsonData options:0 error:nil];
        if (!assetsArray) {
            [self handleErrorWithMessage:NSLocalizedString(@"error.json_parse_failed.description", @"Failed to parse the assets manifest JSON") error:nil];
        }
    }
    else {
        [self handleErrorWithMessage:NSLocalizedString(@"error.json_open_failed.description", @"Failed to open the assets manifest JSON") error:nil];
    }
    // (Fast enumeration over nil is a no-op, so the error paths above fall
    // through harmlessly.)
    for (NSDictionary *assetDict in assetsArray) {
        NSURL *mediaURL = nil;
        // Each manifest entry supplies either a bundled resource name or an
        // absolute URL string.
        NSString *optionalResourceName = assetDict[@"mediaResourceName"];
        NSString *optionalURLString = assetDict[@"mediaURL"];
        if (optionalResourceName) {
            mediaURL = [[NSBundle mainBundle] URLForResource:[optionalResourceName stringByDeletingPathExtension] withExtension:optionalResourceName.pathExtension];
        }
        else if (optionalURLString) {
            mediaURL = [NSURL URLWithString:optionalURLString];
        }
        // Skip malformed entries (missing resource, unresolvable URL):
        // +[AVURLAsset URLAssetWithURL:options:] raises on a nil URL.
        if (mediaURL == nil) {
            continue;
        }
        [self asynchronouslyLoadURLAsset:[AVURLAsset URLAssetWithURL:mediaURL options:nil]
                                   title:assetDict[@"title"]
                   thumbnailResourceName:assetDict[@"thumbnailResourceName"]];
    }
}
// MARK: - IBActions
// Toggle playback. Any rate other than 1.0 (paused, rewinding, fast-forwarding)
// counts as "not playing forward" and resumes normal-speed play.
- (IBAction)playPauseButtonWasPressed:(UIButton *)sender {
    if (self.player.rate != 1.0) {
        // Not playing forward; so play.
        if (CMTIME_COMPARE_INLINE(self.currentTime, ==, self.duration)) {
            // At end; so go back to the beginning before playing.
            self.currentTime = kCMTimeZero;
        }
        [self.player play];
    } else {
        // Playing; so pause.
        [self.player pause];
    }
}
// Step the rate down by 2.0 per press, clamped so we rewind no faster than -2.0x.
- (IBAction)rewindButtonWasPressed:(UIButton *)sender {
    float loweredRate = self.player.rate - 2.0;
    self.rate = MAX(loweredRate, -2.0); // rewind no faster than -2.0
}
// Step the rate up by 2.0 per press, clamped so we fast-forward no faster than 2.0x.
- (IBAction)fastForwardButtonWasPressed:(UIButton *)sender {
    float raisedRate = self.player.rate + 2.0;
    self.rate = MIN(raisedRate, 2.0); // fast forward no faster than 2.0
}
// Scrub: map the slider's value (seconds) onto the playhead with 1 ms precision.
- (IBAction)timeSliderDidChange:(UISlider *)sender {
    CMTime targetTime = CMTimeMakeWithSeconds(sender.value, 1000);
    self.currentTime = targetTime;
}
// Present the alert as a popover anchored on the button that triggered it
// (required on iPad for action-sheet-style alerts).
- (void)presentModalPopoverAlertController:(UIAlertController *)alertController sender:(UIButton *)sender {
    alertController.modalPresentationStyle = UIModalPresentationPopover;
    UIPopoverPresentationController *popover = alertController.popoverPresentationController;
    popover.sourceView = sender;
    popover.sourceRect = sender.bounds;
    popover.permittedArrowDirections = UIPopoverArrowDirectionAny;
    [self presentViewController:alertController animated:true completion:nil];
}
// Presents an action sheet listing every loaded asset; choosing one appends a
// new AVPlayerItem for it to the end of the queue.
- (IBAction)addItemToQueueButtonPressed:(UIButton *)sender {
    NSString *alertTitle = NSLocalizedString(@"popover.title.addItem", @"Title of popover that adds items to the queue");
    NSString *alertMessage = NSLocalizedString(@"popover.message.addItem", @"Message on popover that adds items to the queue");
    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:alertTitle message:alertMessage preferredStyle:UIAlertControllerStyleActionSheet];
    // Populate the sheet with the titles of the assets we have loaded.
    for (NSString *loadedAssetTitle in self.loadedAssets.allKeys) {
        AVAsset *loadedAsset = self.loadedAssets[loadedAssetTitle];
        // Capture self weakly so the action handler does not retain the controller.
        AAPLPlayerViewController __weak *weakSelf = self;
        [alertController addAction:[UIAlertAction actionWithTitle:loadedAssetTitle style:UIAlertActionStyleDefault handler:
            ^(UIAlertAction *action){
                NSArray *oldItemsArray = [weakSelf.player items];
                AVPlayerItem *newPlayerItem = [AVPlayerItem playerItemWithAsset:loadedAsset];
                [weakSelf.player insertItem:newPlayerItem afterItem:nil];
                // Use weakSelf here as well: referencing `self` inside this block
                // would capture the controller strongly and defeat the weak
                // reference declared above.
                [weakSelf queueDidChangeFromArray:oldItemsArray toArray:[weakSelf.player items]];
            }]];
    }
    NSString *cancelActionTitle = NSLocalizedString(@"popover.title.cancel", @"Title of popover cancel action");
    [alertController addAction:[UIAlertAction actionWithTitle:cancelActionTitle style:UIAlertActionStyleCancel handler:nil]];
    [self presentModalPopoverAlertController:alertController sender:sender];
}
// Presents a confirmation sheet; on confirmation, empties the player's queue.
- (IBAction)clearQueueButtonWasPressed:(UIButton *)sender {
    NSString *alertTitle = NSLocalizedString(@"popover.title.clear", @"Title of popover that clears the queue");
    NSString *alertMessage = NSLocalizedString(@"popover.message.clear", @"Message on popover that clears the queue");
    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:alertTitle message:alertMessage preferredStyle:UIAlertControllerStyleActionSheet];
    // Capture self weakly so the action handler does not retain the controller.
    AAPLPlayerViewController __weak *weakSelf = self;
    [alertController addAction:[UIAlertAction actionWithTitle:@"Clear Queue" style:UIAlertActionStyleDestructive handler:
        ^(UIAlertAction *action){
            NSArray *oldItemsArray = [weakSelf.player items];
            [weakSelf.player removeAllItems];
            // Use weakSelf here as well: referencing `self` inside this block
            // would capture the controller strongly and defeat the weak
            // reference declared above.
            [weakSelf queueDidChangeFromArray:oldItemsArray toArray:[weakSelf.player items]];
        }]];
    NSString *cancelActionTitle = NSLocalizedString(@"popover.title.cancel", @"Title of popover cancel action");
    [alertController addAction:[UIAlertAction actionWithTitle:cancelActionTitle style:UIAlertActionStyleCancel handler:nil]];
    [self presentModalPopoverAlertController:alertController sender:sender];
}
// MARK: - KV Observation
// Update our UI when player or player.currentItem changes
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if (context != &AAPLPlayerViewControllerKVOContext) {
        // KVO isn't for us: forward to super, which may route it to a
        // superclass observer or raise for an unexpected notification.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
        return;
    }
    if ([keyPath isEqualToString:@"player.currentItem"]) {
        // The queue head changed; refresh the queue UI. Passing nil as the old
        // array is fine — -queueDidChangeFromArray:toArray: only reads the new one.
        [self queueDidChangeFromArray:nil toArray:[self.player items]];
    }
    else if ([keyPath isEqualToString:@"player.currentItem.duration"]) {
        // Update timeSlider and enable/disable controls when duration > 0.0
        // Handle NSNull value for NSKeyValueChangeNewKey, i.e. when player.currentItem is nil
        NSValue *newDurationAsValue = change[NSKeyValueChangeNewKey];
        CMTime newDuration = [newDurationAsValue isKindOfClass:[NSValue class]] ? newDurationAsValue.CMTimeValue : kCMTimeZero;
        // kCMTimeIndefinite (e.g. a not-yet-loaded item) is non-numeric and
        // therefore treated as "no valid duration".
        BOOL hasValidDuration = CMTIME_IS_NUMERIC(newDuration) && newDuration.value != 0;
        double currentTime = hasValidDuration ? CMTimeGetSeconds(self.currentTime) : 0.0;
        double newDurationSeconds = hasValidDuration ? CMTimeGetSeconds(newDuration) : 0.0;
        self.timeSlider.maximumValue = newDurationSeconds;
        self.timeSlider.value = currentTime;
        self.rewindButton.enabled = hasValidDuration;
        self.playPauseButton.enabled = hasValidDuration;
        self.fastForwardButton.enabled = hasValidDuration;
        self.timeSlider.enabled = hasValidDuration;
        self.startTimeLabel.enabled = hasValidDuration;
        self.startTimeLabel.text = [self createTimeString:currentTime];
        self.durationLabel.enabled = hasValidDuration;
        self.durationLabel.text = [self createTimeString:newDurationSeconds];
    }
    else if ([keyPath isEqualToString:@"player.rate"]) {
        // Update playPauseButton image: show Pause only at normal forward speed;
        // any other rate (including fast-forward/rewind) shows Play.
        double newRate = [change[NSKeyValueChangeNewKey] doubleValue];
        UIImage *buttonImage = (newRate == 1.0) ? [UIImage imageNamed:@"PauseButton"] : [UIImage imageNamed:@"PlayButton"];
        [self.playPauseButton setImage:buttonImage forState:UIControlStateNormal];
    }
    else if ([keyPath isEqualToString:@"player.currentItem.status"]) {
        // Display an error if status becomes Failed
        // Handle NSNull value for NSKeyValueChangeNewKey, i.e. when player.currentItem is nil
        NSNumber *newStatusAsNumber = change[NSKeyValueChangeNewKey];
        AVPlayerItemStatus newStatus = [newStatusAsNumber isKindOfClass:[NSNumber class]] ? newStatusAsNumber.integerValue : AVPlayerItemStatusUnknown;
        if (newStatus == AVPlayerItemStatusFailed) {
            [self handleErrorWithMessage:self.player.currentItem.error.localizedDescription error:self.player.currentItem.error];
        }
    } else {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
// Trigger KVO for anyone observing our properties affected by player and player.currentItem
+ (NSSet *)keyPathsForValuesAffectingValueForKey:(NSString *)key {
    if ([key isEqualToString:@"duration"]) {
        return [NSSet setWithArray:@[ @"player.currentItem.duration" ]];
    } else if ([key isEqualToString:@"currentTime"]) {
        // NOTE(review): `currentTime` on AVPlayerItem is exposed as a method,
        // not a KVO-compliant property — confirm this key path actually fires.
        return [NSSet setWithArray:@[ @"player.currentItem.currentTime" ]];
    } else if ([key isEqualToString:@"rate"]) {
        return [NSSet setWithArray:@[ @"player.rate" ]];
    } else {
        return [super keyPathsForValuesAffectingValueForKey:key];
    }
}
// player.items is not KV observable so we need to call this function every time
// the queue changes. (The old-items array is currently unused but kept for the
// existing call sites.)
- (void)queueDidChangeFromArray:(NSArray *)oldPlayerItems toArray:(NSArray *)newPlayerItems {
    NSUInteger itemCount = newPlayerItems.count;
    if (itemCount == 0) {
        self.queueLabel.text = NSLocalizedString(@"label.queue.empty", @"Queue is empty");
    }
    else {
        NSString *stringFormat = NSLocalizedString(@"label.queue.%lu items", @"Queue of n item(s)");
        self.queueLabel.text = [NSString localizedStringWithFormat:stringFormat, itemCount];
    }
    // Clearing makes no sense on an already-empty queue.
    self.clearButton.enabled = (itemCount != 0);
    [self.collectionView reloadData];
}
// MARK: - Error Handling
// Logs the failure, then surfaces it to the user in a one-button alert.
// Falls back to a generic localized message when none is supplied.
- (void)handleErrorWithMessage:(NSString *)message error:(NSError *)error {
    NSLog(@"Error occurred with message: %@, error: %@.", message, error);
    NSString *alertTitle = NSLocalizedString(@"alert.error.title", @"Alert title for errors");
    NSString *defaultAlertMessage = NSLocalizedString(@"error.default.description", @"Default error message when no NSError provided");
    NSString *displayMessage = message ?: defaultAlertMessage;
    UIAlertController *controller = [UIAlertController alertControllerWithTitle:alertTitle message:displayMessage preferredStyle:UIAlertControllerStyleAlert];
    NSString *alertActionTitle = NSLocalizedString(@"alert.error.actions.OK", @"OK on error alert");
    [controller addAction:[UIAlertAction actionWithTitle:alertActionTitle style:UIAlertActionStyleDefault handler:nil]];
    [self presentViewController:controller animated:YES completion:nil];
}
// MARK: UICollectionViewDataSource
// One cell per AVPlayerItem currently in the player's queue.
- (NSInteger)collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section {
    NSArray *queuedItems = [self.player items];
    return queuedItems.count;
}
// Looks up the title/thumbnail metadata recorded when the item's asset was loaded.
// NOTE(review): assumes every queued item was built from an AVURLAsset (true for
// items created by this controller) — confirm if items can come from elsewhere.
- (NSDictionary *)titleAndThumbnailForPlayerItemAtIndexPath:(NSIndexPath *)indexPath {
    NSUInteger itemIndex = [indexPath indexAtPosition:1];
    AVPlayerItem *item = [self.player items][itemIndex];
    AVURLAsset *urlAsset = (AVURLAsset *)item.asset;
    return self.assetTitlesAndThumbnailsByURL[urlAsset.URL];
}
// Dequeues a queued-item cell and fills it with the item's title and thumbnail.
- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath {
    AAPLQueuedItemCollectionViewCell *cell = [collectionView dequeueReusableCellWithReuseIdentifier:@"ItemCell" forIndexPath:indexPath];
    NSDictionary *metadata = [self titleAndThumbnailForPlayerItemAtIndexPath:indexPath];
    cell.label.text = metadata[@"title"];
    UIImageView *thumbnailView = [[UIImageView alloc] initWithImage:metadata[@"thumbnail"]];
    cell.backgroundView = thumbnailView;
    return cell;
}
// MARK: Convenience
// Formats a time in seconds as a padded minutes:seconds string via
// timeRemainingFormatter; negative inputs clamp to zero.
- (NSString *)createTimeString:(double)time {
    double clampedTime = fmax(0.0, time);
    NSDateComponents *dateComponents = [[NSDateComponents alloc] init];
    dateComponents.second = (NSInteger)clampedTime;
    return [self.timeRemainingFormatter stringFromDateComponents:dateComponents];
}
@end

View File

@ -0,0 +1,14 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
 See LICENSE.txt for this sample's licensing information
Abstract:
Collection view cell to represent an AVPlayerItem in an AVQueuePlayer's queue.
*/
@import UIKit;
// Cell representing one queued AVPlayerItem: a title label over a thumbnail
// (the thumbnail is installed as backgroundView by the data source).
@interface AAPLQueuedItemCollectionViewCell: UICollectionViewCell
// Title label; connected in the storyboard.
@property (weak) IBOutlet UILabel *label;
@end

View File

@ -0,0 +1,13 @@
/*
Copyright (C) 2016 Apple Inc. All Rights Reserved.
 See LICENSE.txt for this sample's licensing information
Abstract:
Collection view cell to represent an AVPlayerItem in an AVQueuePlayer's queue.
*/
#import "AAPLQueuedItemCollectionViewCell.h"
// No custom behavior: all layout lives in the storyboard and all configuration
// happens in the data source's -collectionView:cellForItemAtIndexPath:.
@implementation AAPLQueuedItemCollectionViewCell
@end

View File

@ -0,0 +1,249 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="8173.3" systemVersion="15A244d" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="8142"/>
<capability name="Constraints to layout margins" minToolsVersion="6.0"/>
</dependencies>
<customFonts key="customFonts">
<mutableArray key="HelveticaNeue.ttc">
<string>HelveticaNeue-Italic</string>
</mutableArray>
</customFonts>
<scenes>
<!--Player View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="AAPLPlayerViewController" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
<viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" enabled="NO" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="xrl-4x-Edh">
<rect key="frame" x="70" y="550" width="70" height="30"/>
<constraints>
<constraint firstAttribute="height" constant="30" id="5cK-ah-alZ"/>
<constraint firstAttribute="width" constant="70" id="E4B-3O-1NJ"/>
</constraints>
<state key="normal" image="PlayButton"/>
<connections>
<action selector="playPauseButtonWasPressed:" destination="BYZ-38-t0r" eventType="touchUpInside" id="FYC-AI-liU"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" enabled="NO" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="AcP-gW-NjJ">
<rect key="frame" x="140" y="550" width="70" height="30"/>
<constraints>
<constraint firstAttribute="height" constant="30" id="Ds3-V4-PI5"/>
<constraint firstAttribute="width" constant="70" id="cXK-QP-qNN"/>
</constraints>
<state key="normal" image="ScanForwardButton"/>
<connections>
<action selector="fastForwardButtonWasPressed:" destination="BYZ-38-t0r" eventType="touchUpInside" id="9VR-W7-z4u"/>
</connections>
</button>
<slider opaque="NO" contentMode="scaleToFill" enabled="NO" contentHorizontalAlignment="center" contentVerticalAlignment="center" value="0.5" minValue="0.0" maxValue="1" translatesAutoresizingMaskIntoConstraints="NO" id="zs2-tO-O9K">
<rect key="frame" x="218" y="550" width="364" height="31"/>
<constraints>
<constraint firstAttribute="height" constant="30" id="Wah-wD-K8s"/>
</constraints>
<connections>
<action selector="timeSliderDidChange:" destination="BYZ-38-t0r" eventType="valueChanged" id="iEm-kp-I1a"/>
</connections>
</slider>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="k2B-HR-6u1" customClass="AAPLPlayerView">
<rect key="frame" x="0.0" y="20" width="600" height="317"/>
<color key="backgroundColor" white="0.0" alpha="1" colorSpace="calibratedWhite"/>
</view>
<button opaque="NO" contentMode="scaleToFill" enabled="NO" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="1Ph-2J-7JL">
<rect key="frame" x="0.0" y="550" width="70" height="30"/>
<constraints>
<constraint firstAttribute="width" constant="70" id="0HV-El-Nhy"/>
<constraint firstAttribute="height" constant="30" id="Ju6-Bx-wC5"/>
</constraints>
<state key="normal" image="ScanBackwardButton"/>
<connections>
<action selector="rewindButtonWasPressed:" destination="BYZ-38-t0r" eventType="touchUpInside" id="a5z-7z-Dbp"/>
</connections>
</button>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="0:00" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" enabled="NO" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="lSE-mk-Glu" userLabel="StartTime">
<rect key="frame" x="220" y="522" width="50" height="20"/>
<constraints>
<constraint firstAttribute="height" constant="20" id="3FX-0e-O6P"/>
<constraint firstAttribute="width" constant="50" id="FxO-it-ceh"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="-:--" textAlignment="right" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" enabled="NO" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="qOC-Kz-nZX" userLabel="Duration">
<rect key="frame" x="530" y="522" width="50" height="20"/>
<constraints>
<constraint firstAttribute="width" constant="50" id="dHY-sI-1Zo"/>
<constraint firstAttribute="height" constant="20" id="vny-2p-5hq"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<nil key="highlightedColor"/>
</label>
<collectionView clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="scaleToFill" showsVerticalScrollIndicator="NO" dataMode="prototypes" translatesAutoresizingMaskIntoConstraints="NO" id="uC1-dn-wmH">
<rect key="frame" x="20" y="392" width="560" height="100"/>
<color key="backgroundColor" cocoaTouchSystemColor="groupTableViewBackgroundColor"/>
<constraints>
<constraint firstAttribute="height" constant="100" id="asZ-KA-Kfv"/>
</constraints>
<collectionViewFlowLayout key="collectionViewLayout" scrollDirection="horizontal" minimumLineSpacing="10" minimumInteritemSpacing="10" id="7QP-KK-YbF">
<size key="itemSize" width="80" height="80"/>
<size key="headerReferenceSize" width="0.0" height="0.0"/>
<size key="footerReferenceSize" width="0.0" height="0.0"/>
<inset key="sectionInset" minX="0.0" minY="0.0" maxX="0.0" maxY="0.0"/>
</collectionViewFlowLayout>
<cells>
<collectionViewCell opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" reuseIdentifier="ItemCell" id="BGF-2M-Mef" userLabel="Queued Item Cell" customClass="AAPLQueuedItemCollectionViewCell">
<rect key="frame" x="0.0" y="10" width="80" height="80"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<view key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center">
<rect key="frame" x="0.0" y="0.0" width="80" height="80"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Label" textAlignment="center" lineBreakMode="tailTruncation" numberOfLines="2" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="pVD-Jk-rvG">
<rect key="frame" x="0.0" y="0.0" width="80" height="80"/>
<fontDescription key="fontDescription" name="HelveticaNeue-Italic" family="Helvetica Neue" pointSize="12"/>
<color key="textColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" white="0.0" alpha="0.0" colorSpace="calibratedWhite"/>
</view>
<constraints>
<constraint firstAttribute="trailing" secondItem="pVD-Jk-rvG" secondAttribute="trailing" id="2sE-5d-FgF"/>
<constraint firstItem="pVD-Jk-rvG" firstAttribute="leading" secondItem="BGF-2M-Mef" secondAttribute="leading" id="Cli-2L-72M"/>
<constraint firstAttribute="bottom" secondItem="pVD-Jk-rvG" secondAttribute="bottom" id="GXi-vS-uuT"/>
<constraint firstItem="pVD-Jk-rvG" firstAttribute="top" secondItem="BGF-2M-Mef" secondAttribute="top" id="uLw-mo-NaR"/>
</constraints>
<size key="customSize" width="80" height="80"/>
<connections>
<outlet property="label" destination="pVD-Jk-rvG" id="HtZ-Kd-nBQ"/>
</connections>
</collectionViewCell>
</cells>
<connections>
<outlet property="dataSource" destination="BYZ-38-t0r" id="7gb-sV-l3B"/>
<outlet property="delegate" destination="BYZ-38-t0r" id="zbu-3m-OVl"/>
</connections>
</collectionView>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="oq2-Hd-jlM">
<rect key="frame" x="430" y="361" width="100" height="21"/>
<constraints>
<constraint firstAttribute="height" constant="21" id="Dmi-sZ-qZW"/>
<constraint firstAttribute="width" constant="100" id="maH-EK-dVM"/>
</constraints>
<state key="normal" title="Add Item"/>
<connections>
<action selector="addItemToQueueButtonPressed:" destination="BYZ-38-t0r" eventType="touchUpInside" id="TIF-3Q-UrL"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="right" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="G1t-KY-nRM">
<rect key="frame" x="540" y="361" width="40" height="21"/>
<constraints>
<constraint firstAttribute="width" relation="greaterThanOrEqual" constant="100" id="7hQ-jm-atj"/>
<constraint firstAttribute="height" constant="21" id="CY7-QS-iEx"/>
<constraint firstAttribute="width" constant="40" id="NiE-3p-8x7"/>
<constraint firstAttribute="height" relation="greaterThanOrEqual" constant="30" id="e4K-eC-Fyk"/>
</constraints>
<state key="normal" title="Clear"/>
<variation key="default">
<mask key="constraints">
<exclude reference="7hQ-jm-atj"/>
<exclude reference="e4K-eC-Fyk"/>
</mask>
</variation>
<connections>
<action selector="clearQueueButtonWasPressed:" destination="BYZ-38-t0r" eventType="touchUpInside" id="nEQ-3K-Kfa"/>
</connections>
</button>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Queue is empty" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="zKb-h9-0cy">
<rect key="frame" x="20" y="361" width="190" height="21"/>
<constraints>
<constraint firstAttribute="height" constant="21" id="lPq-pc-6A4"/>
<constraint firstAttribute="width" constant="190" id="oak-Gg-gxU"/>
</constraints>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" cocoaTouchSystemColor="darkTextColor"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
<constraints>
<constraint firstItem="uC1-dn-wmH" firstAttribute="top" secondItem="k2B-HR-6u1" secondAttribute="bottom" constant="55" id="01e-3i-DaB"/>
<constraint firstItem="uC1-dn-wmH" firstAttribute="top" secondItem="zKb-h9-0cy" secondAttribute="bottom" constant="10" id="03h-CL-J0W"/>
<constraint firstItem="zs2-tO-O9K" firstAttribute="leading" secondItem="lSE-mk-Glu" secondAttribute="leading" id="0Nl-Eq-0C9"/>
<constraint firstItem="AcP-gW-NjJ" firstAttribute="leading" secondItem="xrl-4x-Edh" secondAttribute="trailing" id="0Xi-o4-GPm"/>
<constraint firstItem="uC1-dn-wmH" firstAttribute="top" secondItem="oq2-Hd-jlM" secondAttribute="bottom" constant="10" id="4l9-xu-sFK"/>
<constraint firstItem="uC1-dn-wmH" firstAttribute="leading" secondItem="8bC-Xf-vdC" secondAttribute="leadingMargin" id="68M-AK-ggx"/>
<constraint firstItem="uC1-dn-wmH" firstAttribute="top" secondItem="k2B-HR-6u1" secondAttribute="bottom" constant="8" id="6tP-9t-bnx"/>
<constraint firstItem="zs2-tO-O9K" firstAttribute="leading" secondItem="AcP-gW-NjJ" secondAttribute="trailing" constant="10" id="Djj-sA-9ve"/>
<constraint firstItem="k2B-HR-6u1" firstAttribute="top" secondItem="y3c-jy-aDJ" secondAttribute="bottom" id="EZa-NV-Rbw"/>
<constraint firstItem="uC1-dn-wmH" firstAttribute="trailing" secondItem="G1t-KY-nRM" secondAttribute="trailing" id="F9C-yS-BuO"/>
<constraint firstItem="qOC-Kz-nZX" firstAttribute="trailing" secondItem="zs2-tO-O9K" secondAttribute="trailing" id="Ny2-em-Hsb"/>
<constraint firstItem="uC1-dn-wmH" firstAttribute="trailing" secondItem="8bC-Xf-vdC" secondAttribute="trailingMargin" constant="100" id="RRL-fh-vhG"/>
<constraint firstAttribute="trailingMargin" secondItem="k2B-HR-6u1" secondAttribute="trailing" constant="-20" id="SHT-LM-7fv"/>
<constraint firstItem="zs2-tO-O9K" firstAttribute="width" secondItem="lSE-mk-Glu" secondAttribute="width" id="UTA-oc-lgo"/>
<constraint firstItem="uC1-dn-wmH" firstAttribute="top" secondItem="G1t-KY-nRM" secondAttribute="bottom" constant="1" id="Vyz-t7-Nde"/>
<constraint firstItem="uC1-dn-wmH" firstAttribute="trailing" secondItem="oq2-Hd-jlM" secondAttribute="trailing" id="Y50-u8-uDf"/>
<constraint firstItem="qOC-Kz-nZX" firstAttribute="top" secondItem="uC1-dn-wmH" secondAttribute="bottom" constant="30" id="aIz-29-r5i"/>
<constraint firstItem="zs2-tO-O9K" firstAttribute="centerY" secondItem="AcP-gW-NjJ" secondAttribute="centerY" id="aUq-JI-udW"/>
<constraint firstItem="uC1-dn-wmH" firstAttribute="top" secondItem="G1t-KY-nRM" secondAttribute="bottom" constant="10" id="c0z-mE-HXr"/>
<constraint firstItem="zs2-tO-O9K" firstAttribute="centerX" secondItem="lSE-mk-Glu" secondAttribute="centerX" id="eau-ex-HCT"/>
<constraint firstItem="uC1-dn-wmH" firstAttribute="leading" secondItem="zKb-h9-0cy" secondAttribute="leading" id="fqr-ay-8dR"/>
<constraint firstItem="k2B-HR-6u1" firstAttribute="top" secondItem="8bC-Xf-vdC" secondAttribute="topMargin" constant="70" id="fr7-hU-dZX"/>
<constraint firstAttribute="trailingMargin" secondItem="uC1-dn-wmH" secondAttribute="trailing" id="fyW-Ct-YVf"/>
<constraint firstItem="xrl-4x-Edh" firstAttribute="centerY" secondItem="1Ph-2J-7JL" secondAttribute="centerY" id="gSS-SM-9qW"/>
<constraint firstItem="k2B-HR-6u1" firstAttribute="leading" secondItem="8bC-Xf-vdC" secondAttribute="leadingMargin" constant="-20" id="hIA-sm-1WB"/>
<constraint firstItem="AcP-gW-NjJ" firstAttribute="centerY" secondItem="xrl-4x-Edh" secondAttribute="centerY" id="hnG-E0-mGw"/>
<constraint firstItem="zs2-tO-O9K" firstAttribute="trailing" secondItem="8bC-Xf-vdC" secondAttribute="trailingMargin" id="ill-9D-3bq"/>
<constraint firstItem="zs2-tO-O9K" firstAttribute="top" secondItem="lSE-mk-Glu" secondAttribute="bottom" constant="8" id="jXv-uJ-vtR"/>
<constraint firstItem="zs2-tO-O9K" firstAttribute="top" secondItem="qOC-Kz-nZX" secondAttribute="bottom" constant="8" id="mP2-9X-QOD"/>
<constraint firstItem="xrl-4x-Edh" firstAttribute="leading" secondItem="1Ph-2J-7JL" secondAttribute="trailing" id="nWe-M1-J3B"/>
<constraint firstItem="G1t-KY-nRM" firstAttribute="leading" secondItem="oq2-Hd-jlM" secondAttribute="trailing" constant="10" id="rYo-sU-mJv"/>
<constraint firstItem="1Ph-2J-7JL" firstAttribute="leading" secondItem="8bC-Xf-vdC" secondAttribute="leadingMargin" constant="-20" id="xCn-By-np3"/>
<constraint firstItem="wfy-db-euE" firstAttribute="top" secondItem="k2B-HR-6u1" secondAttribute="bottom" constant="200" id="zi6-Fl-Tea"/>
<constraint firstItem="wfy-db-euE" firstAttribute="top" secondItem="1Ph-2J-7JL" secondAttribute="bottom" constant="20" id="zlE-Xs-R57"/>
</constraints>
<variation key="default">
<mask key="constraints">
<exclude reference="fr7-hU-dZX"/>
<exclude reference="6tP-9t-bnx"/>
<exclude reference="RRL-fh-vhG"/>
<exclude reference="Vyz-t7-Nde"/>
<exclude reference="Y50-u8-uDf"/>
<exclude reference="UTA-oc-lgo"/>
<exclude reference="eau-ex-HCT"/>
<exclude reference="zi6-Fl-Tea"/>
</mask>
</variation>
</view>
<connections>
<outlet property="clearButton" destination="G1t-KY-nRM" id="54t-PW-2yt"/>
<outlet property="collectionView" destination="uC1-dn-wmH" id="euy-yw-poK"/>
<outlet property="durationLabel" destination="qOC-Kz-nZX" id="vdA-zn-17E"/>
<outlet property="fastForwardButton" destination="AcP-gW-NjJ" id="bVk-tK-Ap4"/>
<outlet property="playPauseButton" destination="xrl-4x-Edh" id="BeD-Na-Ocg"/>
<outlet property="playerView" destination="k2B-HR-6u1" id="w2i-er-gei"/>
<outlet property="queueLabel" destination="zKb-h9-0cy" id="bca-fA-Mv0"/>
<outlet property="rewindButton" destination="1Ph-2J-7JL" id="Fwp-1T-h4Q"/>
<outlet property="startTimeLabel" destination="lSE-mk-Glu" id="0k2-Ci-tDY"/>
<outlet property="timeSlider" destination="zs2-tO-O9K" id="BKu-N7-khJ"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="357" y="421"/>
</scene>
</scenes>
<resources>
<image name="PlayButton" width="21" height="22"/>
<image name="ScanBackwardButton" width="29" height="16"/>
<image name="ScanForwardButton" width="29" height="16"/>
</resources>
</document>

Some files were not shown because too many files have changed in this diff Show More