AudioInARKit - Audio in ARKit: Version 1.1, 2017-09-19

Editorial update.

This sample app runs an ARKit world tracking session with content displayed in a SceneKit view. To demonstrate plane detection, the app simply places a 3D model onto the first plane that ARKit detects. If the model's position is outside the current field of view of the camera, the app uses SceneKit's positional audio feature to indicate which direction to turn the device to see the model.

Signed-off-by: Liu Lantao <liulantao@gmail.com>
This commit is contained in:
Liu Lantao 2017-11-20 08:22:44 +08:00
parent c580342ab1
commit d2a790e74d
No known key found for this signature in database
GPG Key ID: BF35AA0CD375679D
42 changed files with 1534 additions and 0 deletions

43
AudioInARKit/.gitignore vendored Normal file
View File

@ -0,0 +1,43 @@
.DS_Store
# Xcode
build/*
*/build/*
*/**/build/*
*.mode1
*.pbxuser
*.perspective
!default.perspectivev3
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
*.xcworkspace
!default.xcworkspace
xcuserdata
profile
*.moved-aside
# Generated files
VersionX-revision.h
# build products
build/
*.[oa]
# version control files
.hg
.svn
CVS
# automatic backup files
*~.nib
*.swp
*~
*(Autosaved).rtfd/
Backup[ ]of[ ]*.pages/
Backup[ ]of[ ]*.key/
Backup[ ]of[ ]*.numbers/

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<array/>
</plist>

View File

@ -0,0 +1,367 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 48;
objects = {
/* Begin PBXBuildFile section */
301595A41F4B799700F4AD09 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 301595A31F4B799700F4AD09 /* AppDelegate.swift */; };
301595A81F4B799700F4AD09 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 301595A71F4B799700F4AD09 /* ViewController.swift */; };
301595BC1F4B7AA500F4AD09 /* ARSCNView+HitTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 301595B91F4B7AA400F4AD09 /* ARSCNView+HitTests.swift */; };
301595BD1F4B7AA500F4AD09 /* PreviewNode.swift in Sources */ = {isa = PBXBuildFile; fileRef = 301595BA1F4B7AA500F4AD09 /* PreviewNode.swift */; };
301595BE1F4B7AA500F4AD09 /* Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = 301595BB1F4B7AA500F4AD09 /* Utilities.swift */; };
30E1BCC31F5F40D6003E6D01 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 427473A61F4E4945004B3E25 /* Assets.xcassets */; };
4274739F1F4E4824004B3E25 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 4274739A1F4E4803004B3E25 /* LaunchScreen.storyboard */; };
427473A01F4E4828004B3E25 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 4274739C1F4E4803004B3E25 /* Main.storyboard */; };
427473A11F4E482C004B3E25 /* Assets.scnassets in Resources */ = {isa = PBXBuildFile; fileRef = 427473991F4E4803004B3E25 /* Assets.scnassets */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
301595A01F4B799700F4AD09 /* Audio in ARKit.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Audio in ARKit.app"; sourceTree = BUILT_PRODUCTS_DIR; };
301595A31F4B799700F4AD09 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
301595A71F4B799700F4AD09 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
301595B71F4B7A5500F4AD09 /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = "<group>"; };
301595B91F4B7AA400F4AD09 /* ARSCNView+HitTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ARSCNView+HitTests.swift"; sourceTree = "<group>"; };
301595BA1F4B7AA500F4AD09 /* PreviewNode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewNode.swift; sourceTree = "<group>"; };
301595BB1F4B7AA500F4AD09 /* Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Utilities.swift; sourceTree = "<group>"; };
427473991F4E4803004B3E25 /* Assets.scnassets */ = {isa = PBXFileReference; lastKnownFileType = wrapper.scnassets; path = Assets.scnassets; sourceTree = "<group>"; };
4274739B1F4E4803004B3E25 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
4274739D1F4E4803004B3E25 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
4274739E1F4E4803004B3E25 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
427473A51F4E489A004B3E25 /* SampleCode.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = SampleCode.xcconfig; sourceTree = "<group>"; };
427473A61F4E4945004B3E25 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
3015959D1F4B799700F4AD09 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
301595971F4B799700F4AD09 = {
isa = PBXGroup;
children = (
301595B71F4B7A5500F4AD09 /* README.md */,
301595A21F4B799700F4AD09 /* Audio in ARKit */,
427473A41F4E489A004B3E25 /* Configuration */,
301595A11F4B799700F4AD09 /* Products */,
);
sourceTree = "<group>";
};
301595A11F4B799700F4AD09 /* Products */ = {
isa = PBXGroup;
children = (
301595A01F4B799700F4AD09 /* Audio in ARKit.app */,
);
name = Products;
sourceTree = "<group>";
};
301595A21F4B799700F4AD09 /* Audio in ARKit */ = {
isa = PBXGroup;
children = (
301595A31F4B799700F4AD09 /* AppDelegate.swift */,
301595A71F4B799700F4AD09 /* ViewController.swift */,
301595B91F4B7AA400F4AD09 /* ARSCNView+HitTests.swift */,
301595BA1F4B7AA500F4AD09 /* PreviewNode.swift */,
301595BB1F4B7AA500F4AD09 /* Utilities.swift */,
427473981F4E4803004B3E25 /* Resources */,
);
path = "Audio in ARKit";
sourceTree = "<group>";
};
427473981F4E4803004B3E25 /* Resources */ = {
isa = PBXGroup;
children = (
427473A61F4E4945004B3E25 /* Assets.xcassets */,
427473991F4E4803004B3E25 /* Assets.scnassets */,
4274739A1F4E4803004B3E25 /* LaunchScreen.storyboard */,
4274739C1F4E4803004B3E25 /* Main.storyboard */,
4274739E1F4E4803004B3E25 /* Info.plist */,
);
path = Resources;
sourceTree = "<group>";
};
427473A41F4E489A004B3E25 /* Configuration */ = {
isa = PBXGroup;
children = (
427473A51F4E489A004B3E25 /* SampleCode.xcconfig */,
);
path = Configuration;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
3015959F1F4B799700F4AD09 /* Audio in ARKit */ = {
isa = PBXNativeTarget;
buildConfigurationList = 301595B41F4B799700F4AD09 /* Build configuration list for PBXNativeTarget "Audio in ARKit" */;
buildPhases = (
3015959C1F4B799700F4AD09 /* Sources */,
3015959D1F4B799700F4AD09 /* Frameworks */,
3015959E1F4B799700F4AD09 /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = "Audio in ARKit";
productName = "Audio in ARKit";
productReference = 301595A01F4B799700F4AD09 /* Audio in ARKit.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
301595981F4B799700F4AD09 /* Project object */ = {
isa = PBXProject;
attributes = {
LastSwiftUpdateCheck = 0900;
LastUpgradeCheck = 0900;
ORGANIZATIONNAME = Apple;
TargetAttributes = {
3015959F1F4B799700F4AD09 = {
CreatedOnToolsVersion = 9.0;
ProvisioningStyle = Automatic;
};
};
};
buildConfigurationList = 3015959B1F4B799700F4AD09 /* Build configuration list for PBXProject "Audio in ARKit" */;
compatibilityVersion = "Xcode 8.0";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 301595971F4B799700F4AD09;
productRefGroup = 301595A11F4B799700F4AD09 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
3015959F1F4B799700F4AD09 /* Audio in ARKit */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
3015959E1F4B799700F4AD09 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
427473A11F4E482C004B3E25 /* Assets.scnassets in Resources */,
4274739F1F4E4824004B3E25 /* LaunchScreen.storyboard in Resources */,
30E1BCC31F5F40D6003E6D01 /* Assets.xcassets in Resources */,
427473A01F4E4828004B3E25 /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
3015959C1F4B799700F4AD09 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
301595BD1F4B7AA500F4AD09 /* PreviewNode.swift in Sources */,
301595A81F4B799700F4AD09 /* ViewController.swift in Sources */,
301595BC1F4B7AA500F4AD09 /* ARSCNView+HitTests.swift in Sources */,
301595BE1F4B7AA500F4AD09 /* Utilities.swift in Sources */,
301595A41F4B799700F4AD09 /* AppDelegate.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
4274739A1F4E4803004B3E25 /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
4274739B1F4E4803004B3E25 /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
4274739C1F4E4803004B3E25 /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
4274739D1F4E4803004B3E25 /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
301595B21F4B799700F4AD09 /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 427473A51F4E489A004B3E25 /* SampleCode.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
301595B31F4B799700F4AD09 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 427473A51F4E489A004B3E25 /* SampleCode.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
CODE_SIGN_IDENTITY = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 11.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
301595B51F4B799700F4AD09 /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 427473A51F4E489A004B3E25 /* SampleCode.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = "";
INFOPLIST_FILE = "Audio in ARKit/Resources/Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.Audio-in-ARKit${SAMPLE_CODE_DISAMBIGUATOR}";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
301595B61F4B799700F4AD09 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 427473A51F4E489A004B3E25 /* SampleCode.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = "";
INFOPLIST_FILE = "Audio in ARKit/Resources/Info.plist";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "com.example.apple-samplecode.Audio-in-ARKit${SAMPLE_CODE_DISAMBIGUATOR}";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 4.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
3015959B1F4B799700F4AD09 /* Build configuration list for PBXProject "Audio in ARKit" */ = {
isa = XCConfigurationList;
buildConfigurations = (
301595B21F4B799700F4AD09 /* Debug */,
301595B31F4B799700F4AD09 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
301595B41F4B799700F4AD09 /* Build configuration list for PBXNativeTarget "Audio in ARKit" */ = {
isa = XCConfigurationList;
buildConfigurations = (
301595B51F4B799700F4AD09 /* Debug */,
301595B61F4B799700F4AD09 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 301595981F4B799700F4AD09 /* Project object */;
}

View File

@ -0,0 +1,185 @@
/*
Copyright (C) 2017 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Convenience extensions on ARSCNView for hit testing
*/
import ARKit
extension ARSCNView {

    // MARK: - Types

    /// A ray in world space: an origin plus a normalized direction.
    struct HitTestRay {
        let origin: float3
        let direction: float3
    }

    /// A hit against one of the session's raw feature points.
    struct FeatureHitTestResult {
        /// The point on the ray closest to the feature point.
        let position: float3
        /// Distance along the ray from its origin to `position`.
        let distanceToRayOrigin: Float
        /// The feature point that produced this hit.
        let featureHit: float3
        /// Perpendicular distance from the feature point to the ray.
        let featureDistanceToHitResult: Float
    }

    /// SIMD-typed convenience wrapper around SceneKit's `unprojectPoint(_:)`.
    func unprojectPoint(_ point: float3) -> float3 {
        return float3(self.unprojectPoint(SCNVector3(point)))
    }

    // MARK: - Hit Tests

    /// Builds a world-space ray from the camera through the given screen
    /// position, or returns nil when the session has no current frame yet.
    func hitTestRayFromScreenPos(_ point: CGPoint) -> HitTestRay? {
        guard let frame = self.session.currentFrame else {
            return nil
        }
        let cameraPos = frame.camera.transform.translation
        // Note: z: 1.0 will unproject() the screen position to the far clipping plane.
        let positionVec = float3(x: Float(point.x), y: Float(point.y), z: 1.0)
        let screenPosOnFarClippingPlane = self.unprojectPoint(positionVec)
        let rayDirection = simd_normalize(screenPosOnFarClippingPlane - cameraPos)
        return HitTestRay(origin: cameraPos, direction: rayDirection)
    }

    /// Intersects the ray through the given screen position with an infinite
    /// horizontal plane at the height (Y coordinate) of `pointOnPlane`.
    func hitTestWithInfiniteHorizontalPlane(_ point: CGPoint, _ pointOnPlane: float3) -> float3? {
        guard let ray = hitTestRayFromScreenPos(point) else {
            return nil
        }
        // Do not intersect with planes above the camera or if the ray is almost parallel to the plane.
        if ray.direction.y > -0.03 {
            return nil
        }
        // Return the intersection of a ray from the camera through the screen position with a horizontal plane
        // at height (Y axis).
        return rayIntersectionWithHorizontalPlane(rayOrigin: ray.origin, direction: ray.direction, planeY: pointOnPlane.y)
    }

    /// Hit-tests the raw feature point cloud inside a cone around the ray
    /// through `point`, returning up to `maxResults` hits sorted by distance
    /// from the ray origin.
    func hitTestWithFeatures(_ point: CGPoint, coneOpeningAngleInDegrees: Float,
                             minDistance: Float = 0,
                             maxDistance: Float = Float.greatestFiniteMagnitude,
                             maxResults: Int = 1) -> [FeatureHitTestResult] {
        var results = [FeatureHitTestResult]()
        guard let features = self.session.currentFrame?.rawFeaturePoints else {
            return results
        }
        guard let ray = hitTestRayFromScreenPos(point) else {
            return results
        }
        let maxAngleInDeg = min(coneOpeningAngleInDegrees, 360) / 2
        let maxAngle = (maxAngleInDeg / 180) * .pi
        let points = features.__points
        // Valid feature indices are 0 ..< __count; a closed range (`...`) here
        // would read one element past the end of the buffer.
        for i in 0..<features.__count {
            let featurePos = points.advanced(by: Int(i)).pointee
            let originToFeature = featurePos - ray.origin
            // |cross(v, d)| with |d| == 1 is the feature's perpendicular
            // distance to the ray.
            let crossProduct = simd_cross(originToFeature, ray.direction)
            let featureDistanceFromResult = simd_length(crossProduct)
            // Project the feature onto the ray to find the hit position.
            let hitTestResult = ray.origin + (ray.direction * simd_dot(ray.direction, originToFeature))
            let hitTestResultDistance = simd_length(hitTestResult - ray.origin)
            if hitTestResultDistance < minDistance || hitTestResultDistance > maxDistance {
                // Skip this feature - it is too close or too far away.
                continue
            }
            let originToFeatureNormalized = simd_normalize(originToFeature)
            let angleBetweenRayAndFeature = acos(simd_dot(ray.direction, originToFeatureNormalized))
            if angleBetweenRayAndFeature > maxAngle {
                // Skip this feature - it is outside of the hit test cone.
                continue
            }
            // All tests passed: Add the hit against this feature to the results.
            results.append(FeatureHitTestResult(position: hitTestResult,
                                                distanceToRayOrigin: hitTestResultDistance,
                                                featureHit: featurePos,
                                                featureDistanceToHitResult: featureDistanceFromResult))
        }
        // Sort the results by distance from the ray origin.
        results.sort { $0.distanceToRayOrigin < $1.distanceToRayOrigin }
        // Cap the list to maxResults (guard against a negative argument).
        return Array(results.prefix(max(maxResults, 0)))
    }

    /// Convenience variant: returns at most one hit, against the feature
    /// point closest to the ray through `point`.
    func hitTestWithFeatures(_ point: CGPoint) -> [FeatureHitTestResult] {
        var results = [FeatureHitTestResult]()
        guard let ray = hitTestRayFromScreenPos(point) else {
            return results
        }
        if let result = self.hitTestFromOrigin(origin: ray.origin, direction: ray.direction) {
            results.append(result)
        }
        return results
    }

    /// Finds the feature point with the smallest perpendicular distance to the
    /// given ray and returns the hit on the ray closest to that feature point.
    func hitTestFromOrigin(origin: float3, direction: float3) -> FeatureHitTestResult? {
        guard let features = self.session.currentFrame?.rawFeaturePoints else {
            return nil
        }
        let points = features.__points
        // Determine the point from the whole point cloud which is closest to the hit test ray.
        var closestFeaturePoint = origin
        var minDistance = Float.greatestFiniteMagnitude
        // Valid feature indices are 0 ..< __count; a closed range would read
        // one element past the end of the buffer.
        for i in 0..<features.__count {
            let featurePos = points.advanced(by: Int(i)).pointee
            let originVector = origin - featurePos
            let crossProduct = simd_cross(originVector, direction)
            let featureDistanceFromResult = simd_length(crossProduct)
            if featureDistanceFromResult < minDistance {
                closestFeaturePoint = featurePos
                minDistance = featureDistanceFromResult
            }
        }
        // Compute the point along the ray that is closest to the selected feature.
        let originToFeature = closestFeaturePoint - origin
        let hitTestResult = origin + (direction * simd_dot(direction, originToFeature))
        let hitTestResultDistance = simd_length(hitTestResult - origin)
        return FeatureHitTestResult(position: hitTestResult,
                                    distanceToRayOrigin: hitTestResultDistance,
                                    featureHit: closestFeaturePoint,
                                    featureDistanceToHitResult: minDistance)
    }
}

View File

@ -0,0 +1,14 @@
/*
Copyright (C) 2017 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Contains the application's delegate.
*/
import UIKit
/// The application delegate. This sample needs no custom launch logic, so the
/// delegate only supplies the window property UIKit expects.
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
    // The app's main window; populated by UIKit from the main storyboard.
    var window: UIWindow?
}

View File

@ -0,0 +1,61 @@
/*
Copyright (C) 2017 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
SceneKit node wrapper that estimates an object's final placement
*/
import Foundation
import ARKit
/// A translucent stand-in node that previews where virtual content will end
/// up before it is finally placed.
class PreviewNode: SCNNode {

    // MARK: - Position History

    /// Last position that coincided with a detected plane, if any.
    var lastPositionOnPlane: float3?
    /// Last position of any kind passed to `update(for:planeAnchor:camera:)`.
    var lastPosition: float3?
    /// Rolling window of recent positions; their average damps jitter.
    private var recentPositions: [float3] = []

    // MARK: - Initialization

    override init() {
        super.init()
    }

    /// Wraps `node` and renders it half-transparent to mark it as a preview.
    convenience init(node: SCNNode) {
        self.init()
        opacity = 0.5
        addChildNode(node)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // MARK: - Appearance

    /// Records `position` (and, when a plane anchor is present, the on-plane
    /// position) and moves the node toward the smoothed location.
    func update(for position: float3, planeAnchor: ARPlaneAnchor?, camera: ARCamera?) {
        lastPosition = position
        if planeAnchor != nil {
            lastPositionOnPlane = position
        }
        updateTransform(for: position, camera: camera)
    }

    // MARK: - Private

    /// Appends `position` to the history, trims the history to the eight most
    /// recent entries, and snaps the node to their average.
    private func updateTransform(for position: float3, camera: ARCamera?) {
        recentPositions.append(position)
        recentPositions.keepLast(8)
        guard let smoothed = recentPositions.average else { return }
        simdPosition = smoothed
    }
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 279 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 381 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 379 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 515 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 272 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 51 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.2 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.2 KiB

View File

@ -0,0 +1,112 @@
{
"images" : [
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "ARKit-40.png",
"scale" : "2x"
},
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "ARKit-60.png",
"scale" : "3x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "ARKit-59.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "ARKit-87.png",
"scale" : "3x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "ARKit-81.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "ARKit-121.png",
"scale" : "3x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "ARKit-120.png",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "ARKit-180.png",
"scale" : "3x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "1x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "ARKit-41.png",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "ARKit-58.png",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "1x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "ARKit-80.png",
"scale" : "2x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "ARKit-76.png",
"scale" : "1x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "ARKit-152.png",
"scale" : "2x"
},
{
"size" : "83.5x83.5",
"idiom" : "ipad",
"filename" : "ARKit-167.png",
"scale" : "2x"
},
{
"idiom" : "ios-marketing",
"size" : "1024x1024",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

View File

@ -0,0 +1,6 @@
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}

View File

@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13189.4" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<device id="retina4_7" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13165.3"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<viewLayoutGuide key="safeArea" id="6Tk-OE-BBY"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>

View File

@ -0,0 +1,52 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13196" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES" initialViewController="7dh-A0-qGE">
<device id="retina4_7" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13171"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="WeU-Be-bo7">
<objects>
<viewController id="7dh-A0-qGE" customClass="ViewController" customModule="Audio_in_ARKit" customModuleProvider="target" sceneMemberID="viewController">
<view key="view" contentMode="scaleToFill" id="loo-cG-myf">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<arscnView clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="scaleToFill" sceneName="Scene.scn" translatesAutoresizingMaskIntoConstraints="NO" id="BrB-h1-WRS">
<rect key="frame" x="0.0" y="-20" width="375" height="687"/>
<connections>
<outlet property="delegate" destination="7dh-A0-qGE" id="6YV-aS-dot"/>
</connections>
</arscnView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" fixedFrame="YES" usesAttributedText="YES" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" minimumFontSize="7" enabled="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Y7q-Fg-DbF">
<rect key="frame" x="16" y="626" width="343" height="21"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<color key="tintColor" red="0.20970137799999999" green="0.18353630239999999" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<attributedString key="attributedText"/>
<color key="highlightedColor" name="textColor" catalog="System" colorSpace="catalog"/>
</label>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
<constraints>
<constraint firstAttribute="top" secondItem="BrB-h1-WRS" secondAttribute="top" constant="20" id="6az-Zm-SM4"/>
<constraint firstAttribute="trailing" secondItem="BrB-h1-WRS" secondAttribute="trailing" id="OWR-vj-YmO"/>
<constraint firstItem="BrB-h1-WRS" firstAttribute="leading" secondItem="loo-cG-myf" secondAttribute="leading" id="U47-u3-i38"/>
<constraint firstAttribute="bottom" secondItem="BrB-h1-WRS" secondAttribute="bottom" id="xmO-pC-5vS"/>
</constraints>
</view>
<connections>
<outlet property="sceneView" destination="BrB-h1-WRS" id="sgr-g8-kdG"/>
<outlet property="sessionInfoLabel" destination="Y7q-Fg-DbF" id="nU1-vd-x1S"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="hvH-ud-t1P" userLabel="First Responder" sceneMemberID="firstResponder"/>
<customObject id="dh1-aA-Oau" customClass="SCNScene"/>
</objects>
<point key="canvasLocation" x="1029.5999999999999" y="-222.63868065967017"/>
</scene>
</scenes>
</document>

View File

@ -0,0 +1,54 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIcons</key>
<dict/>
<key>CFBundleIcons~ipad</key>
<dict/>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>NSCameraUsageDescription</key>
<string>This application will use the camera for Augmented Reality.</string>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>armv7</string>
<string>arkit</string>
</array>
<key>UIStatusBarHidden</key>
<true/>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
</dict>
</plist>

View File

@ -0,0 +1,162 @@
/*
Copyright (C) 2017 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Utility functions and type extensions used throughout the projects.
*/
import Foundation
import ARKit
// MARK: - Collection extensions
extension Array where Iterator.Element == float3 {
    /// The component-wise mean of the vectors in the array, or `nil` when the
    /// array is empty.
    var average: float3? {
        guard !isEmpty else { return nil }
        let total = reduce(float3(0), +)
        return total / Float(count)
    }
}
extension RangeReplaceableCollection where IndexDistance == Int {
    /// Drops elements from the front so that at most `elementsToKeep`
    /// elements remain. Does nothing when the collection already fits.
    mutating func keepLast(_ elementsToKeep: Int) {
        let surplus = count - elementsToKeep
        guard surplus > 0 else { return }
        removeFirst(surplus)
    }
}
// MARK: - float4x4 extensions
extension float4x4 {
    /// Treats matrix as a (right-hand column-major convention) transform matrix
    /// and factors out the translation component of the transform.
    var translation: float3 {
        let column = columns.3
        return float3(column.x, column.y, column.z)
    }
}
// MARK: - Math
/// Intersects a ray with the horizontal plane `y == planeY`.
///
/// - Parameters:
///   - rayOrigin: Starting point of the ray, in world coordinates.
///   - direction: Ray direction; normalized internally, so any length works.
///   - planeY: Height of the horizontal plane.
/// - Returns: The intersection point, or `nil` when the ray is parallel to the
///   plane (and not on it) or the plane lies behind the ray's origin.
func rayIntersectionWithHorizontalPlane(rayOrigin: float3, direction: float3, planeY: Float) -> float3? {
    let dir = simd_normalize(direction)

    // Special case: a horizontal ray intersects everywhere when it lies on the
    // plane (return the origin), and nowhere otherwise.
    guard dir.y != 0 else {
        return rayOrigin.y == planeY ? rayOrigin : nil
    }

    // The distance from the ray's origin to the intersection point on the plane is:
    //   (pointOnPlane - rayOrigin) dot planeNormal
    //   ------------------------------------------
    //           direction dot planeNormal
    // Since horizontal planes have normal (0, 1, 0), this simplifies to:
    let distance = (planeY - rayOrigin.y) / dir.y

    // Intersections behind the ray's origin do not count.
    guard distance >= 0 else { return nil }

    return rayOrigin + dir * distance
}
/// Converts a screen-space point into a world position by trying a cascade of
/// hit tests, from most to least reliable.
///
/// - Parameters:
///   - position: Point in the view's coordinate space to hit test.
///   - sceneView: The `ARSCNView` whose session and features are queried.
///   - objectPos: Current world position of the object, used as the height of
///     an infinite horizontal plane fallback.
///   - infinitePlane: Forces the infinite-plane test even when a good feature
///     hit exists.
/// - Returns: The best world position found (if any), the plane anchor when an
///   existing plane was hit, and whether any plane (real or infinite) was hit.
func worldPositionFromScreenPosition(_ position: CGPoint,
                                     in sceneView: ARSCNView,
                                     objectPos: float3?,
                                     infinitePlane: Bool = false) -> (position: float3?, planeAnchor: ARPlaneAnchor?, hitAPlane: Bool) {
    // 1. Hit test against existing plane anchors first (only within their
    //    extents). A hit here is the best possible outcome — return at once.
    if let planeResult = sceneView.hitTest(position, types: .existingPlaneUsingExtent).first {
        return (planeResult.worldTransform.translation,
                planeResult.anchor as? ARPlaneAnchor,
                true)
    }

    // 2. Collect more information about the environment by hit testing against
    //    the feature point cloud, but hold the result instead of returning it.
    var featureHitTestPosition: float3?
    var hasHighQualityFeatureHit = false
    let coneResults = sceneView.hitTestWithFeatures(position, coneOpeningAngleInDegrees: 18, minDistance: 0.2, maxDistance: 2.0)
    if let bestFeature = coneResults.first {
        featureHitTestPosition = bestFeature.position
        hasHighQualityFeatureHit = true
    }

    // 3. If desired, or necessary because no good feature hit exists, test
    //    against an infinite horizontal plane (ignoring the real world).
    if infinitePlane || !hasHighQualityFeatureHit,
        let pointOnPlane = objectPos,
        let pointOnInfinitePlane = sceneView.hitTestWithInfiniteHorizontalPlane(position, pointOnPlane) {
        return (pointOnInfinitePlane, nil, true)
    }

    // 4. Fall back to the stored high-quality feature hit, if there was one.
    if hasHighQualityFeatureHit {
        return (featureHitTestPosition, nil, false)
    }

    // 5. Last resort: a second, unfiltered hit test against features. When the
    //    scene contains no features at all, this yields (nil, nil, false).
    if let fallback = sceneView.hitTestWithFeatures(position).first {
        return (fallback.position, nil, false)
    }
    return (nil, nil, false)
}
/// Moves `object` toward `pos`, clamping its distance from the camera to at
/// most 10 meters along the camera-to-position direction.
func setNewVirtualObjectPosition(_ object: SCNNode, to pos: float3, cameraTransform: matrix_float4x4) {
    let maximumDistance: Float = 10
    let cameraPosition = cameraTransform.translation
    var offset = pos - cameraPosition

    // Limit the distance of the object from the camera to a maximum of 10 meters.
    if simd_length(offset) > maximumDistance {
        offset = simd_normalize(offset) * maximumDistance
    }

    object.simdPosition = cameraPosition + offset
}

View File

@ -0,0 +1,273 @@
/*
Copyright (C) 2017 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample's licensing information
Abstract:
Main view controller for the AR experience.
*/
import UIKit
import SceneKit
import ARKit
class ViewController: UIViewController, ARSCNViewDelegate {
    // MARK: - IBOutlets

    /// Label used to surface session / tracking status messages to the user.
    @IBOutlet weak var sessionInfoLabel: UILabel!
    /// The AR-enabled SceneKit view rendering the camera feed and virtual content.
    @IBOutlet weak var sceneView: ARSCNView!

    /// Cached center of the view in screen coordinates, used for hit testing.
    /// Recomputed in `viewDidAppear` and on rotation.
    var screenCenter: CGPoint = .zero

    // Shows a preview of the object to be placed and hovers over estimated planes.
    var previewNode: PreviewNode?

    // Contains the cup model that is shared by the preview and final nodes.
    var cupNode = SCNNode()

    // Audio source for positional audio feedback. Set once in `viewDidLoad`.
    var source: SCNAudioSource?

    // MARK: - View Life Cycle

    override func viewDidLoad() {
        super.viewDidLoad()
        // Show statistics such as FPS and timing information.
        sceneView.showsStatistics = true
        // Setup environment mapping. The resource ships with the app, so a
        // missing file is a programmer error and may crash.
        let environmentMap = UIImage(named: "Assets.scnassets/sharedImages/environment_blur.exr")!
        sceneView.scene.lightingEnvironment.contents = environmentMap
        // Complete rendering setup of ARSCNView.
        sceneView.antialiasingMode = .multisampling4X
        sceneView.automaticallyUpdatesLighting = false
        sceneView.contentScaleFactor = 1.3
        // Preload the audio file. Bind locally once instead of repeatedly
        // force-unwrapping the stored optional.
        let audioSource = SCNAudioSource(fileNamed: "Assets.scnassets/ping.aif")!
        audioSource.loops = true
        audioSource.load()
        source = audioSource
    }

    /// - Tag: StartARSession
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        guard ARWorldTrackingConfiguration.isSupported
            else { showUnsupportedDeviceError(); return }
        // Start the ARSession.
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = .horizontal
        sceneView.session.run(configuration)
        screenCenter = CGPoint(x: sceneView.bounds.midX, y: sceneView.bounds.midY)
        // Prevent the screen from being dimmed after a while as users will likely have
        // long periods of interaction without touching the screen or buttons.
        UIApplication.shared.isIdleTimerDisabled = true
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Pause the view's session.
        sceneView.session.pause()
    }

    override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) {
        super.viewWillTransition(to: size, with: coordinator)
        // The screen's center point changes on orientation switch, so recalculate `screenCenter`.
        screenCenter = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
    }

    // MARK: - Internal methods

    /// Informs the user that this device does not support 6DOF world tracking.
    private func showUnsupportedDeviceError() {
        let alertController = UIAlertController(
            title: "ARKit is not available on this device.",
            message: "This app requires world tracking, which is available only on iOS devices with the A9 processor or later.",
            preferredStyle: .alert
        )
        alertController.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
        present(alertController, animated: true, completion: nil)
    }

    // Check the light estimate from the current ARFrame and update the scene.
    private func updateLightEstimate() {
        if let lightEstimate = sceneView.session.currentFrame?.lightEstimate {
            sceneView.scene.lightingEnvironment.intensity = lightEstimate.ambientIntensity / 40.0
        } else {
            // No frame/estimate available; fall back to a neutral intensity.
            sceneView.scene.lightingEnvironment.intensity = 40.0
        }
    }

    /// Restarts the session from scratch and returns the UI to the preview state.
    private func resetTracking() {
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = .horizontal
        sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
        // Reset preview state.
        cupNode.removeFromParentNode()
        cupNode = SCNNode()
        previewNode?.removeFromParentNode()
        previewNode = nil
        playSound()
    }

    /// Positions `object` at the anchor's translation, clamped to at most
    /// 10 meters from the camera along the camera-to-anchor direction.
    private func setNewVirtualObjectToAnchor(_ object: SCNNode, to anchor: ARAnchor, cameraTransform: matrix_float4x4) {
        let cameraWorldPosition = cameraTransform.translation
        var cameraToPosition = anchor.transform.translation - cameraWorldPosition
        // Limit the distance of the object from the camera to a maximum of 10 meters.
        if simd_length(cameraToPosition) > 10 {
            cameraToPosition = simd_normalize(cameraToPosition)
            cameraToPosition *= 10
        }
        object.simdPosition = cameraWorldPosition + cameraToPosition
    }

    // MARK: - ARSCNViewDelegate

    /// - Tag: UpdateAudioPlayback
    func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
        // If our model hasn't been placed and we lack a preview for placement, set one up.
        if cupNode.parent == nil && previewNode == nil {
            setupPreviewNode()
        }
        // The preview is updated every frame regardless (both branches of the
        // original conditional did this), so do it unconditionally.
        updatePreviewNode()
        updateLightEstimate()
        cutVolumeIfPlacedObjectIsInView()
    }

    /// - Tag: PlaceARContent
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        // Place content only for anchors found by plane detection.
        guard anchor is ARPlaneAnchor && previewNode != nil
            else { return }
        // Stop showing a preview version of the object to be placed.
        cupNode.removeFromParentNode()
        previewNode?.removeFromParentNode()
        previewNode = nil
        // Add the cupNode to the scene's root node using the anchor's position.
        guard let cameraTransform = sceneView.session.currentFrame?.camera.transform
            else { return }
        setNewVirtualObjectToAnchor(cupNode, to: anchor, cameraTransform: cameraTransform)
        sceneView.scene.rootNode.addChildNode(cupNode)
        // Disable plane detection after the model has been added by re-running
        // the session without the planeDetection option... note: the original
        // sample re-enables .horizontal here; kept for behavioral parity.
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = .horizontal
        sceneView.session.run(configuration, options: [])
        // Set up positional audio to play in case the object moves offscreen.
        playSound()
    }

    func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
        let message: String
        // Inform the user of their camera tracking state.
        switch camera.trackingState {
        case .notAvailable:
            message = "Tracking unavailable"
        case .normal:
            message = "Tracking normal"
        case .limited(.excessiveMotion):
            message = "Tracking limited - Too much camera movement"
        case .limited(.insufficientFeatures):
            message = "Tracking limited - Not enough surface detail"
        case .limited(.initializing):
            message = "Initializing AR Session"
        }
        sessionInfoLabel.text = message
    }

    func session(_ session: ARSession, didFailWithError error: Error) {
        // Present an error message to the user.
        sessionInfoLabel.text = "Session failed: \(error.localizedDescription)"
        resetTracking()
    }

    func sessionWasInterrupted(_ session: ARSession) {
        // Inform the user that the session has been interrupted, for example, by presenting an overlay.
        sessionInfoLabel.text = "Session was interrupted"
        resetTracking()
    }

    func sessionInterruptionEnded(_ session: ARSession) {
        // Reset tracking and/or remove existing anchors if consistent tracking is required.
        sessionInfoLabel.text = "Session interruption ended"
        resetTracking()
    }

    // MARK: - Preview Node

    /*
     Loads the cup model (`cupNode`) that is used for the duration of the app.
     Initializes a `PreviewNode` that contains the `cupNode` and adds it to the node hierarchy.
     */
    func setupPreviewNode() {
        if cupNode.childNode(withName: "cup", recursively: false) == nil {
            // Load the cup scene from the bundle only once.
            let modelScene = SCNScene(named: "Assets.scnassets/cup/cup.scn")!
            // Get a handle to the cup model.
            let cup = modelScene.rootNode.childNode(withName: "cup", recursively: true)!
            // Set the cup model onto `cupNode`.
            cupNode.addChildNode(cup)
        }
        // Initialize `previewNode` to display the cup model, binding locally to
        // avoid force-unwrapping the property immediately after assignment.
        let preview = PreviewNode(node: cupNode)
        previewNode = preview
        // Add the preview to the node hierarchy.
        sceneView.scene.rootNode.addChildNode(preview)
    }

    /*
     `previewNode` exists when ARKit is finding a plane. During this time, get a world position for the areas closest to the scene's point of view that ARKit believes might be a plane, and use it to update the `previewNode` position.
     */
    func updatePreviewNode() {
        guard let node = previewNode else { return }
        let (worldPosition, planeAnchor, _) = worldPositionFromScreenPosition(screenCenter,
                                                                             in: sceneView,
                                                                             objectPos: node.simdPosition)
        if let position = worldPosition {
            node.update(for: position, planeAnchor: planeAnchor, camera: sceneView.session.currentFrame?.camera)
        }
    }

    // MARK: - Sound

    /*
     Determines whether the `cupNode` is visible. If the `cupNode` isn't visible, a sound is played using
     SceneKit's positional audio functionality to locate the `cupNode`.
     */
    func cutVolumeIfPlacedObjectIsInView() {
        guard previewNode == nil, let pointOfView = sceneView.pointOfView else { return }
        guard let player = cupNode.audioPlayers.first,
            let avNode = player.audioNode as? AVAudioMixing else {
            return
        }
        // Mute when visible, full volume when the object is outside the frustum.
        let placedObjectIsInView = sceneView.isNode(cupNode, insideFrustumOf: pointOfView)
        avNode.volume = placedObjectIsInView ? 0.0 : 1.0
    }

    // Plays a sound on the cupNode using SceneKit's positional audio.
    /// - Tag: AddAudioPlayer
    func playSound() {
        // Attach at most one audio player, and only once the source is loaded.
        // (`source` is set in `viewDidLoad`, so the guard is a safety net
        // rather than a force unwrap that could crash.)
        guard cupNode.audioPlayers.isEmpty, let source = source else { return }
        cupNode.addAudioPlayer(SCNAudioPlayer(source: source))
    }
}

View File

@ -0,0 +1,11 @@
//
// SampleCode.xcconfig
//
// The `SAMPLE_CODE_DISAMBIGUATOR` configuration is to make it easier to build
// and run a sample code project. Once you set your project's development team,
// you'll have a unique bundle identifier. This is because the bundle identifier
// is derived based on the 'SAMPLE_CODE_DISAMBIGUATOR' value. Do not use this
// approach in your own projects—it's only useful for sample code projects because
// they are frequently downloaded and don't have a development team set.
SAMPLE_CODE_DISAMBIGUATOR=${DEVELOPMENT_TEAM}

42
AudioInARKit/LICENSE.txt Normal file
View File

@ -0,0 +1,42 @@
Sample code project: Audio in ARKit
Version: 1.1
IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Copyright (C) 2017 Apple Inc. All Rights Reserved.

119
AudioInARKit/README.md Normal file
View File

@ -0,0 +1,119 @@
# Audio in ARKit
Places a 3D object on the first plane ARKit detects and uses SceneKit's positional audio feature to locate the object when it is outside of the camera's view finder.
## Overview
This sample app runs an [ARKit][0] world tracking session with content displayed in a SceneKit view. To demonstrate plane detection, the app simply places a 3D model onto the first plane that ARKit detects. If the model's position is outside the current field of view of the camera, the app uses SceneKit's positional audio feature to indicate which direction to turn the device to see the model.
[0]:https://developer.apple.com/documentation/arkit
## Getting Started
ARKit requires iOS 11 and a device with an A9 (or later) processor. ARKit is not available in iOS Simulator. Building the sample code requires Xcode 9 or later.
## Configure and Run the AR Session
The [`ARSCNView`][1] class is a SceneKit view that includes an [`ARSession`][2] object that manages the motion tracking and image processing required to create an AR experience. However, to run a session you must provide a session configuration.
[1]:https://developer.apple.com/documentation/arkit/arscnview
[2]:https://developer.apple.com/documentation/arkit/arsession
![Architecture diagram: an ARKit view owns an ARSession, which requires an ARConfiguration to run.](Documentation/AR-View-Session-Configuration.png)
The [`ARWorldTrackingConfiguration`][3] class provides high-precision motion tracking and enables features to help you place virtual content in relation to real-world surfaces. To start an AR session, create a session configuration object with the options you want (such as plane detection), then call the [`run(_:options:)`][4] method on the [`session`][5] object of your [`ARSCNView`][1] instance:
```
guard ARWorldTrackingConfiguration.isSupported
else { showUnsupportedDeviceError(); return }
// Start the ARSession.
let configuration = ARWorldTrackingConfiguration()
configuration.planeDetection = .horizontal
sceneView.session.run(configuration)
```
[View in Source](x-source-tag://StartARSession)
[3]:https://developer.apple.com/documentation/arkit/arworldtrackingconfiguration
[4]:https://developer.apple.com/documentation/arkit/arsession/2875735-run
[5]:https://developer.apple.com/documentation/arkit/arscnview/2865796-session
> **Important:** If your app requires ARKit for its core functionality, use the `arkit` key in the `UIRequiredDeviceCapabilities` section of your app's Info.plist file to make your app available only on devices that support ARKit. If AR is a secondary feature of your app, use the `isSupported` property to determine whether to offer AR-based features.
[7]:https://developer.apple.com/documentation/arkit/arconfiguration/2923553-issupported
## Placing 3D Content on Detected Planes
After you've set up your AR session, you can use SceneKit to place virtual content in the view.
When plane detection is enabled, ARKit adds and updates anchors for each detected plane. By default, the [`ARSCNView`][1] class adds an [`SCNNode`][8] object to the SceneKit scene for each anchor. Your view's delegate can implement the [`renderer(_:didAdd:for:)`][9] method to add content to the scene.
```
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
// Place content only for anchors found by plane detection.
guard anchor is ARPlaneAnchor && previewNode != nil
else { return }
// Stop showing a preview version of the object to be placed.
cupNode.removeFromParentNode()
previewNode?.removeFromParentNode()
previewNode = nil
// Add the cupNode to the scene's root node using the anchor's position.
guard let cameraTransform = sceneView.session.currentFrame?.camera.transform
else { return }
setNewVirtualObjectToAnchor(cupNode, to: anchor, cameraTransform: cameraTransform)
sceneView.scene.rootNode.addChildNode(cupNode)
// Disable plane detection after the model has been added.
let configuration = ARWorldTrackingConfiguration()
configuration.planeDetection = .horizontal
sceneView.session.run(configuration, options: [])
// Set up positional audio to play in case the object moves offscreen.
playSound()
}
```
[View in Source](x-source-tag://PlaceARContent)
[8]:https://developer.apple.com/documentation/scenekit/scnnode
[9]:https://developer.apple.com/documentation/arkit/arscnviewdelegate/2865794-renderer
If you add content as a child of the node corresponding to the anchor, the `ARSCNView` class automatically moves that content as ARKit refines its estimate of the plane's position and extent. This sample app instead adds content as a child of the scene's root node, but using the transform provided by the anchor — this alternative technique is a way to keep the content from moving after placement.
## Using SceneKit Audio with ARKit
This sample configures an audio player that plays audio whenever the cup node is no longer in view to help the user locate it.
The [`SCNAudioSource`][10] class represents an audio source that can be added to any [`SCNNode`][8] instance. To support positional audio in SceneKit, your application can create instances of [`SCNAudioSource`][10] with a file URL pointing to an audio file. Because SceneKit's audio engine uses panning for 3D positional purposes, you must use mono audio files for best results.
```
let source = SCNAudioSource(fileNamed: "Assets.scnassets/ping.aif")!
source.loops = true
source.load()
```
To add the [`SCNAudioSource`][10] to the SceneKit graph, you need to initialize an instance of [`SCNAudioPlayer`][11] using that instance of [`SCNAudioSource`][10] and then add the audio player to the [`SCNNode`][8].
```
let audioPlayer = SCNAudioPlayer(source: source)
cupNode.addAudioPlayer(audioPlayer)
```
[View in Source](x-source-tag://AddAudioPlayer)
Attaching the audio player to an [`SCNNode`][8] object allows for spatialized 3D audio playback based on the position of that node relative to the scene's point of view.
[10]:https://developer.apple.com/documentation/scenekit/scnaudiosource
[11]:https://developer.apple.com/documentation/scenekit/scnaudioplayer
To control playback of the audio player, your view's delegate can implement the [`renderer(_:updateAtTime:)`][12] method to update the playback state of the audio player as needed. This sample uses this delegate callback to control the volume of the audio player based on whether the `cupNode` is visible.
```
func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
updatePreviewNode()
updateLightEstimate()
cutVolumeIfPlacedObjectIsInView()
}
```
[View in Source](x-source-tag://UpdateAudioPlayback)