diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 0000000..0708076
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1,4 @@
+# These are supported funding model platforms
+
+github: [shu223]
+custom: ['https://paypal.me/shu223', 'https://note.com/shu223/m/me1aa6761ab16']
diff --git a/.gitignore b/.gitignore
index cab8c69..dbc0efe 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,8 @@
#
# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
+iOS-10-Sampler/Resources/network_params/logos
+
## Build generated
build/
DerivedData/
diff --git a/README.md b/README.md
index fd3c590..c051c45 100644
--- a/README.md
+++ b/README.md
@@ -12,138 +12,141 @@
Code examples for new APIs of iOS 10.
-##How to build
+## How to build
Just build with Xcode 8.
It can **NOT** run on **Simulator**. (Because it uses Metal.)
-##Contents
+## Contents
-###Speech Recognition
+### Speech Recognition
Speech Recognition demo using Speech Framework. All available languages can be selected.
-###Looper
+### Looper
Loop playback demo using AVPlayerLooper.
-###Live Photo Capturing
+### Live Photo Capturing
Live Photo Capturing example using AVCapturePhotoOutput.
-###Audio Fade-in/out
+### Audio Fade-in/out
-Audio fade-in/out demo using `setVolume:fadeDuration` method which is added to AVAudioPlayer.
+Audio fade-in/out demo using the `setVolume:fadeDuration` method, which was added to `AVAudioPlayer`.
-###Metal CNN Basic: Digit Detection
+### Metal CNN Basic: Digit Detection
Hand-writing digit detection using CNN (Convolutional Neural Network) by Metal Performance Shaders.
-###Metal CNN Advanced: Image Recognition
+### Metal CNN Advanced: Image Recognition
Real-time image recognition using CNN (Convolutional Neural Network) by Metal Performance Shaders.
-###PropertyAnimator: Position
+### PropertyAnimator: Position
-Animating UIView's `center` & `backgroundColor` using UIViewPropertyAnimator.
+Animating UIView's `center` & `backgroundColor` using `UIViewPropertyAnimator`.
-###PropertyAnimator: Blur
+### PropertyAnimator: Blur
-Animating blur effect using `fractionComplete` property of UIViewPropertyAnimator.
+Animating blur effect using `fractionComplete` property of `UIViewPropertyAnimator`.
-###Preview Interaction
+### Preview Interaction
**Peek & Pop interactions with 3D touch** using UIPreviewInteraction.
-###Notification with Image
+### Notification with Image
Local notification with an image using UserNotifications framework.
-###Sticker Pack
+### Sticker Pack
Example of Sticker Pack for iMessage.
-###Core Data Stack (Created by [nolili](https://github.com/nolili))
+### Core Data Stack (Created by [nolili](https://github.com/nolili))
Simple Core Data stack using NSPersistentContainer.
-###TabBar Customization
+### TabBar Customization
Customization sample for UITabBar's badge using text attributes.
-###New filters
+### New filters
New filters of CIFilter in Core Image.
-###New Fonts
+### New Fonts
New Fonts gallery
-###Proactive: Location Suggestions
+### Proactive: Location Suggestions
This sample demonstrates how to use new `mapItem` property of NSUserActivity to integrate with location suggestions.
-###Attributed Speech
+### Attributed Speech
-Attributed Speech demo using `attributedSpeechString` of AVSpeechUtterance.
+Attributed Speech demo with `AVSpeechSynthesisIPANotationAttribute` for `AVSpeechUtterance`.
-But it seems **NOT** to affect the speech with this attribute. Anyone, please let me know how to use this new API.
-###Haptic Feedback
+### Haptic Feedback
Haptic Feedbacks using UIFeedbackGenerator.
-##Author
+## Author
**Shuichi Tsutsumi**
Freelance iOS programmer in Japan.
+
+
+
+
- PAST WORKS: [My Profile Summary](https://medium.com/@shu223/my-profile-summary-f14bfc1e7099#.vdh0i7clr)
-- PROFILES: [LinkedIn](https://www.linkedin.com/profile/view?id=214896557)
+- PROFILES: [LinkedIn](https://www.linkedin.com/in/shuichi-tsutsumi-525b755b/)
- BLOGS: [English](https://medium.com/@shu223/) / [Japanese](http://d.hatena.ne.jp/shu223/)
- CONTACTS: [Twitter](https://twitter.com/shu223) / [Facebook](https://www.facebook.com/shuichi.tsutsumi)
-##Special Thanks
+## Special Thanks
The icon is designed by [Okazu](https://www.facebook.com/pashimo)
diff --git a/iOS-10-Sampler.xcodeproj/project.pbxproj b/iOS-10-Sampler.xcodeproj/project.pbxproj
index 75b2346..2fd4e7e 100644
--- a/iOS-10-Sampler.xcodeproj/project.pbxproj
+++ b/iOS-10-Sampler.xcodeproj/project.pbxproj
@@ -3,7 +3,7 @@
archiveVersion = 1;
classes = {
};
- objectVersion = 46;
+ objectVersion = 54;
objects = {
/* Begin PBXBuildFile section */
@@ -26,10 +26,9 @@
8A1CC3621D7AAFBC00562709 /* MetalCNNBasicViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1CC3611D7AAFBC00562709 /* MetalCNNBasicViewController.swift */; };
8A1CC3641D7AAFCC00562709 /* MetalCNNBasic.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8A1CC3631D7AAFCC00562709 /* MetalCNNBasic.storyboard */; };
8A1CC37F1D7AB10D00562709 /* DrawView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1CC37B1D7AB10D00562709 /* DrawView.swift */; };
- 8A1CC3811D7AB10D00562709 /* SlimMPSCNN.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1CC37D1D7AB10D00562709 /* SlimMPSCNN.swift */; };
8A1CC3891D7ABCE400562709 /* MNISTDeepCNN.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1CC3881D7ABCE400562709 /* MNISTDeepCNN.swift */; };
8A1CC38F1D7AC22E00562709 /* Stickers.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1CC38E1D7AC22E00562709 /* Stickers.xcassets */; };
- 8A1CC3931D7AC22E00562709 /* SamplerStickerPack.appex in Embed App Extensions */ = {isa = PBXBuildFile; fileRef = 8A1CC38C1D7AC22E00562709 /* SamplerStickerPack.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
+ 8A1CC3931D7AC22E00562709 /* SamplerStickerPack.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = 8A1CC38C1D7AC22E00562709 /* SamplerStickerPack.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
8A1CC3991D7AC43C00562709 /* StickerPackViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1CC3981D7AC43C00562709 /* StickerPackViewController.swift */; };
8A1CC39B1D7AC44A00562709 /* StickerPack.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8A1CC39A1D7AC44A00562709 /* StickerPack.storyboard */; };
8A3CD9B01DA539F000039F36 /* bias_conv1.dat in Resources */ = {isa = PBXBuildFile; fileRef = 8A3CD9A81DA539F000039F36 /* bias_conv1.dat */; };
@@ -242,6 +241,8 @@
8A5AE0B91D7D50E80095209E /* LivePhotoCapture.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8A5AE0B81D7D50E80095209E /* LivePhotoCapture.storyboard */; };
8A5AE0BC1D7D58AB0095209E /* LivePhotoCaptureDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A5AE0BB1D7D58AB0095209E /* LivePhotoCaptureDelegate.swift */; };
8A5AE0BE1D7D61C70095209E /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A5AE0BD1D7D61C70095209E /* PreviewView.swift */; };
+ 8A662A7F20CC94CE00EF23A2 /* SlimMPSCNNConvolution.m in Sources */ = {isa = PBXBuildFile; fileRef = 8A662A7D20CC94CE00EF23A2 /* SlimMPSCNNConvolution.m */; };
+ 8A662A8220CC955700EF23A2 /* SlimMPSCNNFullyConnected.m in Sources */ = {isa = PBXBuildFile; fileRef = 8A662A8120CC955700EF23A2 /* SlimMPSCNNFullyConnected.m */; };
8A6CE2EC1D854B9E00DC79B4 /* LivePhotoCaptureSessionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A6CE2EB1D854B9E00DC79B4 /* LivePhotoCaptureSessionManager.swift */; };
8A736F161D7A892D00A58684 /* AudioFadeInOutViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A736F151D7A892D00A58684 /* AudioFadeInOutViewController.swift */; };
8A736F181D7A893A00A58684 /* AudioFadeInOut.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8A736F171D7A893A00A58684 /* AudioFadeInOut.storyboard */; };
@@ -264,7 +265,6 @@
8AB430601D7A6B4900A3BD98 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8AB4305F1D7A6B4900A3BD98 /* Assets.xcassets */; };
8AB430631D7A6B4900A3BD98 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8AB430611D7A6B4900A3BD98 /* LaunchScreen.storyboard */; };
8AB4306E1D7A6B4A00A3BD98 /* iOS_10_SamplerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8AB4306D1D7A6B4A00A3BD98 /* iOS_10_SamplerTests.swift */; };
- 8AB430791D7A6B4A00A3BD98 /* iOS_10_SamplerUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8AB430781D7A6B4A00A3BD98 /* iOS_10_SamplerUITests.swift */; };
8AB430871D7A6DA700A3BD98 /* RootViewCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8AB430861D7A6DA700A3BD98 /* RootViewCell.swift */; };
8AB430891D7A6DF300A3BD98 /* SampleDataSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8AB430881D7A6DF300A3BD98 /* SampleDataSource.swift */; };
8ACF88331E1B8F7C00E1555B /* HapticViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8ACF88321E1B8F7C00E1555B /* HapticViewController.swift */; };
@@ -291,25 +291,18 @@
remoteGlobalIDString = 8AB430541D7A6B4900A3BD98;
remoteInfo = "iOS-10-Sampler";
};
- 8AB430751D7A6B4A00A3BD98 /* PBXContainerItemProxy */ = {
- isa = PBXContainerItemProxy;
- containerPortal = 8AB4304D1D7A6B4900A3BD98 /* Project object */;
- proxyType = 1;
- remoteGlobalIDString = 8AB430541D7A6B4900A3BD98;
- remoteInfo = "iOS-10-Sampler";
- };
/* End PBXContainerItemProxy section */
/* Begin PBXCopyFilesBuildPhase section */
- 8A1CC3971D7AC22E00562709 /* Embed App Extensions */ = {
+ 8A1CC3971D7AC22E00562709 /* Embed Foundation Extensions */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "";
dstSubfolderSpec = 13;
files = (
- 8A1CC3931D7AC22E00562709 /* SamplerStickerPack.appex in Embed App Extensions */,
+ 8A1CC3931D7AC22E00562709 /* SamplerStickerPack.appex in Embed Foundation Extensions */,
);
- name = "Embed App Extensions";
+ name = "Embed Foundation Extensions";
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXCopyFilesBuildPhase section */
@@ -333,7 +326,6 @@
8A1CC3611D7AAFBC00562709 /* MetalCNNBasicViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MetalCNNBasicViewController.swift; sourceTree = ""; };
8A1CC3631D7AAFCC00562709 /* MetalCNNBasic.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; path = MetalCNNBasic.storyboard; sourceTree = ""; };
8A1CC37B1D7AB10D00562709 /* DrawView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = DrawView.swift; path = MetalCNNBasicApple/DrawView.swift; sourceTree = ""; };
- 8A1CC37D1D7AB10D00562709 /* SlimMPSCNN.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SlimMPSCNN.swift; sourceTree = ""; };
8A1CC3851D7AB15B00562709 /* iOS10Sampler-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "iOS10Sampler-Bridging-Header.h"; sourceTree = ""; };
8A1CC3881D7ABCE400562709 /* MNISTDeepCNN.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MNISTDeepCNN.swift; sourceTree = ""; };
8A1CC38C1D7AC22E00562709 /* SamplerStickerPack.appex */ = {isa = PBXFileReference; explicitFileType = "wrapper.app-extension"; includeInIndex = 0; path = SamplerStickerPack.appex; sourceTree = BUILT_PRODUCTS_DIR; };
@@ -552,6 +544,10 @@
8A5AE0B81D7D50E80095209E /* LivePhotoCapture.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; path = LivePhotoCapture.storyboard; sourceTree = ""; };
8A5AE0BB1D7D58AB0095209E /* LivePhotoCaptureDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LivePhotoCaptureDelegate.swift; sourceTree = ""; };
8A5AE0BD1D7D61C70095209E /* PreviewView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = ""; };
+ 8A662A7D20CC94CE00EF23A2 /* SlimMPSCNNConvolution.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SlimMPSCNNConvolution.m; sourceTree = ""; };
+ 8A662A7E20CC94CE00EF23A2 /* SlimMPSCNNConvolution.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SlimMPSCNNConvolution.h; sourceTree = ""; };
+ 8A662A8020CC955700EF23A2 /* SlimMPSCNNFullyConnected.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SlimMPSCNNFullyConnected.h; sourceTree = ""; };
+ 8A662A8120CC955700EF23A2 /* SlimMPSCNNFullyConnected.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SlimMPSCNNFullyConnected.m; sourceTree = ""; };
8A6CE2EB1D854B9E00DC79B4 /* LivePhotoCaptureSessionManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LivePhotoCaptureSessionManager.swift; sourceTree = ""; };
8A736F151D7A892D00A58684 /* AudioFadeInOutViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AudioFadeInOutViewController.swift; sourceTree = ""; };
8A736F171D7A893A00A58684 /* AudioFadeInOut.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; path = AudioFadeInOut.storyboard; sourceTree = ""; };
@@ -579,7 +575,6 @@
8AB430691D7A6B4A00A3BD98 /* iOS-10-SamplerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "iOS-10-SamplerTests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
8AB4306D1D7A6B4A00A3BD98 /* iOS_10_SamplerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = iOS_10_SamplerTests.swift; sourceTree = ""; };
8AB4306F1D7A6B4A00A3BD98 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
- 8AB430741D7A6B4A00A3BD98 /* iOS-10-SamplerUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "iOS-10-SamplerUITests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
8AB430781D7A6B4A00A3BD98 /* iOS_10_SamplerUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = iOS_10_SamplerUITests.swift; sourceTree = ""; };
8AB4307A1D7A6B4A00A3BD98 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
8AB430861D7A6DA700A3BD98 /* RootViewCell.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RootViewCell.swift; sourceTree = ""; };
@@ -608,13 +603,6 @@
);
runOnlyForDeploymentPostprocessing = 0;
};
- 8AB430711D7A6B4A00A3BD98 /* Frameworks */ = {
- isa = PBXFrameworksBuildPhase;
- buildActionMask = 2147483647;
- files = (
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
@@ -696,7 +684,10 @@
8A1CC3781D7AB10D00562709 /* Common */ = {
isa = PBXGroup;
children = (
- 8A1CC37D1D7AB10D00562709 /* SlimMPSCNN.swift */,
+ 8A662A8020CC955700EF23A2 /* SlimMPSCNNFullyConnected.h */,
+ 8A662A8120CC955700EF23A2 /* SlimMPSCNNFullyConnected.m */,
+ 8A662A7E20CC94CE00EF23A2 /* SlimMPSCNNConvolution.h */,
+ 8A662A7D20CC94CE00EF23A2 /* SlimMPSCNNConvolution.m */,
);
name = Common;
path = MetalCNNBasicApple;
@@ -1110,7 +1101,6 @@
children = (
8AB430551D7A6B4900A3BD98 /* iOS-10-Sampler.app */,
8AB430691D7A6B4A00A3BD98 /* iOS-10-SamplerTests.xctest */,
- 8AB430741D7A6B4A00A3BD98 /* iOS-10-SamplerUITests.xctest */,
8A1CC38C1D7AC22E00562709 /* SamplerStickerPack.appex */,
);
name = Products;
@@ -1222,7 +1212,7 @@
8AB430511D7A6B4900A3BD98 /* Sources */,
8AB430521D7A6B4900A3BD98 /* Frameworks */,
8AB430531D7A6B4900A3BD98 /* Resources */,
- 8A1CC3971D7AC22E00562709 /* Embed App Extensions */,
+ 8A1CC3971D7AC22E00562709 /* Embed Foundation Extensions */,
);
buildRules = (
);
@@ -1252,32 +1242,15 @@
productReference = 8AB430691D7A6B4A00A3BD98 /* iOS-10-SamplerTests.xctest */;
productType = "com.apple.product-type.bundle.unit-test";
};
- 8AB430731D7A6B4A00A3BD98 /* iOS-10-SamplerUITests */ = {
- isa = PBXNativeTarget;
- buildConfigurationList = 8AB430831D7A6B4A00A3BD98 /* Build configuration list for PBXNativeTarget "iOS-10-SamplerUITests" */;
- buildPhases = (
- 8AB430701D7A6B4A00A3BD98 /* Sources */,
- 8AB430711D7A6B4A00A3BD98 /* Frameworks */,
- 8AB430721D7A6B4A00A3BD98 /* Resources */,
- );
- buildRules = (
- );
- dependencies = (
- 8AB430761D7A6B4A00A3BD98 /* PBXTargetDependency */,
- );
- name = "iOS-10-SamplerUITests";
- productName = "iOS-10-SamplerUITests";
- productReference = 8AB430741D7A6B4A00A3BD98 /* iOS-10-SamplerUITests.xctest */;
- productType = "com.apple.product-type.bundle.ui-testing";
- };
/* End PBXNativeTarget section */
/* Begin PBXProject section */
8AB4304D1D7A6B4900A3BD98 /* Project object */ = {
isa = PBXProject;
attributes = {
+ BuildIndependentTargetsInParallel = YES;
LastSwiftUpdateCheck = 0800;
- LastUpgradeCheck = 0810;
+ LastUpgradeCheck = 1530;
ORGANIZATIONNAME = "Shuichi Tsutsumi";
TargetAttributes = {
8A1CC38B1D7AC22E00562709 = {
@@ -1288,25 +1261,21 @@
8AB430541D7A6B4900A3BD98 = {
CreatedOnToolsVersion = 8.0;
DevelopmentTeam = 9Z86A4AWDE;
+ LastSwiftMigration = 1020;
ProvisioningStyle = Automatic;
};
8AB430681D7A6B4A00A3BD98 = {
CreatedOnToolsVersion = 8.0;
DevelopmentTeam = 9Z86A4AWDE;
- ProvisioningStyle = Automatic;
- TestTargetID = 8AB430541D7A6B4900A3BD98;
- };
- 8AB430731D7A6B4A00A3BD98 = {
- CreatedOnToolsVersion = 8.0;
- DevelopmentTeam = 9Z86A4AWDE;
+ LastSwiftMigration = 1020;
ProvisioningStyle = Automatic;
TestTargetID = 8AB430541D7A6B4900A3BD98;
};
};
};
buildConfigurationList = 8AB430501D7A6B4900A3BD98 /* Build configuration list for PBXProject "iOS-10-Sampler" */;
- compatibilityVersion = "Xcode 3.2";
- developmentRegion = English;
+ compatibilityVersion = "Xcode 12.0";
+ developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
@@ -1319,7 +1288,6 @@
targets = (
8AB430541D7A6B4900A3BD98 /* iOS-10-Sampler */,
8AB430681D7A6B4A00A3BD98 /* iOS-10-SamplerTests */,
- 8AB430731D7A6B4A00A3BD98 /* iOS-10-SamplerUITests */,
8A1CC38B1D7AC22E00562709 /* SamplerStickerPack */,
);
};
@@ -1571,13 +1539,6 @@
);
runOnlyForDeploymentPostprocessing = 0;
};
- 8AB430721D7A6B4A00A3BD98 /* Resources */ = {
- isa = PBXResourcesBuildPhase;
- buildActionMask = 2147483647;
- files = (
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
@@ -1586,6 +1547,7 @@
buildActionMask = 2147483647;
files = (
8A825D521D7AA4E6008D74EA /* AttributedSpeechViewController.swift in Sources */,
+ 8A662A7F20CC94CE00EF23A2 /* SlimMPSCNNConvolution.m in Sources */,
8AB430871D7A6DA700A3BD98 /* RootViewCell.swift in Sources */,
8A5AE0BC1D7D58AB0095209E /* LivePhotoCaptureDelegate.swift in Sources */,
8A081D131D87394200D7DDF4 /* UIViewController+alert.swift in Sources */,
@@ -1606,7 +1568,6 @@
8A1CC3891D7ABCE400562709 /* MNISTDeepCNN.swift in Sources */,
8C3A5F501D7EF7BE00B7A480 /* Message+CoreDataClass.swift in Sources */,
8A3F09AC1D83F7F700D28DF2 /* FontsViewController.swift in Sources */,
- 8A1CC3811D7AB10D00562709 /* SlimMPSCNN.swift in Sources */,
8C67369F1D7EFE7000BAAE02 /* iOS10Sampler.xcdatamodeld in Sources */,
8A6CE2EC1D854B9E00DC79B4 /* LivePhotoCaptureSessionManager.swift in Sources */,
8A081D071D8714C200D7DDF4 /* LooperViewController.swift in Sources */,
@@ -1620,6 +1581,7 @@
8A081D1A1D87932100D7DDF4 /* PreviewInteractionPopViewController.swift in Sources */,
8AB430891D7A6DF300A3BD98 /* SampleDataSource.swift in Sources */,
8A1CC3991D7AC43C00562709 /* StickerPackViewController.swift in Sources */,
+ 8A662A8220CC955700EF23A2 /* SlimMPSCNNFullyConnected.m in Sources */,
8A49EF461D7BF4D8003EE90B /* UserNotificationViewController.swift in Sources */,
8AA0743F1D86614400FEAC28 /* PropertyAnimatorEffectViewController.swift in Sources */,
8C3A5F521D7EF7BE00B7A480 /* PersistentContainerViewController.swift in Sources */,
@@ -1636,14 +1598,6 @@
);
runOnlyForDeploymentPostprocessing = 0;
};
- 8AB430701D7A6B4A00A3BD98 /* Sources */ = {
- isa = PBXSourcesBuildPhase;
- buildActionMask = 2147483647;
- files = (
- 8AB430791D7A6B4A00A3BD98 /* iOS_10_SamplerUITests.swift in Sources */,
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
@@ -1657,11 +1611,6 @@
target = 8AB430541D7A6B4900A3BD98 /* iOS-10-Sampler */;
targetProxy = 8AB4306A1D7A6B4A00A3BD98 /* PBXContainerItemProxy */;
};
- 8AB430761D7A6B4A00A3BD98 /* PBXTargetDependency */ = {
- isa = PBXTargetDependency;
- target = 8AB430541D7A6B4900A3BD98 /* iOS-10-Sampler */;
- targetProxy = 8AB430751D7A6B4A00A3BD98 /* PBXContainerItemProxy */;
- };
/* End PBXTargetDependency section */
/* Begin PBXVariantGroup section */
@@ -1712,20 +1661,30 @@
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
+ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
+ CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
+ CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
+ CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_SUSPICIOUS_MOVES = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
@@ -1735,6 +1694,7 @@
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
+ ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
@@ -1749,7 +1709,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
- IPHONEOS_DEPLOYMENT_TARGET = 10.0;
+ IPHONEOS_DEPLOYMENT_TARGET = 12.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
@@ -1762,20 +1722,30 @@
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
+ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
+ CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
+ CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
+ CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_SUSPICIOUS_MOVES = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
@@ -1785,6 +1755,7 @@
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
+ ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
@@ -1793,10 +1764,11 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
- IPHONEOS_DEPLOYMENT_TARGET = 10.0;
+ IPHONEOS_DEPLOYMENT_TARGET = 12.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
- SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
+ SWIFT_COMPILATION_MODE = wholemodule;
+ SWIFT_OPTIMIZATION_LEVEL = "-O";
VALIDATE_PRODUCT = YES;
};
name = Release;
@@ -1807,11 +1779,14 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = 9Z86A4AWDE;
INFOPLIST_FILE = "iOS-10-Sampler/Info.plist";
- LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/Frameworks",
+ );
PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-Sampler";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "iOS-10-Sampler/iOS10Sampler-Bridging-Header.h";
- SWIFT_VERSION = 3.0;
+ SWIFT_VERSION = 5.0;
};
name = Debug;
};
@@ -1821,11 +1796,14 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = 9Z86A4AWDE;
INFOPLIST_FILE = "iOS-10-Sampler/Info.plist";
- LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/Frameworks",
+ );
PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-Sampler";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "iOS-10-Sampler/iOS10Sampler-Bridging-Header.h";
- SWIFT_VERSION = 3.0;
+ SWIFT_VERSION = 5.0;
};
name = Release;
};
@@ -1836,10 +1814,14 @@
BUNDLE_LOADER = "$(TEST_HOST)";
DEVELOPMENT_TEAM = 9Z86A4AWDE;
INFOPLIST_FILE = "iOS-10-SamplerTests/Info.plist";
- LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/Frameworks",
+ "@loader_path/Frameworks",
+ );
PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-SamplerTests";
PRODUCT_NAME = "$(TARGET_NAME)";
- SWIFT_VERSION = 3.0;
+ SWIFT_VERSION = 5.0;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/iOS-10-Sampler.app/iOS-10-Sampler";
};
name = Debug;
@@ -1851,42 +1833,18 @@
BUNDLE_LOADER = "$(TEST_HOST)";
DEVELOPMENT_TEAM = 9Z86A4AWDE;
INFOPLIST_FILE = "iOS-10-SamplerTests/Info.plist";
- LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
+ LD_RUNPATH_SEARCH_PATHS = (
+ "$(inherited)",
+ "@executable_path/Frameworks",
+ "@loader_path/Frameworks",
+ );
PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-SamplerTests";
PRODUCT_NAME = "$(TARGET_NAME)";
- SWIFT_VERSION = 3.0;
+ SWIFT_VERSION = 5.0;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/iOS-10-Sampler.app/iOS-10-Sampler";
};
name = Release;
};
- 8AB430841D7A6B4A00A3BD98 /* Debug */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
- DEVELOPMENT_TEAM = 9Z86A4AWDE;
- INFOPLIST_FILE = "iOS-10-SamplerUITests/Info.plist";
- LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
- PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-SamplerUITests";
- PRODUCT_NAME = "$(TARGET_NAME)";
- SWIFT_VERSION = 3.0;
- TEST_TARGET_NAME = "iOS-10-Sampler";
- };
- name = Debug;
- };
- 8AB430851D7A6B4A00A3BD98 /* Release */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
- DEVELOPMENT_TEAM = 9Z86A4AWDE;
- INFOPLIST_FILE = "iOS-10-SamplerUITests/Info.plist";
- LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
- PRODUCT_BUNDLE_IDENTIFIER = "com.shu223.iOS-10-SamplerUITests";
- PRODUCT_NAME = "$(TARGET_NAME)";
- SWIFT_VERSION = 3.0;
- TEST_TARGET_NAME = "iOS-10-Sampler";
- };
- name = Release;
- };
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
@@ -1926,15 +1884,6 @@
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
- 8AB430831D7A6B4A00A3BD98 /* Build configuration list for PBXNativeTarget "iOS-10-SamplerUITests" */ = {
- isa = XCConfigurationList;
- buildConfigurations = (
- 8AB430841D7A6B4A00A3BD98 /* Debug */,
- 8AB430851D7A6B4A00A3BD98 /* Release */,
- );
- defaultConfigurationIsVisible = 0;
- defaultConfigurationName = Release;
- };
/* End XCConfigurationList section */
/* Begin XCVersionGroup section */
diff --git a/iOS-10-Sampler.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/iOS-10-Sampler.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
new file mode 100644
index 0000000..18d9810
--- /dev/null
+++ b/iOS-10-Sampler.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>IDEDidComputeMac32BitWarning</key>
+	<true/>
+</dict>
+</plist>
diff --git a/iOS-10-Sampler.xcodeproj/xcuserdata/shuichi.xcuserdatad/xcschemes/iOS-10-Sampler.xcscheme b/iOS-10-Sampler.xcodeproj/xcuserdata/shuichi.xcuserdatad/xcschemes/iOS-10-Sampler.xcscheme
index d857d72..ead9f9a 100644
--- a/iOS-10-Sampler.xcodeproj/xcuserdata/shuichi.xcuserdatad/xcschemes/iOS-10-Sampler.xcscheme
+++ b/iOS-10-Sampler.xcodeproj/xcuserdata/shuichi.xcuserdatad/xcschemes/iOS-10-Sampler.xcscheme
@@ -1,6 +1,6 @@
+
+
+
+
@@ -49,17 +58,6 @@
-
-
-
-
-
-
diff --git a/iOS-10-Sampler/AppDelegate.swift b/iOS-10-Sampler/AppDelegate.swift
--- a/iOS-10-Sampler/AppDelegate.swift
+++ b/iOS-10-Sampler/AppDelegate.swift
- func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
+ func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
// Override point for customization after application launch.
return true
}
diff --git a/iOS-10-Sampler/Assets.xcassets/AppIcon.appiconset/10-iTunesArtwork.png b/iOS-10-Sampler/Assets.xcassets/AppIcon.appiconset/10-iTunesArtwork.png
new file mode 100644
index 0000000..8c03172
Binary files /dev/null and b/iOS-10-Sampler/Assets.xcassets/AppIcon.appiconset/10-iTunesArtwork.png differ
diff --git a/iOS-10-Sampler/Assets.xcassets/AppIcon.appiconset/Contents.json b/iOS-10-Sampler/Assets.xcassets/AppIcon.appiconset/Contents.json
index 4482606..36b398a 100644
--- a/iOS-10-Sampler/Assets.xcassets/AppIcon.appiconset/Contents.json
+++ b/iOS-10-Sampler/Assets.xcassets/AppIcon.appiconset/Contents.json
@@ -45,6 +45,12 @@
"idiom" : "iphone",
"filename" : "Icon-App-60x60@3x.png",
"scale" : "3x"
+ },
+ {
+ "size" : "1024x1024",
+ "idiom" : "ios-marketing",
+ "filename" : "10-iTunesArtwork.png",
+ "scale" : "1x"
}
],
"info" : {
diff --git a/iOS-10-Sampler/Base.lproj/Main.storyboard b/iOS-10-Sampler/Base.lproj/Main.storyboard
index 7c49b02..a0a4a09 100644
--- a/iOS-10-Sampler/Base.lproj/Main.storyboard
+++ b/iOS-10-Sampler/Base.lproj/Main.storyboard
@@ -1,9 +1,9 @@
-
-
+
+
+
-
-
+
@@ -17,18 +17,23 @@
-
+
-
+
@@ -69,7 +75,6 @@
-
diff --git a/iOS-10-Sampler/RootViewController.swift b/iOS-10-Sampler/RootViewController.swift
index d08a65c..ea9af77 100644
--- a/iOS-10-Sampler/RootViewController.swift
+++ b/iOS-10-Sampler/RootViewController.swift
@@ -39,11 +39,11 @@ class RootViewController: UITableViewController {
// MARK: UITableViewDelegate
override func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat {
- return UITableViewAutomaticDimension
+ return UITableView.automaticDimension
}
override func tableView(_ tableView: UITableView, estimatedHeightForRowAt indexPath: IndexPath) -> CGFloat {
- return UITableViewAutomaticDimension
+ return UITableView.automaticDimension
}
override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
diff --git a/iOS-10-Sampler/Samples/AttributedSpeech.storyboard b/iOS-10-Sampler/Samples/AttributedSpeech.storyboard
index 5c5964c..43a7b6d 100644
--- a/iOS-10-Sampler/Samples/AttributedSpeech.storyboard
+++ b/iOS-10-Sampler/Samples/AttributedSpeech.storyboard
@@ -1,9 +1,9 @@
-
-
+
+
+
-
-
+
@@ -16,10 +16,11 @@
-
+
+
diff --git a/iOS-10-Sampler/Samples/AttributedSpeechViewController.swift b/iOS-10-Sampler/Samples/AttributedSpeechViewController.swift
index cda7caf..411cc9f 100644
--- a/iOS-10-Sampler/Samples/AttributedSpeechViewController.swift
+++ b/iOS-10-Sampler/Samples/AttributedSpeechViewController.swift
@@ -14,7 +14,7 @@ class AttributedSpeechViewController: UIViewController {
@IBOutlet private weak var label: UILabel!
private let speech = AVSpeechSynthesizer()
- private let baseStr = "iOS 10 Sampler is a collection of code examples for new APIs of iOS 10."
+ private let baseStr = "Tsutsumi"
private var attributedStr: NSMutableAttributedString!
private var utterance: AVSpeechUtterance!
@@ -22,12 +22,8 @@ class AttributedSpeechViewController: UIViewController {
super.viewDidLoad()
attributedStr = NSMutableAttributedString(string: baseStr)
- let rangeAll = NSMakeRange(0, baseStr.characters.count)
- let rangeBold = NSString(string: baseStr).range(of: "iOS")
- attributedStr.addAttributes([NSFontAttributeName: UIFont.systemFont(ofSize: 14)], range: rangeAll)
- attributedStr.addAttributes([NSForegroundColorAttributeName: UIColor.black], range: rangeAll)
- attributedStr.addAttributes([NSFontAttributeName: UIFont.boldSystemFont(ofSize: 20)], range: rangeBold)
-
+ let rangeAll = NSMakeRange(0, baseStr.count)
+ attributedStr.addAttribute(NSAttributedString.Key(rawValue: AVSpeechSynthesisIPANotationAttribute), value: "tən.tən.mi", range: rangeAll)
updateUtterance(attributed: false)
}
@@ -38,7 +34,7 @@ class AttributedSpeechViewController: UIViewController {
private func updateUtterance(attributed: Bool) {
if attributed {
utterance = AVSpeechUtterance(attributedString: attributedStr)
- label.attributedText = attributedStr
+ label.text = baseStr + " (attributed)"
} else {
utterance = AVSpeechUtterance(string: baseStr)
label.text = baseStr
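The hunk above replaces the cosmetic font/color attributes with `AVSpeechSynthesisIPANotationAttribute`, which asks the synthesizer to pronounce the attributed range from an IPA string instead of its spelling. A minimal, self-contained sketch of that usage (the IPA value mirrors the one in the patch; everything else is illustrative, not repo code):

```swift
import AVFoundation

// Hedged sketch: attach an IPA pronunciation to a range, then speak the
// attributed string. The attribute value replaces the plain spelling.
let text = "Tsutsumi"
let attributed = NSMutableAttributedString(string: text)
attributed.addAttribute(NSAttributedString.Key(rawValue: AVSpeechSynthesisIPANotationAttribute),
                        value: "tən.tən.mi",
                        range: NSRange(location: 0, length: text.count))

let synthesizer = AVSpeechSynthesizer()
synthesizer.speak(AVSpeechUtterance(attributedString: attributed))
```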
diff --git a/iOS-10-Sampler/Samples/HapticViewController.swift b/iOS-10-Sampler/Samples/HapticViewController.swift
index dd45935..7ed7a70 100644
--- a/iOS-10-Sampler/Samples/HapticViewController.swift
+++ b/iOS-10-Sampler/Samples/HapticViewController.swift
@@ -25,14 +25,14 @@ class HapticViewController: UIViewController {
}
@IBAction func impactBtnTapped(_ sender: UIButton) {
- guard let style = UIImpactFeedbackStyle(rawValue: sender.tag) else {fatalError()}
+ guard let style = UIImpactFeedbackGenerator.FeedbackStyle(rawValue: sender.tag) else {fatalError()}
impactFeedbacker = UIImpactFeedbackGenerator(style: style)
impactFeedbacker.prepare()
impactFeedbacker.impactOccurred()
}
@IBAction func notificationBtnTapped(_ sender: UIButton) {
- guard let type = UINotificationFeedbackType(rawValue: sender.tag) else {fatalError()}
+ guard let type = UINotificationFeedbackGenerator.FeedbackType(rawValue: sender.tag) else {fatalError()}
notificationFeedbacker.notificationOccurred(type)
}
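For reference, a small sketch of the three generator classes this controller drives, written against the nested Swift 4.2 type names the hunk migrates to (not code from this patch):

```swift
import UIKit

// Impact: a physical "thud" whose weight is chosen via FeedbackStyle.
let impact = UIImpactFeedbackGenerator(style: .heavy)
impact.prepare()            // warm up the Taptic Engine before the event
impact.impactOccurred()

// Notification: success / warning / error patterns via FeedbackType.
let notification = UINotificationFeedbackGenerator()
notification.notificationOccurred(.success)

// Selection: a light tick for selection changes.
let selection = UISelectionFeedbackGenerator()
selection.selectionChanged()
```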
diff --git a/iOS-10-Sampler/Samples/ImageFiltersViewController.swift b/iOS-10-Sampler/Samples/ImageFiltersViewController.swift
index d2171b7..01e661b 100644
--- a/iOS-10-Sampler/Samples/ImageFiltersViewController.swift
+++ b/iOS-10-Sampler/Samples/ImageFiltersViewController.swift
@@ -26,7 +26,7 @@ class ImageFiltersViewController: UIViewController, UIPickerViewDataSource, UIPi
available_iOS: 10,
category: kCICategoryBuiltIn,
exceptCategories: [kCICategoryGradient])
- print("filters:\(filters)\n")
+ print("filters:\(String(describing: filters))")
filters.insert("Original", at: 0)
}
@@ -34,7 +34,7 @@ class ImageFiltersViewController: UIViewController, UIPickerViewDataSource, UIPi
super.didReceiveMemoryWarning()
}
- private func applyFilter(name: String, handler: ((UIImage?) -> Void)) {
+ private func applyFilter(name: String, size: CGSize, scale: CGFloat, handler: ((UIImage?) -> Void)) {
let inputImage = CIImage(image: self.orgImage)!
guard let filter = CIFilter(name: name) else {fatalError()}
let attributes = filter.attributes
@@ -76,21 +76,19 @@ class ImageFiltersViewController: UIViewController, UIPickerViewDataSource, UIPi
return
}
- let size = self.imageView.frame.size
var extent = outputImage.extent
- let scale: CGFloat!
+ var imageScale = scale
// some outputImage have infinite extents. e.g. CIDroste
if extent.isInfinite {
- scale = UIScreen.main.scale
extent = CGRect(x: 0, y: 0, width: size.width, height: size.height)
} else {
- scale = extent.size.width / self.orgImage.size.width
+ imageScale = extent.size.width / self.orgImage.size.width
}
guard let cgImage = context.createCGImage(outputImage, from: extent) else {fatalError()}
- let image = UIImage(cgImage: cgImage, scale: scale, orientation: .up)
- print("extent:\(extent), image:\(image), org:\(self.orgImage), scale:\(scale)\n")
+ let image = UIImage(cgImage: cgImage, scale: imageScale, orientation: .up)
+ print("extent:\(extent), image:\(image), org:\(String(describing: self.orgImage)), scale:\(String(describing: scale))\n")
handler(image)
}
@@ -120,9 +118,12 @@ class ImageFiltersViewController: UIViewController, UIPickerViewDataSource, UIPi
}
indicator.startAnimating()
-
+
+ let size = self.imageView.frame.size
+ let scale = UIScreen.main.scale
+
DispatchQueue.global(qos: .default).async {
- self.applyFilter(name: self.filters[row], handler: { (image) in
+ self.applyFilter(name: self.filters[row], size: size, scale: scale, handler: { (image) in
DispatchQueue.main.async(execute: {
self.imageView.image = image
self.indicator.stopAnimating()
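The refactor above passes the view size and screen scale into `applyFilter` so the background block never touches UIKit. A simplified sketch of the same pattern with a fixed filter name (the helper and its signature are illustrative, not the repo's method):

```swift
import UIKit
import CoreImage

// Hedged sketch: all UIKit reads (e.g. UIScreen.main.scale) happen before
// dispatching; only Core Image / Core Graphics work runs on the queue.
func applyFilter(named name: String, to input: UIImage, displayScale: CGFloat,
                 completion: @escaping (UIImage?) -> Void) {
    DispatchQueue.global(qos: .default).async {
        guard let ciImage = CIImage(image: input),
              let filter = CIFilter(name: name) else {
            DispatchQueue.main.async { completion(nil) }
            return
        }
        filter.setValue(ciImage, forKey: kCIInputImageKey)
        let context = CIContext()
        guard let output = filter.outputImage,
              let cgImage = context.createCGImage(output, from: output.extent) else {
            DispatchQueue.main.async { completion(nil) }
            return
        }
        let image = UIImage(cgImage: cgImage, scale: displayScale, orientation: .up)
        DispatchQueue.main.async { completion(image) }
    }
}
```

The call site captures `imageView.frame.size` and `UIScreen.main.scale` on the main thread, exactly as the hunk above does.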
diff --git a/iOS-10-Sampler/Samples/Inception3Net.swift b/iOS-10-Sampler/Samples/Inception3Net.swift
index f99c437..f6fed23 100644
--- a/iOS-10-Sampler/Samples/Inception3Net.swift
+++ b/iOS-10-Sampler/Samples/Inception3Net.swift
@@ -127,8 +127,8 @@ class Inception3Net{
neuronFilter: relu,
device: device,
kernelParamsBinaryName: "conv" ,
- padding: false,
- strideXY: (2, 2))
+ padding: true,
+ strideX: 2, strideY: 2)
conv1 = SlimMPSCNNConvolution(kernelWidth: 3,
kernelHeight: 3,
@@ -195,6 +195,7 @@ class Inception3Net{
neuronFilter: relu,
device: device,
kernelParamsBinaryName: "mixed_tower_conv_1",
+ padding: true,
destinationFeatureChannelOffset: 64)
// branch3x3dbl
@@ -221,6 +222,7 @@ class Inception3Net{
neuronFilter: relu,
device: device,
kernelParamsBinaryName: "mixed_tower_1_conv_2",
+ padding: true,
destinationFeatureChannelOffset: 128)
@@ -232,6 +234,7 @@ class Inception3Net{
neuronFilter: relu,
device: device,
kernelParamsBinaryName: "mixed_tower_2_conv",
+ padding: true,
destinationFeatureChannelOffset: 224)
aPool = MPSCNNPoolingAverage(device: device!, kernelWidth: 3, kernelHeight: 3, strideInPixelsX: 1, strideInPixelsY: 1)
@@ -381,7 +384,8 @@ class Inception3Net{
device: device,
kernelParamsBinaryName: "mixed_3_conv",
padding: false,
- strideXY: (2,2))
+ strideX: 2,
+ strideY: 2)
// branch3x3dbl
m3t1conv0 = SlimMPSCNNConvolution(kernelWidth: 1,
@@ -408,7 +412,8 @@ class Inception3Net{
device: device,
kernelParamsBinaryName: "mixed_3_tower_conv_2",
padding: false,
- strideXY: (2,2),
+ strideX: 2,
+ strideY: 2,
destinationFeatureChannelOffset: 384)
// branch_pool
@@ -801,7 +806,8 @@ class Inception3Net{
device: device,
kernelParamsBinaryName: "mixed_8_tower_conv_1",
padding: false,
- strideXY: (2,2))
+ strideX: 2,
+ strideY: 2)
// branch7x7x3dbl
m8t1conv0 = SlimMPSCNNConvolution(kernelWidth: 1,
@@ -837,7 +843,8 @@ class Inception3Net{
device: device,
kernelParamsBinaryName: "mixed_8_tower_1_conv_3",
padding: false,
- strideXY: (2,2),
+ strideX: 2,
+ strideY: 2,
destinationFeatureChannelOffset: 320)
// branch_pool
@@ -1063,7 +1070,11 @@ class Inception3Net{
// In this sample code, the aggregate benefit of the use of MPSTemporaryImages
// is to reduce the area of memory allocated to 1/4 and save about 3 ms of CPU
// time.
- MPSTemporaryImage.prefetchStorage(with: commandBuffer, imageDescriptorList: [sid, inid, m0id, m1id, m2id, m3id, m4id, m5id, m6id, m7id, m8id, m9id, m10id])
+ let descriptors = [sid, inid, m0id, m1id, m2id, m3id, m4id, m5id, m6id, m7id, m8id, m9id, m10id]
+ for descriptor in descriptors {
+ descriptor.storageMode = .private
+ }
+ MPSTemporaryImage.prefetchStorage(with: commandBuffer, imageDescriptorList: descriptors)
// we use preImage to hold preprocesing intermediate results
preImage = MPSTemporaryImage(commandBuffer: commandBuffer, imageDescriptor: sid)
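The new loop sets each descriptor's storage mode to `.private` before prefetching, presumably because `MPSTemporaryImage` only supports privately stored backing memory on newer SDKs. The same idea in isolation (helper name is illustrative, not from the patch):

```swift
import MetalPerformanceShaders

// Hedged sketch: force private storage on every descriptor, prefetch the
// backing storage for the whole list, then build the temporary images.
func makeTemporaries(_ descriptors: [MPSImageDescriptor],
                     commandBuffer: MTLCommandBuffer) -> [MPSTemporaryImage] {
    for descriptor in descriptors {
        descriptor.storageMode = .private
    }
    MPSTemporaryImage.prefetchStorage(with: commandBuffer, imageDescriptorList: descriptors)
    return descriptors.map { MPSTemporaryImage(commandBuffer: commandBuffer, imageDescriptor: $0) }
}
```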
diff --git a/iOS-10-Sampler/Samples/LivePhotoCaptureDelegate.swift b/iOS-10-Sampler/Samples/LivePhotoCaptureDelegate.swift
index 3e1ead5..b5c05d1 100644
--- a/iOS-10-Sampler/Samples/LivePhotoCaptureDelegate.swift
+++ b/iOS-10-Sampler/Samples/LivePhotoCaptureDelegate.swift
@@ -63,28 +63,27 @@ class LivePhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
// =========================================================================
// MARK: - AVCapturePhotoCaptureDelegate
- func capture(_ captureOutput: AVCapturePhotoOutput, willBeginCaptureForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings) {
+ func photoOutput(_ captureOutput: AVCapturePhotoOutput, willBeginCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
if resolvedSettings.livePhotoMovieDimensions.width > 0 && resolvedSettings.livePhotoMovieDimensions.height > 0 {
capturingLivePhoto(true)
}
}
- func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
+ func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
if let photoSampleBuffer = photoSampleBuffer {
photoData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer)
}
- else {
+ else if let error = error {
print("Error capturing photo: \(error)")
- return
}
}
- func capture(_ captureOutput: AVCapturePhotoOutput, didFinishRecordingLivePhotoMovieForEventualFileAt outputFileURL: URL, resolvedSettings: AVCaptureResolvedPhotoSettings) {
+ func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishRecordingLivePhotoMovieForEventualFileAt outputFileURL: URL, resolvedSettings: AVCaptureResolvedPhotoSettings) {
capturingLivePhoto(false)
}
- func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingLivePhotoToMovieFileAt outputFileURL: URL, duration: CMTime, photoDisplay photoDisplayTime: CMTime, resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
- if let _ = error {
+ func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingLivePhotoToMovieFileAt outputFileURL: URL, duration: CMTime, photoDisplayTime: CMTime, resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
+ if let error = error {
print("Error processing live photo companion movie: \(error)")
return
}
@@ -92,7 +91,7 @@ class LivePhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
livePhotoCompanionMovieURL = outputFileURL
}
- func capture(_ captureOutput: AVCapturePhotoOutput, didFinishCaptureForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
+ func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings, error: Error?) {
if let error = error {
print("Error capturing photo: \(error)")
didFinish()
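The renames above adopt the `photoOutput(_:...)` selectors that Swift 4 exposes for `AVCapturePhotoCaptureDelegate`. For orientation only: the sample-buffer variant kept here was later superseded on iOS 11+ by the `AVCapturePhoto`-based callback. A sketch of that newer form (not part of this patch):

```swift
import AVFoundation

// Hypothetical delegate showing the iOS 11+ replacement for the
// didFinishProcessingPhoto sample-buffer callback used above.
class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate {
    private var photoData: Data?

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        if let error = error {
            print("Error capturing photo: \(error)")
            return
        }
        photoData = photo.fileDataRepresentation()
    }
}
```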
diff --git a/iOS-10-Sampler/Samples/LivePhotoCaptureSessionManager.swift b/iOS-10-Sampler/Samples/LivePhotoCaptureSessionManager.swift
index 48f4690..1c0638d 100644
--- a/iOS-10-Sampler/Samples/LivePhotoCaptureSessionManager.swift
+++ b/iOS-10-Sampler/Samples/LivePhotoCaptureSessionManager.swift
@@ -41,14 +41,14 @@ class LivePhotoCaptureSessionManager: NSObject {
session.beginConfiguration()
- session.sessionPreset = AVCaptureSessionPresetPhoto
+ session.sessionPreset = .photo
// Add video input.
do {
- let videoDevice = AVCaptureDevice.defaultDevice(
- withDeviceType: AVCaptureDeviceType.builtInWideAngleCamera,
- mediaType: AVMediaTypeVideo,
- position: .back)
+ guard let videoDevice = AVCaptureDevice.default(
+ .builtInWideAngleCamera,
+ for: .video,
+ position: .back) else {fatalError()}
let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
if session.canAddInput(videoDeviceInput) {
@@ -71,7 +71,7 @@ class LivePhotoCaptureSessionManager: NSObject {
// Add audio input.
do {
- let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
+ guard let audioDevice = AVCaptureDevice.default(for: .audio) else {fatalError()}
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
if session.canAddInput(audioDeviceInput) {
@@ -112,7 +112,7 @@ class LivePhotoCaptureSessionManager: NSObject {
// MARK: - Public
func authorize() {
- switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) {
+ switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
break
@@ -126,7 +126,7 @@ class LivePhotoCaptureSessionManager: NSObject {
create an AVCaptureDeviceInput for audio during session setup.
*/
sessionQueue.suspend()
- AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { [unowned self] granted in
+ AVCaptureDevice.requestAccess(for: .video, completionHandler: { [unowned self] granted in
if !granted {
self.setupResult = .notAuthorized
}
@@ -182,7 +182,7 @@ class LivePhotoCaptureSessionManager: NSObject {
sessionQueue.async {
// Update the photo output's connection to match the video orientation of the video preview layer.
- if let photoOutputConnection = self.photoOutput.connection(withMediaType: AVMediaTypeVideo) {
+ if let photoOutputConnection = self.photoOutput.connection(with: .video) {
photoOutputConnection.videoOrientation = videoOrientation
}
@@ -190,9 +190,9 @@ class LivePhotoCaptureSessionManager: NSObject {
let photoSettings = AVCapturePhotoSettings()
photoSettings.flashMode = .auto
photoSettings.isHighResolutionPhotoEnabled = true
- if photoSettings.availablePreviewPhotoPixelFormatTypes.count > 0 {
- photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String : photoSettings.availablePreviewPhotoPixelFormatTypes.first!]
- }
+// if photoSettings.availablePreviewPhotoPixelFormatTypes.count > 0 {
+// photoSettings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String : photoSettings.availablePreviewPhotoPixelFormatTypes.first!]
+// }
if self.photoOutput.isLivePhotoCaptureSupported { // Live Photo capture is not supported in movie mode.
let livePhotoMovieFileName = NSUUID().uuidString
let livePhotoMovieFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((livePhotoMovieFileName as NSString).appendingPathExtension("mov")!)
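The device-discovery changes above replace the removed `defaultDevice(withDeviceType:mediaType:position:)` calls with the optional-returning `AVCaptureDevice.default` APIs. A condensed sketch of the same input setup that guards instead of calling `fatalError()` (function name and error are illustrative):

```swift
import AVFoundation

// Hedged sketch: both device lookups can return nil, so they are guarded
// before the inputs are created and attached to the session.
func makePhotoSession() throws -> AVCaptureSession {
    let session = AVCaptureSession()
    session.beginConfiguration()
    session.sessionPreset = .photo

    guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
          let microphone = AVCaptureDevice.default(for: .audio) else {
        throw NSError(domain: "CaptureSetup", code: -1, userInfo: nil)
    }
    let videoInput = try AVCaptureDeviceInput(device: camera)
    if session.canAddInput(videoInput) { session.addInput(videoInput) }
    let audioInput = try AVCaptureDeviceInput(device: microphone)
    if session.canAddInput(audioInput) { session.addInput(audioInput) }

    session.commitConfiguration()
    return session
}
```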
diff --git a/iOS-10-Sampler/Samples/LivePhotoCaptureViewController.swift b/iOS-10-Sampler/Samples/LivePhotoCaptureViewController.swift
index 93d8068..21bfaac 100644
--- a/iOS-10-Sampler/Samples/LivePhotoCaptureViewController.swift
+++ b/iOS-10-Sampler/Samples/LivePhotoCaptureViewController.swift
@@ -96,8 +96,8 @@ class LivePhotoCaptureViewController: UIViewController {
// MARK: - Actions
@IBAction private func capturePhoto(_ photoButton: UIButton) {
- let videoPreviewLayerOrientation = previewView.videoPreviewLayer.connection.videoOrientation
- LivePhotoCaptureSessionManager.sharedManager.capture(videoOrientation: videoPreviewLayerOrientation) { (inProgressLivePhotoCapturesCount) in
+ let videoPreviewLayerOrientation = previewView.videoPreviewLayer.connection?.videoOrientation
+ LivePhotoCaptureSessionManager.sharedManager.capture(videoOrientation: videoPreviewLayerOrientation!) { (inProgressLivePhotoCapturesCount) in
DispatchQueue.main.async { [unowned self] in
if inProgressLivePhotoCapturesCount > 0 {
self.capturingLivePhotoLabel.isHidden = false
diff --git a/iOS-10-Sampler/Samples/Looper.swift b/iOS-10-Sampler/Samples/Looper.swift
index a084c93..2010760 100644
--- a/iOS-10-Sampler/Samples/Looper.swift
+++ b/iOS-10-Sampler/Samples/Looper.swift
@@ -40,7 +40,7 @@ class Looper: NSObject {
// Getting the natural size of the video
// http://stackoverflow.com/questions/14466842/ios-6-avplayeritem-presentationsize-returning-zero-naturalsize-method-deprec
- let videoTracks = playerItem.asset.tracks(withMediaType: AVMediaTypeVideo)
+ let videoTracks = playerItem.asset.tracks(withMediaType: .video)
guard let videoSize = videoTracks.first?.naturalSize else {fatalError()}
parentLayer.addSublayer(playerLayer)
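The only change here is the typed `.video` media type. For completeness, reading a track's natural size with the new spelling (the file URL is a placeholder):

```swift
import AVFoundation

// Placeholder URL; tracks(withMediaType:) now takes AVMediaType values.
let asset = AVAsset(url: URL(fileURLWithPath: "/tmp/example.mov"))
let videoSize = asset.tracks(withMediaType: .video).first?.naturalSize
```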
diff --git a/iOS-10-Sampler/Samples/MNISTDeepCNN.swift b/iOS-10-Sampler/Samples/MNISTDeepCNN.swift
index c460198..fd56d91 100644
--- a/iOS-10-Sampler/Samples/MNISTDeepCNN.swift
+++ b/iOS-10-Sampler/Samples/MNISTDeepCNN.swift
@@ -38,7 +38,7 @@ class MNISTDeepCNN {
var commandQueue : MTLCommandQueue
var device : MTLDevice
- init(withCommandQueue commandQueueIn: MTLCommandQueue!) {
+ init(withCommandQueue commandQueueIn: MTLCommandQueue) {
commandQueue = commandQueueIn
device = commandQueueIn.device
@@ -116,7 +116,7 @@ class MNISTDeepCNN {
// so the user can decide the appropriate time to release this
autoreleasepool{
// Get command buffer to use in MetalPerformanceShaders.
- let commandBuffer = commandQueue.makeCommandBuffer()
+ guard let commandBuffer = commandQueue.makeCommandBuffer() else {return}
// output will be stored in this image
let finalLayer = MPSImage(device: commandBuffer.device, imageDescriptor: did)
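`makeCommandBuffer()` returns an optional in current SDKs, hence the `guard` above. The same pattern in isolation (helper name is illustrative):

```swift
import MetalPerformanceShaders

// Hedged sketch: bail out early if the queue cannot vend a command buffer,
// then encode one MPS kernel and submit the work.
func encode(_ kernel: MPSCNNKernel, queue: MTLCommandQueue,
            source: MPSImage, destination: MPSImage) {
    guard let commandBuffer = queue.makeCommandBuffer() else { return }
    kernel.encode(commandBuffer: commandBuffer,
                  sourceImage: source,
                  destinationImage: destination)
    commandBuffer.commit()
    commandBuffer.waitUntilCompleted()
}
```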
diff --git a/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNN.swift b/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNN.swift
index 2b7975c..3cfdc8d 100755
--- a/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNN.swift
+++ b/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNN.swift
@@ -21,7 +21,7 @@ class SlimMPSCNNConvolution: MPSCNNConvolution{
/**
A property to keep info from init time whether we will pad input image or not for use during encode call
*/
- private var padding = true
+ internal var isPadding = true
/**
Initializes a fully connected kernel.
@@ -34,7 +34,7 @@ class SlimMPSCNNConvolution: MPSCNNConvolution{
- neuronFilter: A neuronFilter to add at the end as activation, default is nil
- device: The MTLDevice on which this SlimMPSCNNConvolution filter will be used
- kernelParamsBinaryName: name of the layer to fetch kernelParameters by adding a prefix "weights_" or "bias_"
- - padding: Bool value whether to use padding or not
+ - isPadding: Bool value whether to use padding or not
- strideXY: Stride of the filter
- destinationFeatureChannelOffset: FeatureChannel no. in the destination MPSImage to start writing from, helps with concat operations
- groupNum: if grouping is used, default value is 1 meaning no groups
@@ -93,7 +93,7 @@ class SlimMPSCNNConvolution: MPSCNNConvolution{
self.destinationFeatureChannelOffset = Int(destinationFeatureChannelOffset)
// set padding for calculation of offset during encode call
- padding = willPad
+ isPadding = willPad
// unmap files at initialization of MPSCNNConvolution, the weights are copied and packed internally we no longer require these
assert(munmap(hdrW, Int(sizeWeights)) == 0, "munmap failed with errno = \(errno)")
@@ -104,6 +104,10 @@ class SlimMPSCNNConvolution: MPSCNNConvolution{
close(fd_b)
}
+ required init?(coder aDecoder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
+
/**
Encode a MPSCNNKernel into a command Buffer. The operation shall proceed out-of-place.
@@ -118,16 +122,23 @@ class SlimMPSCNNConvolution: MPSCNNConvolution{
*/
override func encode(commandBuffer: MTLCommandBuffer, sourceImage: MPSImage, destinationImage: MPSImage) {
// select offset according to padding being used or not
- if padding {
- let pad_along_height = ((destinationImage.height - 1) * strideInPixelsY + kernelHeight - sourceImage.height)
- let pad_along_width = ((destinationImage.width - 1) * strideInPixelsX + kernelWidth - sourceImage.width)
- let pad_top = Int(pad_along_height / 2)
- let pad_left = Int(pad_along_width / 2)
-
- self.offset = MPSOffset(x: ((Int(kernelWidth)/2) - pad_left), y: (Int(kernelHeight/2) - pad_top), z: 0)
+ if isPadding {
+ if #available(iOS 11.0, *) {
+ let pad_along_height = ((destinationImage.height - 1) * strideInPixelsY + kernelHeight - sourceImage.height)
+ let pad_along_width = ((destinationImage.width - 1) * strideInPixelsX + kernelWidth - sourceImage.width)
+ let pad_top = Int(pad_along_height / 2)
+ let pad_left = Int(pad_along_width / 2)
+ self.offset = MPSOffset(x: ((Int(kernelWidth)/2) - pad_left), y: (Int(kernelHeight/2) - pad_top), z: 0)
+ } else {
+ // Fallback on earlier versions
+ }
}
else{
- self.offset = MPSOffset(x: Int(kernelWidth)/2, y: Int(kernelHeight)/2, z: 0)
+ if #available(iOS 11.0, *) {
+ self.offset = MPSOffset(x: Int(kernelWidth)/2, y: Int(kernelHeight)/2, z: 0)
+ } else {
+ // Fallback on earlier versions
+ }
}
super.encode(commandBuffer: commandBuffer, sourceImage: sourceImage, destinationImage: destinationImage)
@@ -212,4 +223,8 @@ class SlimMPSCNNFullyConnected: MPSCNNFullyConnected{
close(fd_w)
close(fd_b)
}
+
+ required init?(coder aDecoder: NSCoder) {
+ fatalError("init(coder:) has not been implemented")
+ }
}
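The `isPadding` branch keeps the original "same"-padding arithmetic: total padding along an axis is `(out - 1) * stride + kernel - in`, and the encode offset is the kernel centre pulled back by half of that. The same computation as a tiny standalone function (name is illustrative):

```swift
// Hedged sketch mirroring the pad_along_* / pad_top / pad_left math above,
// for a single axis.
func sameOffset(kernel: Int, stride: Int, sourceSize: Int, destinationSize: Int) -> Int {
    let padAlongAxis = (destinationSize - 1) * stride + kernel - sourceSize
    return kernel / 2 - padAlongAxis / 2
}
```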
diff --git a/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNConvolution.h b/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNConvolution.h
new file mode 100644
index 0000000..6db1909
--- /dev/null
+++ b/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNConvolution.h
@@ -0,0 +1,86 @@
+//
+// SlimMPSCNNConvolution.h
+// iOS-10-Sampler
+//
+// Created by Shuichi Tsutsumi on 6/9/18.
+// Copyright © 2018 Shuichi Tsutsumi, Inc. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+@import MetalPerformanceShaders;
+
+@interface SlimMPSCNNConvolution : MPSCNNConvolution
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName;
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+destinationFeatureChannelOffset:(NSUInteger)destinationFeatureChannelOffset;
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ padding:(BOOL)willPad;
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ padding:(BOOL)willPad
+ strideX:(NSUInteger)strideX
+ strideY:(NSUInteger)strideY;
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ padding:(BOOL)willPad
+ destinationFeatureChannelOffset:(NSUInteger)destinationFeatureChannelOffset;
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ padding:(BOOL)willPad
+ strideX:(NSUInteger)strideX
+ strideY:(NSUInteger)strideY
+ destinationFeatureChannelOffset:(NSUInteger)destinationFeatureChannelOffset;
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ padding:(BOOL)willPad
+ strideX:(NSUInteger)strideX
+ strideY:(NSUInteger)strideY
+ destinationFeatureChannelOffset:(NSUInteger)destinationFeatureChannelOffset
+ groupNum:(NSUInteger)groupNum;
+
+@end
diff --git a/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNConvolution.m b/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNConvolution.m
new file mode 100644
index 0000000..b512665
--- /dev/null
+++ b/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNConvolution.m
@@ -0,0 +1,255 @@
+//
+// SlimMPSCNNConvolution.m
+// iOS-10-Sampler
+//
+// Created by Shuichi Tsutsumi on 6/9/18.
+// Copyright © 2018 Shuichi Tsutsumi, Inc. All rights reserved.
+//
+
+#import "SlimMPSCNNConvolution.h"
+#include <sys/mman.h>
+
+// https://forums.developer.apple.com/thread/99554
+@interface MPSCNNConvolution (MPSCNNConvolution_iOS10)
+
+-(nonnull instancetype) initWithDevice: (nonnull id <MTLDevice>) device
+ convolutionDescriptor: (const MPSCNNConvolutionDescriptor * __nonnull) convolutionDescriptor
+ kernelWeights: (const float * __nonnull) kernelWeights
+ biasTerms: (const float * __nullable) biasTerms
+ flags: (MPSCNNConvolutionFlags) flags;
+@end
+
+@interface SlimMPSCNNConvolution ()
+{
+ BOOL bn;
+}
+@end
+
+
+@implementation SlimMPSCNNConvolution
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+{
+ return [self initWithKernelWidth:kernelWidth
+ kernelHeight:kernelHeight
+ inputFeatureChannels:inputFeatureChannels
+ outputFeatureChannels:outputFeatureChannels
+ neuronFilter:neuronFilter
+ device:device
+ kernelParamsBinaryName:kernelParamsBinaryName
+ padding:YES];
+}
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ destinationFeatureChannelOffset:(NSUInteger)destinationFeatureChannelOffset
+{
+ return [self initWithKernelWidth:kernelWidth
+ kernelHeight:kernelHeight
+ inputFeatureChannels:inputFeatureChannels
+ outputFeatureChannels:outputFeatureChannels
+ neuronFilter:neuronFilter
+ device:device
+ kernelParamsBinaryName:kernelParamsBinaryName
+ padding:YES
+ strideX:1
+ strideY:1
+ destinationFeatureChannelOffset:destinationFeatureChannelOffset];
+}
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ padding:(BOOL)willPad
+{
+ return [self initWithKernelWidth:kernelWidth
+ kernelHeight:kernelHeight
+ inputFeatureChannels:inputFeatureChannels
+ outputFeatureChannels:outputFeatureChannels
+ neuronFilter:neuronFilter
+ device:device
+ kernelParamsBinaryName:kernelParamsBinaryName
+ padding:willPad
+ strideX:1
+ strideY:1];
+}
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ padding:(BOOL)willPad
+ strideX:(NSUInteger)strideX
+ strideY:(NSUInteger)strideY
+{
+ return [self initWithKernelWidth:kernelWidth
+ kernelHeight:kernelHeight
+ inputFeatureChannels:inputFeatureChannels
+ outputFeatureChannels:outputFeatureChannels
+ neuronFilter:neuronFilter
+ device:device
+ kernelParamsBinaryName:kernelParamsBinaryName
+ padding:willPad
+ strideX:strideX
+ strideY:strideY
+ destinationFeatureChannelOffset:0];
+}
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ padding:(BOOL)willPad
+ destinationFeatureChannelOffset:(NSUInteger)destinationFeatureChannelOffset
+{
+ return [self initWithKernelWidth:kernelWidth
+ kernelHeight:kernelHeight
+ inputFeatureChannels:inputFeatureChannels
+ outputFeatureChannels:outputFeatureChannels
+ neuronFilter:neuronFilter
+ device:device
+ kernelParamsBinaryName:kernelParamsBinaryName
+ padding:willPad
+ strideX:1
+ strideY:1
+ destinationFeatureChannelOffset:destinationFeatureChannelOffset
+ groupNum:1];
+}
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ padding:(BOOL)willPad
+ strideX:(NSUInteger)strideX
+ strideY:(NSUInteger)strideY
+ destinationFeatureChannelOffset:(NSUInteger)destinationFeatureChannelOffset
+{
+ return [self initWithKernelWidth:kernelWidth
+ kernelHeight:kernelHeight
+ inputFeatureChannels:inputFeatureChannels
+ outputFeatureChannels:outputFeatureChannels
+ neuronFilter:neuronFilter
+ device:device
+ kernelParamsBinaryName:kernelParamsBinaryName
+ padding:willPad
+ strideX:strideX
+ strideY:strideY
+ destinationFeatureChannelOffset:destinationFeatureChannelOffset
+ groupNum:1];
+}
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+ padding:(BOOL)willPad
+ strideX:(NSUInteger)strideX
+ strideY:(NSUInteger)strideY
+ destinationFeatureChannelOffset:(NSUInteger)destinationFeatureChannelOffset
+ groupNum:(NSUInteger)groupNum
+{
+ // calculate the size of the weights and bias buffers to be memory mapped
+ NSUInteger sizeBias = outputFeatureChannels * sizeof(float);
+ NSUInteger sizeWeights = inputFeatureChannels * kernelHeight * kernelWidth * outputFeatureChannels * sizeof(float);
+
+ // get the paths to this layer's weights and bias files
+ NSString *filenameW = [NSString stringWithFormat:@"weights_%@", kernelParamsBinaryName];
+ NSString *filenameB = [NSString stringWithFormat:@"bias_%@", kernelParamsBinaryName];
+ NSString *wtPath = [[NSBundle mainBundle] pathForResource:filenameW ofType:@"dat"];
+ NSString *bsPath = [[NSBundle mainBundle] pathForResource:filenameB ofType:@"dat"];
+ NSAssert1(wtPath, @"Error: failed to find file %@", filenameW);
+ NSAssert1(bsPath, @"Error: failed to find file %@", filenameB);
+
+ // open file descriptors in read-only mode to parameter files
+ int fd_w = open([wtPath UTF8String], O_RDONLY, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH);
+ int fd_b = open([bsPath UTF8String], O_RDONLY, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH);
+ NSAssert1(fd_w != -1, @"Error: failed to open parameter file at %@", wtPath);
+ NSAssert1(fd_b != -1, @"Error: failed to open parameter file at %@", bsPath);
+
+ // memory map the parameters
+ void *hdrW = mmap(nil, sizeWeights, PROT_READ, MAP_FILE | MAP_SHARED, fd_w, 0);
+ void *hdrB = mmap(nil, sizeBias, PROT_READ, MAP_FILE | MAP_SHARED, fd_b, 0);
+
+ // cast Void pointers to Float
+ float *w = hdrW;
+ float *b = hdrB;
+
+ MPSCNNConvolutionDescriptor *convDesc;
+ convDesc = [MPSCNNConvolutionDescriptor cnnConvolutionDescriptorWithKernelWidth:kernelWidth
+ kernelHeight:kernelHeight
+ inputFeatureChannels:inputFeatureChannels
+ outputFeatureChannels:outputFeatureChannels
+ neuronFilter:neuronFilter];
+ convDesc.strideInPixelsX = strideX;
+ convDesc.strideInPixelsY = strideY;
+ convDesc.groups = groupNum;
+
+ self = [super initWithDevice:device
+ convolutionDescriptor:convDesc
+ kernelWeights:w
+ biasTerms:b
+ flags:MPSCNNConvolutionFlagsNone];
+
+ self.destinationFeatureChannelOffset = destinationFeatureChannelOffset;
+
+ // FIXME: -
+// self.padding = willPad;
+
+ NSAssert1(munmap(hdrW, sizeWeights) == 0, @"error %s" ,"hdrW");
+ NSAssert1(munmap(hdrB, sizeBias) == 0, @"error %s" ,"hdrB");
+
+ close(fd_w);
+ close(fd_b);
+ return self;
+}
+
+- (void)encodeToCommandBuffer:(id<MTLCommandBuffer>)commandBuffer
+ sourceImage:(MPSImage *)sourceImage
+ destinationImage:(MPSImage *)destinationImage
+{
+ // padding
+ NSUInteger padAlongHeight = (destinationImage.height - 1) * self.strideInPixelsY + self.kernelHeight - sourceImage.height;
+ NSUInteger padAlongWidth = (destinationImage.width - 1) * self.strideInPixelsX + self.kernelWidth - sourceImage.width;
+ NSUInteger padTop = padAlongHeight / 2;
+ NSUInteger padLeft = padAlongWidth / 2;
+ MPSOffset offset;
+ offset.x = self.kernelWidth/2 - padLeft;
+ offset.y = self.kernelHeight/2 - padTop;
+ offset.z = 0;
+ self.offset = offset;
+
+ [super encodeToCommandBuffer:commandBuffer
+ sourceImage:sourceImage
+ destinationImage:destinationImage];
+
+}
+
+@end
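
The offset arithmetic in `encodeToCommandBuffer:` reproduces "same"-style padding: the total padding is `(outputSize - 1) * stride + kernelSize - inputSize`, split evenly, and then folded into the `MPSOffset` the kernel expects. A standalone sketch of that computation (the helper name is ours; the formulas mirror the code above):

```swift
import MetalPerformanceShaders

// Sketch of the "same padding" offset used in encodeToCommandBuffer: above.
func samePaddingOffset(kernelWidth: Int, kernelHeight: Int,
                       strideX: Int, strideY: Int,
                       sourceWidth: Int, sourceHeight: Int,
                       destWidth: Int, destHeight: Int) -> MPSOffset {
    let padAlongWidth  = (destWidth  - 1) * strideX + kernelWidth  - sourceWidth
    let padAlongHeight = (destHeight - 1) * strideY + kernelHeight - sourceHeight
    let padLeft = padAlongWidth / 2
    let padTop  = padAlongHeight / 2
    return MPSOffset(x: kernelWidth / 2 - padLeft,
                     y: kernelHeight / 2 - padTop,
                     z: 0)
}

// Example: a 5x5 kernel, stride 1, 28x28 in and 28x28 out gives offset (0, 0, 0).
```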
diff --git a/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNFullyConnected.h b/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNFullyConnected.h
new file mode 100644
index 0000000..d7186ff
--- /dev/null
+++ b/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNFullyConnected.h
@@ -0,0 +1,22 @@
+//
+// SlimMPSCNNFullyConnected.h
+// iOS-10-Sampler
+//
+// Created by Shuichi Tsutsumi on 6/9/18.
+// Copyright © 2018 Shuichi Tsutsumi, Inc. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+@import MetalPerformanceShaders;
+
+@interface SlimMPSCNNFullyConnected : MPSCNNFullyConnected
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName;
+
+@end
diff --git a/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNFullyConnected.m b/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNFullyConnected.m
new file mode 100644
index 0000000..8e5a2c0
--- /dev/null
+++ b/iOS-10-Sampler/Samples/MetalCNNBasicApple/SlimMPSCNNFullyConnected.m
@@ -0,0 +1,78 @@
+//
+// SlimMPSCNNFullyConnected.m
+// iOS-10-Sampler
+//
+// Created by Shuichi Tsutsumi on 6/9/18.
+// Copyright © 2018 Shuichi Tsutsumi, Inc. All rights reserved.
+//
+
+#import "SlimMPSCNNFullyConnected.h"
+#include <sys/mman.h>
+
+@interface SlimMPSCNNFullyConnected ()
+{
+ BOOL bn;
+}
+@end
+
+
+@implementation SlimMPSCNNFullyConnected
+
+- (instancetype)initWithKernelWidth:(NSUInteger)kernelWidth
+ kernelHeight:(NSUInteger)kernelHeight
+ inputFeatureChannels:(NSUInteger)inputFeatureChannels
+ outputFeatureChannels:(NSUInteger)outputFeatureChannels
+ neuronFilter:(MPSCNNNeuron *)neuronFilter
+ device:(id<MTLDevice>)device
+ kernelParamsBinaryName:(NSString *)kernelParamsBinaryName
+{
+ // calculate the size of the weights and bias buffers to be memory mapped
+ NSUInteger sizeBias = outputFeatureChannels * sizeof(float);
+ NSUInteger sizeWeights = inputFeatureChannels * kernelHeight * kernelWidth * outputFeatureChannels * sizeof(float);
+
+ // get the paths to this layer's weights and bias files
+ NSString *filenameW = [NSString stringWithFormat:@"weights_%@", kernelParamsBinaryName];
+ NSString *filenameB = [NSString stringWithFormat:@"bias_%@", kernelParamsBinaryName];
+ NSString *wtPath = [[NSBundle mainBundle] pathForResource:filenameW ofType:@"dat"];
+ NSString *bsPath = [[NSBundle mainBundle] pathForResource:filenameB ofType:@"dat"];
+ NSAssert1(wtPath, @"Error: failed to find file %@", filenameW);
+ NSAssert1(bsPath, @"Error: failed to find file %@", filenameB);
+
+ // open file descriptors in read-only mode to parameter files
+ int fd_w = open([wtPath UTF8String], O_RDONLY, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH);
+ int fd_b = open([bsPath UTF8String], O_RDONLY, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH);
+ NSAssert1(fd_w != -1, @"Error: failed to open parameter file at %@", wtPath);
+ NSAssert1(fd_b != -1, @"Error: failed to open parameter file at %@", bsPath);
+
+ // memory map the parameters
+ void *hdrW = mmap(nil, sizeWeights, PROT_READ, MAP_FILE | MAP_SHARED, fd_w, 0);
+ void *hdrB = mmap(nil, sizeBias, PROT_READ, MAP_FILE | MAP_SHARED, fd_b, 0);
+
+ // cast Void pointers to Float
+ float *w = hdrW;
+ float *b = hdrB;
+
+ MPSCNNConvolutionDescriptor *convDesc;
+ convDesc = [MPSCNNConvolutionDescriptor cnnConvolutionDescriptorWithKernelWidth:kernelWidth
+ kernelHeight:kernelHeight
+ inputFeatureChannels:inputFeatureChannels
+ outputFeatureChannels:outputFeatureChannels
+ neuronFilter:neuronFilter];
+ self = [super initWithDevice:device
+ convolutionDescriptor:convDesc
+ kernelWeights:w
+ biasTerms:b
+ flags:MPSCNNConvolutionFlagsNone];
+ if (self) {
+ self.destinationFeatureChannelOffset = 0;
+ }
+
+ NSAssert1(munmap(hdrW, sizeWeights) == 0, @"error %s" ,"hdrW");
+ NSAssert1(munmap(hdrB, sizeBias) == 0, @"error %s" ,"hdrB");
+
+ close(fd_w);
+ close(fd_b);
+ return self;
+}
+
+@end
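
Both wrappers mmap raw float32 blobs, so the expected file sizes are `outputChannels` floats for the bias and `kernelWidth * kernelHeight * inputChannels * outputChannels` floats for the weights. A hedged sketch of a size sanity check along those lines (the helper is ours, not part of the sample):

```swift
import Foundation

// Sketch: verify that weights_<name>.dat / bias_<name>.dat match the layer shape.
func paramFilesMatch(name: String,
                     kernelWidth: Int, kernelHeight: Int,
                     inputChannels: Int, outputChannels: Int) -> Bool {
    let expectedWeights = kernelWidth * kernelHeight * inputChannels * outputChannels * MemoryLayout<Float32>.size
    let expectedBias = outputChannels * MemoryLayout<Float32>.size
    guard
        let wURL = Bundle.main.url(forResource: "weights_\(name)", withExtension: "dat"),
        let bURL = Bundle.main.url(forResource: "bias_\(name)", withExtension: "dat"),
        let wData = try? Data(contentsOf: wURL, options: .alwaysMapped),
        let bData = try? Data(contentsOf: bURL, options: .alwaysMapped)
    else { return false }
    return wData.count == expectedWeights && bData.count == expectedBias
}
```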
diff --git a/iOS-10-Sampler/Samples/MetalCNNBasicViewController.swift b/iOS-10-Sampler/Samples/MetalCNNBasicViewController.swift
index f389b97..4db6541 100644
--- a/iOS-10-Sampler/Samples/MetalCNNBasicViewController.swift
+++ b/iOS-10-Sampler/Samples/MetalCNNBasicViewController.swift
@@ -33,7 +33,12 @@ class MetalCNNBasicViewController: UIViewController {
predictionLabel.text = nil
// Load default device.
- device = MTLCreateSystemDefaultDevice()
+ guard let device = MTLCreateSystemDefaultDevice() else {
+ showAlert(title: "Not Supported", message: "Metal is not supported on the current device", handler: { (action) in
+ self.navigationController!.popViewController(animated: true)
+ })
+ return
+ }
// Make sure the current device supports MetalPerformanceShaders.
guard MPSSupportsMTLDevice(device) else {
@@ -44,7 +49,7 @@ class MetalCNNBasicViewController: UIViewController {
}
// Create new command queue.
- commandQueue = device!.makeCommandQueue()
+ commandQueue = device.makeCommandQueue()
// initialize the networks we shall use to detect digits
network = MNISTDeepCNN(withCommandQueue: commandQueue)
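
The same guard pattern now appears in both Metal samples: bail out early when there is no Metal device, no MPS support, or no command queue (e.g. on the Simulator). A compact sketch of the idea as one helper, for illustration only:

```swift
import Metal
import MetalPerformanceShaders

// Sketch: bundle the availability checks the view controllers perform.
func makeMetalContext() -> (device: MTLDevice, queue: MTLCommandQueue)? {
    guard let device = MTLCreateSystemDefaultDevice(),  // nil on the Simulator
          MPSSupportsMTLDevice(device),                 // MPS requires a capable GPU
          let queue = device.makeCommandQueue() else {
        return nil
    }
    return (device, queue)
}
```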
diff --git a/iOS-10-Sampler/Samples/MetalImageRecognitionViewController.swift b/iOS-10-Sampler/Samples/MetalImageRecognitionViewController.swift
index f3fddd2..ab0103b 100644
--- a/iOS-10-Sampler/Samples/MetalImageRecognitionViewController.swift
+++ b/iOS-10-Sampler/Samples/MetalImageRecognitionViewController.swift
@@ -32,7 +32,12 @@ class MetalImageRecognitionViewController: UIViewController, UIImagePickerContro
super.viewDidLoad()
// Load default device.
- device = MTLCreateSystemDefaultDevice()
+ guard let device = MTLCreateSystemDefaultDevice() else {
+ showAlert(title: "Not Supported", message: "Metal is not supported on the current device", handler: { (action) in
+ self.navigationController!.popViewController(animated: true)
+ })
+ return
+ }
// Make sure the current device supports MetalPerformanceShaders.
guard MPSSupportsMTLDevice(device) else {
@@ -52,7 +57,7 @@ class MetalImageRecognitionViewController: UIViewController, UIImagePickerContro
// get a texture from this CGImage
do {
- self.sourceTexture = try self.textureLoader.newTexture(with: cgImage, options: [:])
+ self.sourceTexture = try self.textureLoader.newTexture(cgImage: cgImage, options: nil)
}
catch let error as NSError {
fatalError("Unexpected error ocurred: \(error.localizedDescription).")
@@ -64,10 +69,10 @@ class MetalImageRecognitionViewController: UIViewController, UIImagePickerContro
// Load any resources required for rendering.
// Create new command queue.
- commandQueue = device!.makeCommandQueue()
+ commandQueue = device.makeCommandQueue()
// make a textureLoader to get our input images as MTLTextures
- textureLoader = MTKTextureLoader(device: device!)
+ textureLoader = MTKTextureLoader(device: device)
// Load the appropriate Network
inception3Net = Inception3Net(withCommandQueue: commandQueue)
@@ -101,16 +106,15 @@ class MetalImageRecognitionViewController: UIViewController, UIImagePickerContro
}
/**
- This function gets a commanBuffer and encodes layers in it. It follows that by commiting the commandBuffer and getting labels
+ This function gets a commandBuffer and encodes layers in it. It follows that by committing the commandBuffer and getting labels
*/
func runNetwork() {
- let startTime = CACurrentMediaTime()
-
// to deliver optimal performance we leave some resources used in MPSCNN to be released at next call of autoreleasepool,
// so the user can decide the appropriate time to release this
autoreleasepool{
+// let startTime = CACurrentMediaTime()
// encoding command buffer
- let commandBuffer = commandQueue.makeCommandBuffer()
+ guard let commandBuffer = commandQueue.makeCommandBuffer() else {return}
// encode all layers of network on present commandBuffer, pass in the input image MTLTexture
inception3Net.forward(commandBuffer: commandBuffer, sourceTexture: sourceTexture)
@@ -121,16 +125,17 @@ class MetalImageRecognitionViewController: UIViewController, UIImagePickerContro
// display top-5 predictions for what the object should be labelled
var resultStr = ""
- inception3Net.getResults().forEach({ (label, prob) in
+ inception3Net.getResults().forEach({ (arg) in
+
+ let (label, prob) = arg
resultStr = resultStr + label + "\t" + String(format: "%.1f", prob * 100) + "%\n\n"
})
+// let endTime = CACurrentMediaTime()
+// print("Running Time: \(endTime - startTime) [sec]")
DispatchQueue.main.async {
self.predictLabel.text = resultStr
}
}
-
- let endTime = CACurrentMediaTime()
- print("Running Time: \(endTime - startTime) [sec]")
}
}
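
The `forEach` change is a consequence of SE-0110 in Swift 4: a closure no longer implicitly destructures a tuple element, so the pair has to be unpacked inside the closure. A tiny self-contained illustration (the labels and probabilities are made up):

```swift
import Foundation

// SE-0110: explicit tuple destructuring inside the closure body.
let results: [(String, Float)] = [("golden retriever", 0.82), ("tennis ball", 0.09)]
var resultStr = ""
results.forEach { arg in
    let (label, prob) = arg
    resultStr += label + "\t" + String(format: "%.1f", prob * 100) + "%\n"
}
print(resultStr)
```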
diff --git a/iOS-10-Sampler/Samples/PersistentContainerViewController.swift b/iOS-10-Sampler/Samples/PersistentContainerViewController.swift
index ce856b4..b2a448f 100644
--- a/iOS-10-Sampler/Samples/PersistentContainerViewController.swift
+++ b/iOS-10-Sampler/Samples/PersistentContainerViewController.swift
@@ -88,7 +88,7 @@ class PersitentContainerViewController: UITableViewController {
return true
}
- override func tableView(_ tableView: UITableView, commit editingStyle: UITableViewCellEditingStyle, forRowAt indexPath: IndexPath) {
+ override func tableView(_ tableView: UITableView, commit editingStyle: UITableViewCell.EditingStyle, forRowAt indexPath: IndexPath) {
switch editingStyle {
case .delete:
delete(at: indexPath.row)
diff --git a/iOS-10-Sampler/Samples/PropertyAnimatorEffectViewController.swift b/iOS-10-Sampler/Samples/PropertyAnimatorEffectViewController.swift
index 67209ee..91d8e30 100644
--- a/iOS-10-Sampler/Samples/PropertyAnimatorEffectViewController.swift
+++ b/iOS-10-Sampler/Samples/PropertyAnimatorEffectViewController.swift
@@ -18,12 +18,20 @@ class PropertyAnimatorEffectViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
-
+ }
+
+ override func viewWillAppear(_ animated: Bool) {
+ super.viewWillAppear(animated)
animator = UIViewPropertyAnimator(duration: 0, curve: .linear) {
self.effectView.effect = nil
}
}
-
+
+ override func viewWillDisappear(_ animated: Bool) {
+ super.viewWillDisappear(animated)
+ animator.startAnimation()
+ }
+
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
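
Moving the animator setup to `viewWillAppear` and calling `startAnimation()` in `viewWillDisappear` keeps a paused `UIViewPropertyAnimator` from being released mid-flight. A minimal sketch of the underlying pattern (ours, not the controller's exact code): a paused animator scrubbed via `fractionComplete`, then started before it goes away.

```swift
import UIKit

// Sketch: scrub a blur in and out with a paused property animator.
final class BlurScrubber {
    private let animator: UIViewPropertyAnimator

    init(effectView: UIVisualEffectView) {
        effectView.effect = UIBlurEffect(style: .light)
        animator = UIViewPropertyAnimator(duration: 0, curve: .linear) {
            effectView.effect = nil   // animating toward "no blur"
        }
    }

    func update(to fraction: CGFloat) {
        animator.fractionComplete = max(0, min(1, fraction))
    }

    func finish() {
        animator.startAnimation()     // let it run out before it is released
    }
}
```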
diff --git a/iOS-10-Sampler/Samples/SpeechRecognition.storyboard b/iOS-10-Sampler/Samples/SpeechRecognition.storyboard
index a0c47f6..2428d9c 100644
--- a/iOS-10-Sampler/Samples/SpeechRecognition.storyboard
+++ b/iOS-10-Sampler/Samples/SpeechRecognition.storyboard
@@ -1,9 +1,9 @@
-
-
+
+
+
-
-
+
@@ -16,10 +16,11 @@
-
+
+
@@ -29,8 +30,8 @@
-
-
+
+
@@ -41,6 +42,7 @@
-
diff --git a/iOS-10-Sampler/Samples/SpeechRecognitionViewController.swift b/iOS-10-Sampler/Samples/SpeechRecognitionViewController.swift
index 9755b57..ebce631 100644
--- a/iOS-10-Sampler/Samples/SpeechRecognitionViewController.swift
+++ b/iOS-10-Sampler/Samples/SpeechRecognitionViewController.swift
@@ -59,6 +59,8 @@ class SpeechRecognitionViewController: UIViewController, SFSpeechRecognizerDeleg
case .notDetermined:
self.recordBtn.isEnabled = false
self.recordBtn.setTitle("Speech recognition not yet authorized", for: .disabled)
+ @unknown default:
+ fatalError()
}
}
}
@@ -82,13 +84,13 @@ class SpeechRecognitionViewController: UIViewController, SFSpeechRecognizerDeleg
}
let audioSession = AVAudioSession.sharedInstance()
- try audioSession.setCategory(AVAudioSessionCategoryRecord)
- try audioSession.setMode(AVAudioSessionModeMeasurement)
- try audioSession.setActive(true, with: .notifyOthersOnDeactivation)
+ try audioSession.setCategory(AVAudioSession.Category(rawValue: convertFromAVAudioSessionCategory(AVAudioSession.Category.record)))
+ try audioSession.setMode(AVAudioSession.Mode.measurement)
+ try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
- guard let inputNode = audioEngine.inputNode else { fatalError("Audio engine has no input node") }
+ let inputNode = audioEngine.inputNode
guard let recognitionRequest = recognitionRequest else { fatalError("Unable to created a SFSpeechAudioBufferRecognitionRequest object") }
// Configure request so that results are returned before audio recording is finished
@@ -179,3 +181,8 @@ class SpeechRecognitionViewController: UIViewController, SFSpeechRecognizerDeleg
}
}
}
+
+// Helper function inserted by Swift 4.2 migrator.
+fileprivate func convertFromAVAudioSessionCategory(_ input: AVAudioSession.Category) -> String {
+ return input.rawValue
+}
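
The migrator round-trips the category through its raw `String`. With an iOS 10+ deployment target the session can also be configured directly with the typed API; an equivalent, more direct form (a sketch, not a change to the sample):

```swift
import AVFoundation

// Direct form of the same audio-session configuration, without the migrator helper.
func configureAudioSessionForSpeech() throws {
    let session = AVAudioSession.sharedInstance()
    try session.setCategory(.record, mode: .measurement, options: [])
    try session.setActive(true, options: .notifyOthersOnDeactivation)
}
```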
diff --git a/iOS-10-Sampler/Samples/StickerPackViewController.swift b/iOS-10-Sampler/Samples/StickerPackViewController.swift
index 47a9a4c..974c3d2 100644
--- a/iOS-10-Sampler/Samples/StickerPackViewController.swift
+++ b/iOS-10-Sampler/Samples/StickerPackViewController.swift
@@ -20,7 +20,12 @@ class StickerPackViewController: UIViewController {
@IBAction func openBtnTapped(sender: UIButton) {
let url = URL(string: "sms:")!
- UIApplication.shared.open(url, options: [:], completionHandler: nil)
+ UIApplication.shared.open(url, options: convertToUIApplicationOpenExternalURLOptionsKeyDictionary([:]), completionHandler: nil)
}
}
+
+// Helper function inserted by Swift 4.2 migrator.
+fileprivate func convertToUIApplicationOpenExternalURLOptionsKeyDictionary(_ input: [String: Any]) -> [UIApplication.OpenExternalURLOptionsKey: Any] {
+ return Dictionary(uniqueKeysWithValues: input.map { key, value in (UIApplication.OpenExternalURLOptionsKey(rawValue: key), value)})
+}
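
Since the options dictionary here is empty, a typed empty literal can also be passed straight through; the conversion helper only matters when an untyped `[String: Any]` arrives from elsewhere. A sketch of the direct call:

```swift
import UIKit

// Direct form: an empty [UIApplication.OpenExternalURLOptionsKey: Any] literal.
func openMessagesApp() {
    guard let url = URL(string: "sms:") else { return }
    UIApplication.shared.open(url, options: [:], completionHandler: nil)
}
```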
diff --git a/iOS-10-Sampler/Samples/TabBadgeViewController.swift b/iOS-10-Sampler/Samples/TabBadgeViewController.swift
index 99a837b..88e1a3d 100644
--- a/iOS-10-Sampler/Samples/TabBadgeViewController.swift
+++ b/iOS-10-Sampler/Samples/TabBadgeViewController.swift
@@ -27,11 +27,11 @@ class TabBadgeViewController: UIViewController, UITabBarDelegate {
shadow.shadowColor = #colorLiteral(red: 0.501960814, green: 0.501960814, blue: 0.501960814, alpha: 1)
shadow.shadowOffset = CGSize(width: 1, height: 1)
shadow.shadowBlurRadius = 3
- let attributes: [String : Any] = [NSFontAttributeName: UIFont(name: "Menlo-Bold", size: 30)!,
- NSForegroundColorAttributeName: #colorLiteral(red: 0.9098039269, green: 0.4784313738, blue: 0.6431372762, alpha: 1),
- NSShadowAttributeName: shadow]
+ let attributes: [String : Any] = [NSAttributedString.Key.font.rawValue: UIFont(name: "Menlo-Bold", size: 30)!,
+ NSAttributedString.Key.foregroundColor.rawValue: #colorLiteral(red: 0.9098039269, green: 0.4784313738, blue: 0.6431372762, alpha: 1),
+ NSAttributedString.Key.shadow.rawValue: shadow]
// New!
- item.setBadgeTextAttributes(attributes, for: .normal)
+ item.setBadgeTextAttributes(convertToOptionalNSAttributedStringKeyDictionary(attributes), for: .normal)
// New!
item.badgeColor = UIColor.clear
@@ -62,3 +62,9 @@ class TabBadgeViewController: UIViewController, UITabBarDelegate {
}
}
}
+
+// Helper function inserted by Swift 4.2 migrator.
+fileprivate func convertToOptionalNSAttributedStringKeyDictionary(_ input: [String: Any]?) -> [NSAttributedString.Key: Any]? {
+ guard let input = input else { return nil }
+ return Dictionary(uniqueKeysWithValues: input.map { key, value in (NSAttributedString.Key(rawValue: key), value)})
+}
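
The migrated code keeps a `[String: Any]` dictionary keyed by `rawValue` and converts it back with the helper. Building the attributes with typed `NSAttributedString.Key` keys avoids both steps; a sketch of that alternative (the color value only approximates the color literal above):

```swift
import UIKit

// Typed-key version of the badge attributes; no String keys, no conversion helper.
func badgeTextAttributes() -> [NSAttributedString.Key: Any] {
    let shadow = NSShadow()
    shadow.shadowColor = UIColor.gray
    shadow.shadowOffset = CGSize(width: 1, height: 1)
    shadow.shadowBlurRadius = 3
    return [
        .font: UIFont(name: "Menlo-Bold", size: 30) ?? UIFont.systemFont(ofSize: 30),
        .foregroundColor: UIColor(red: 0.91, green: 0.48, blue: 0.64, alpha: 1),
        .shadow: shadow
    ]
}

// Usage: item.setBadgeTextAttributes(badgeTextAttributes(), for: .normal)
```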
diff --git a/iOS-10-Sampler/Samples/UserNotificationViewController.swift b/iOS-10-Sampler/Samples/UserNotificationViewController.swift
index dc11ac5..ec18e50 100644
--- a/iOS-10-Sampler/Samples/UserNotificationViewController.swift
+++ b/iOS-10-Sampler/Samples/UserNotificationViewController.swift
@@ -35,7 +35,7 @@ class UserNotificationViewController: UIViewController, UNUserNotificationCenter
// Build content
content.title = "iOS-10-Sampler"
content.body = "This is the body."
- content.sound = UNNotificationSound.default()
+ content.sound = UNNotificationSound.default
content.attachments = [movieAttachment]
// Initializa request
@@ -67,11 +67,11 @@ class UserNotificationViewController: UIViewController, UNUserNotificationCenter
// =========================================================================
// MARK: - UNNotificationCenterDelegate
- private func userNotificationCenter(_ center: UNUserNotificationCenter, didReceive response: UNNotificationResponse, withCompletionHandler completionHandler: () -> Void) {
+ internal func userNotificationCenter(_ center: UNUserNotificationCenter, didReceive response: UNNotificationResponse, withCompletionHandler completionHandler: () -> Void) {
print("\(self.classForCoder)/" + #function)
}
- private func userNotificationCenter(_ center: UNUserNotificationCenter, willPresent notification: UNNotification, withCompletionHandler completionHandler: (UNNotificationPresentationOptions) -> Void) {
+ internal func userNotificationCenter(_ center: UNUserNotificationCenter, willPresent notification: UNNotification, withCompletionHandler completionHandler: (UNNotificationPresentationOptions) -> Void) {
print("\(self.classForCoder)/" + #function)
}
@@ -84,14 +84,17 @@ class UserNotificationViewController: UIViewController, UNUserNotificationCenter
UNUserNotificationCenter.current().delegate = self
if let error = error {
print("error:\(error)")
- } else {
+ return
+ }
+ DispatchQueue.main.async { [weak self] in
+ guard let self = self else { return }
let alert = UIAlertController(
title: "Close this app",
message: "A local notification has been scheduled. Close this app and wait 10 sec.",
- preferredStyle: UIAlertControllerStyle.alert)
+ preferredStyle: UIAlertController.Style.alert)
let okAction = UIAlertAction(
title: "OK",
- style: UIAlertActionStyle.cancel,
+ style: UIAlertAction.Style.cancel,
handler: nil)
alert.addAction(okAction)
self.present(alert, animated: true, completion: nil)
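
For context, the full scheduling path around this handler looks roughly like the sketch below; the attachment file name and request identifier are illustrative, not the sample's actual values.

```swift
import UserNotifications

// Sketch: schedule a local notification with an image attachment 10 seconds out.
func scheduleImageNotification() {
    let content = UNMutableNotificationContent()
    content.title = "iOS-10-Sampler"
    content.body = "This is the body."
    content.sound = UNNotificationSound.default
    if let url = Bundle.main.url(forResource: "sample", withExtension: "png"),
       let attachment = try? UNNotificationAttachment(identifier: "image", url: url, options: nil) {
        content.attachments = [attachment]
    }
    let trigger = UNTimeIntervalNotificationTrigger(timeInterval: 10, repeats: false)
    let request = UNNotificationRequest(identifier: "sample.image.notification", content: content, trigger: trigger)
    UNUserNotificationCenter.current().add(request)
}
```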
diff --git a/iOS-10-Sampler/iOS10Sampler-Bridging-Header.h b/iOS-10-Sampler/iOS10Sampler-Bridging-Header.h
index ed666d3..f33e961 100644
--- a/iOS-10-Sampler/iOS10Sampler-Bridging-Header.h
+++ b/iOS-10-Sampler/iOS10Sampler-Bridging-Header.h
@@ -9,4 +9,7 @@
#ifndef iOS10Sampler_Bridging_Header_h
#define iOS10Sampler_Bridging_Header_h
+#import "SlimMPSCNNConvolution.h"
+#import "SlimMPSCNNFullyConnected.h"
+
#endif /* iOS10Sampler_Bridging_Header_h */
diff --git a/libs/VideoCapture/AVCaptureDevice+Extension.swift b/libs/VideoCapture/AVCaptureDevice+Extension.swift
index 043808f..1b40d99 100644
--- a/libs/VideoCapture/AVCaptureDevice+Extension.swift
+++ b/libs/VideoCapture/AVCaptureDevice+Extension.swift
@@ -8,18 +8,11 @@
import AVFoundation
extension AVCaptureDevice {
- private func availableFormatsFor(preferredFps: Float64) -> [AVCaptureDeviceFormat] {
- guard let allFormats = formats as? [AVCaptureDeviceFormat] else {
- return []
- }
-
- var availableFormats: [AVCaptureDeviceFormat] = []
- for format in allFormats
+ private func availableFormatsFor(preferredFps: Float64) -> [AVCaptureDevice.Format] {
+ var availableFormats: [AVCaptureDevice.Format] = []
+ for format in formats
{
- guard let ranges = format.videoSupportedFrameRateRanges as? [AVFrameRateRange] else {
- continue
- }
-
+ let ranges = format.videoSupportedFrameRateRanges
for range in ranges where range.minFrameRate <= preferredFps && preferredFps <= range.maxFrameRate
{
availableFormats.append(format)
@@ -28,13 +21,12 @@ extension AVCaptureDevice {
return availableFormats
}
- private func formatWithHighestResolution(_ availableFormats: [AVCaptureDeviceFormat]) -> AVCaptureDeviceFormat?
+ private func formatWithHighestResolution(_ availableFormats: [AVCaptureDevice.Format]) -> AVCaptureDevice.Format?
{
var maxWidth: Int32 = 0
- var selectedFormat: AVCaptureDeviceFormat?
+ var selectedFormat: AVCaptureDevice.Format?
for format in availableFormats {
- guard let desc = format.formatDescription else {continue}
- let dimensions = CMVideoFormatDescriptionGetDimensions(desc)
+ let dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
let width = dimensions.width
if width >= maxWidth {
maxWidth = width
@@ -44,11 +36,10 @@ extension AVCaptureDevice {
return selectedFormat
}
- private func formatFor(preferredSize: CGSize, availableFormats: [AVCaptureDeviceFormat]) -> AVCaptureDeviceFormat?
+ private func formatFor(preferredSize: CGSize, availableFormats: [AVCaptureDevice.Format]) -> AVCaptureDevice.Format?
{
for format in availableFormats {
- guard let desc = format.formatDescription else {continue}
- let dimensions = CMVideoFormatDescriptionGetDimensions(desc)
+ let dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
if dimensions.width >= Int32(preferredSize.width) && dimensions.height >= Int32(preferredSize.height)
{
@@ -61,37 +52,33 @@ extension AVCaptureDevice {
func updateFormatWithPreferredVideoSpec(preferredSpec: VideoSpec)
{
- let availableFormats: [AVCaptureDeviceFormat]
+ let availableFormats: [AVCaptureDevice.Format]
if let preferredFps = preferredSpec.fps {
availableFormats = availableFormatsFor(preferredFps: Float64(preferredFps))
- }
- else {
- guard let allFormats = formats as? [AVCaptureDeviceFormat] else { return }
- availableFormats = allFormats
+ } else {
+ availableFormats = formats
}
- var selectedFormat: AVCaptureDeviceFormat?
+ var format: AVCaptureDevice.Format?
if let preferredSize = preferredSpec.size {
- selectedFormat = formatFor(preferredSize: preferredSize, availableFormats: availableFormats)
+ format = formatFor(preferredSize: preferredSize, availableFormats: availableFormats)
} else {
- selectedFormat = formatWithHighestResolution(availableFormats)
+ format = formatWithHighestResolution(availableFormats)
}
+
+ guard let selectedFormat = format else {return}
print("selected format: \(selectedFormat)")
+ do {
+ try lockForConfiguration()
+ } catch {
+ fatalError("")
+ }
+ activeFormat = selectedFormat
- if let selectedFormat = selectedFormat {
- do {
- try lockForConfiguration()
- }
- catch {
- fatalError("")
- }
- activeFormat = selectedFormat
-
- if let preferredFps = preferredSpec.fps {
- activeVideoMinFrameDuration = CMTimeMake(1, preferredFps)
- activeVideoMaxFrameDuration = CMTimeMake(1, preferredFps)
- unlockForConfiguration()
- }
+ if let preferredFps = preferredSpec.fps {
+ activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: preferredFps)
+ activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: preferredFps)
+ unlockForConfiguration()
}
}
}
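
The highest-resolution selection above can also be written as a single pass over `formats`; a small standalone sketch (ours) of essentially the same idea:

```swift
import AVFoundation

// Sketch: pick the format with the widest dimensions, as formatWithHighestResolution does.
func highestResolutionFormat(of device: AVCaptureDevice) -> AVCaptureDevice.Format? {
    return device.formats.max { lhs, rhs in
        CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width <
            CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width
    }
}
```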
diff --git a/libs/VideoCapture/VideoCameraType.swift b/libs/VideoCapture/VideoCameraType.swift
index ebafd4f..67ac593 100644
--- a/libs/VideoCapture/VideoCameraType.swift
+++ b/libs/VideoCapture/VideoCameraType.swift
@@ -15,7 +15,7 @@ enum CameraType : Int {
func captureDevice() -> AVCaptureDevice {
switch self {
case .front:
- guard let devices = AVCaptureDeviceDiscoverySession(deviceTypes: [], mediaType: AVMediaTypeVideo, position: .front).devices else {break}
+ let devices = AVCaptureDevice.DiscoverySession(deviceTypes: [], mediaType: .video, position: .front).devices
print("devices:\(devices)")
for device in devices where device.position == .front {
return device
@@ -23,6 +23,6 @@ enum CameraType : Int {
default:
break
}
- return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
+ return AVCaptureDevice.default(for: .video)!
}
}
diff --git a/libs/VideoCapture/VideoCapture.swift b/libs/VideoCapture/VideoCapture.swift
index 778bfba..72f77df 100644
--- a/libs/VideoCapture/VideoCapture.swift
+++ b/libs/VideoCapture/VideoCapture.swift
@@ -34,7 +34,7 @@ class VideoCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCa
// setup video format
do {
- captureSession.sessionPreset = AVCaptureSessionPresetInputPriority
+ captureSession.sessionPreset = AVCaptureSession.Preset.inputPriority
if let preferredSpec = preferredSpec {
// update the format with a preferred fps
videoDevice.updateFormatWithPreferredVideoSpec(preferredSpec: preferredSpec)
@@ -58,7 +58,7 @@ class VideoCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCa
// setup audio device input
do {
- let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
+ guard let audioDevice = AVCaptureDevice.default(for: .audio) else {fatalError()}
let audioDeviceInput: AVCaptureDeviceInput
do {
audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
@@ -74,10 +74,10 @@ class VideoCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCa
// setup preview
if let previewContainer = previewContainer {
- guard let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) else {fatalError()}
+ let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = previewContainer.bounds
- previewLayer.contentsGravity = kCAGravityResizeAspectFill
- previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
+ previewLayer.contentsGravity = CALayerContentsGravity.resizeAspectFill
+ previewLayer.videoGravity = .resizeAspectFill
previewContainer.insertSublayer(previewLayer, at: 0)
self.previewLayer = previewLayer
}
@@ -85,7 +85,7 @@ class VideoCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCa
// setup video output
do {
let videoDataOutput = AVCaptureVideoDataOutput()
- videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: NSNumber(value: kCVPixelFormatType_32BGRA)]
+ videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA)]
videoDataOutput.alwaysDiscardsLateVideoFrames = true
let queue = DispatchQueue(label: "com.shu223.videosamplequeue")
videoDataOutput.setSampleBufferDelegate(self, queue: queue)
@@ -94,7 +94,7 @@ class VideoCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCa
}
captureSession.addOutput(videoDataOutput)
- videoConnection = videoDataOutput.connection(withMediaType: AVMediaTypeVideo)
+ videoConnection = videoDataOutput.connection(with: .video)
}
// setup audio output
@@ -107,7 +107,7 @@ class VideoCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCa
}
captureSession.addOutput(audioDataOutput)
- audioConnection = audioDataOutput.connection(withMediaType: AVMediaTypeAudio)
+ audioConnection = audioDataOutput.connection(with: .audio)
}
// setup asset writer
@@ -151,12 +151,11 @@ class VideoCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCa
// =========================================================================
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
- func captureOutput(_ captureOutput: AVCaptureOutput!, didDrop sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
+ func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// print("\(self.classForCoder)/" + #function)
}
-
- func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!)
- {
+
+ func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// FIXME: temp
if connection.videoOrientation != .portrait {
connection.videoOrientation = .portrait