diff --git a/.github/workflows/ios.yml b/.github/workflows/ios.yml new file mode 100644 index 0000000..982c807 --- /dev/null +++ b/.github/workflows/ios.yml @@ -0,0 +1,42 @@ +name: App-CI + +on: + push: + branches: [ u/elbosc/main ] + pull_request: + branches: [ u/elbosc/main ] + +jobs: + build: + name: Build and Test Default Scheme + runs-on: macos-11.0 + + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Set Default Scheme + run: | + scheme_list=$(xcodebuild -list -json | tr -d "\n") + default=$(echo $scheme_list | ruby -e "require 'json'; puts JSON.parse(STDIN.gets)['project']['targets'][0]") + echo $default | cat >default + echo Using default scheme: $default + - name: Build + env: + scheme: ${{ 'default' }} + platform: ${{ 'iOS Simulator' }} + run: | + device=`instruments -s -devices | grep -oE 'iPhone.*?[^\(]+' | head -1 | awk '{$1=$1;print}'` + if [ $scheme = default ]; then scheme=$(cat default); fi + if [ "`ls -A | grep -i \\.xcworkspace\$`" ]; then filetype_parameter="workspace" && file_to_build="`ls -A | grep -i \\.xcworkspace\$`"; else filetype_parameter="project" && file_to_build="`ls -A | grep -i \\.xcodeproj\$`"; fi + file_to_build=`echo $file_to_build | awk '{$1=$1;print}'` + xcodebuild build-for-testing -scheme "$scheme" -"$filetype_parameter" "$file_to_build" -destination "platform=$platform,name=$device" + - name: Test + env: + scheme: ${{ 'default' }} + platform: ${{ 'iOS Simulator' }} + run: | + device=`instruments -s -devices | grep -oE 'iPhone.*?[^\(]+' | head -1 | awk '{$1=$1;print}'` + if [ $scheme = default ]; then scheme=$(cat default); fi + if [ "`ls -A | grep -i \\.xcworkspace\$`" ]; then filetype_parameter="workspace" && file_to_build="`ls -A | grep -i \\.xcworkspace\$`"; else filetype_parameter="project" && file_to_build="`ls -A | grep -i \\.xcodeproj\$`"; fi + file_to_build=`echo $file_to_build | awk '{$1=$1;print}'` + xcodebuild test-without-building -scheme "$scheme" -"$filetype_parameter" "$file_to_build" -destination "platform=$platform,name=$device" diff --git a/LobeTests/Info.plist b/LobeTests/Info.plist new file mode 100644 index 0000000..64d65ca --- /dev/null +++ b/LobeTests/Info.plist @@ -0,0 +1,22 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + $(PRODUCT_NAME) + CFBundlePackageType + $(PRODUCT_BUNDLE_PACKAGE_TYPE) + CFBundleShortVersionString + 1.0 + CFBundleVersion + 1 + + diff --git a/LobeTests/LobeTests.swift b/LobeTests/LobeTests.swift new file mode 100644 index 0000000..c459aa7 --- /dev/null +++ b/LobeTests/LobeTests.swift @@ -0,0 +1,33 @@ +// +// LobeTests.swift +// LobeTests +// +// Created by Elliot Boschwitz on 12/8/20. +// Copyright © 2020 Microsoft. All rights reserved. +// + +import XCTest + +class LobeTests: XCTestCase { + + override func setUpWithError() throws { + // Put setup code here. This method is called before the invocation of each test method in the class. + } + + override func tearDownWithError() throws { + // Put teardown code here. This method is called after the invocation of each test method in the class. + } + + func testExample() throws { + // This is an example of a functional test case. + // Use XCTAssert and related functions to verify your tests produce the correct results. + } + + func testPerformanceExample() throws { + // This is an example of a performance test case. + measure { + // Put the code you want to measure the time of here. 
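            // For instance, one might measure how long the bundled Core ML model takes to load
            // (a sketch, assuming the generated `LobeModel` class is accessible from this test target):
            //
            //     _ = try? LobeModel(contentsOf: LobeModel.urlOfModelInThisBundle)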
+ } + } + +} diff --git a/Lobe_iOS.xcodeproj/project.pbxproj b/Lobe_iOS.xcodeproj/project.pbxproj index 3df972b..9ab55d0 100644 --- a/Lobe_iOS.xcodeproj/project.pbxproj +++ b/Lobe_iOS.xcodeproj/project.pbxproj @@ -9,33 +9,70 @@ /* Begin PBXBuildFile section */ 76A9AF54247F138B002086F7 /* LobeModel.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 76A9AF53247F138B002086F7 /* LobeModel.mlmodel */; }; 76CDE1A4247F2E6D0096E882 /* ImagePicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = 76CDE1A3247F2E6D0096E882 /* ImagePicker.swift */; }; - 76ED73F3248987E3003B4F6B /* MyViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 76ED73F2248987E3003B4F6B /* MyViewController.swift */; }; - 76ED73F5248988AE003B4F6B /* UpdateTextViewExternal.swift in Sources */ = {isa = PBXBuildFile; fileRef = 76ED73F4248988AE003B4F6B /* UpdateTextViewExternal.swift */; }; + 76ED73F3248987E3003B4F6B /* CaptureSessionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 76ED73F2248987E3003B4F6B /* CaptureSessionViewController.swift */; }; + 76ED73F5248988AE003B4F6B /* PredictionLabelView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 76ED73F4248988AE003B4F6B /* PredictionLabelView.swift */; }; 9B7B03EB2475DBF300D34020 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9B7B03EA2475DBF300D34020 /* AppDelegate.swift */; }; 9B7B03ED2475DBF300D34020 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9B7B03EC2475DBF300D34020 /* SceneDelegate.swift */; }; - 9B7B03EF2475DBF300D34020 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9B7B03EE2475DBF300D34020 /* ContentView.swift */; }; + 9B7B03EF2475DBF300D34020 /* PlayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9B7B03EE2475DBF300D34020 /* PlayView.swift */; }; 9B7B03F12475DBF600D34020 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 9B7B03F02475DBF600D34020 /* Assets.xcassets */; }; 9B7B03F42475DBF600D34020 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 9B7B03F32475DBF600D34020 /* Preview Assets.xcassets */; }; 9B7B03F72475DBF600D34020 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 9B7B03F52475DBF600D34020 /* LaunchScreen.storyboard */; }; + B513D84525809D62009D91E2 /* LobeTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = B513D84425809D62009D91E2 /* LobeTests.swift */; }; + B5173D5725774E990065A01F /* PlayViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = B5173D5625774E990065A01F /* PlayViewModel.swift */; }; + B5173D5B25774F400065A01F /* ImagePreview.swift in Sources */ = {isa = PBXBuildFile; fileRef = B5173D5A25774F400065A01F /* ImagePreview.swift */; }; + B53C73762533C150007A0231 /* Project.swift in Sources */ = {isa = PBXBuildFile; fileRef = B53C73752533C150007A0231 /* Project.swift */; }; + B55A1ED32574F368003EE8AD /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B55A1ED22574F368003EE8AD /* CameraView.swift */; }; + B55A1ED825760BAC003EE8AD /* PredictionLayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B55A1ED725760BAC003EE8AD /* PredictionLayer.swift */; }; + B55D41462599BB48007C9DBF /* CaptureSessionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B55D41452599BB48007C9DBF /* CaptureSessionManager.swift */; }; + B5A9B5272557373E00FD595B /* CloudKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B5A9B5262557373E00FD595B /* CloudKit.framework */; }; /* End PBXBuildFile section */ +/* Begin PBXContainerItemProxy 
section */ + B513D84725809D62009D91E2 /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 9B7B03DF2475DBF300D34020 /* Project object */; + proxyType = 1; + remoteGlobalIDString = 9B7B03E62475DBF300D34020; + remoteInfo = Lobe_iOS; + }; +/* End PBXContainerItemProxy section */ + /* Begin PBXFileReference section */ 76A9AF53247F138B002086F7 /* LobeModel.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; name = LobeModel.mlmodel; path = ../LobeModel.mlmodel; sourceTree = ""; }; 76CDE1A3247F2E6D0096E882 /* ImagePicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImagePicker.swift; sourceTree = ""; }; - 76ED73F2248987E3003B4F6B /* MyViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MyViewController.swift; sourceTree = ""; }; - 76ED73F4248988AE003B4F6B /* UpdateTextViewExternal.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UpdateTextViewExternal.swift; sourceTree = ""; }; + 76ED73F2248987E3003B4F6B /* CaptureSessionViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptureSessionViewController.swift; sourceTree = ""; }; + 76ED73F4248988AE003B4F6B /* PredictionLabelView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PredictionLabelView.swift; sourceTree = ""; }; 9B7B03E72475DBF300D34020 /* Lobe.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Lobe.app; sourceTree = BUILT_PRODUCTS_DIR; }; 9B7B03EA2475DBF300D34020 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 9B7B03EC2475DBF300D34020 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; - 9B7B03EE2475DBF300D34020 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; + 9B7B03EE2475DBF300D34020 /* PlayView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayView.swift; sourceTree = ""; }; 9B7B03F02475DBF600D34020 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 9B7B03F32475DBF600D34020 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 9B7B03F62475DBF600D34020 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 9B7B03F82475DBF600D34020 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + B513D84225809D62009D91E2 /* LobeTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = LobeTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + B513D84425809D62009D91E2 /* LobeTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LobeTests.swift; sourceTree = ""; }; + B513D84625809D62009D91E2 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + B5173D5625774E990065A01F /* PlayViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayViewModel.swift; sourceTree 
= ""; }; + B5173D5A25774F400065A01F /* ImagePreview.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImagePreview.swift; sourceTree = ""; }; + B53C73752533C150007A0231 /* Project.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Project.swift; sourceTree = ""; }; + B55A1ED22574F368003EE8AD /* CameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = ""; }; + B55A1ED725760BAC003EE8AD /* PredictionLayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PredictionLayer.swift; sourceTree = ""; }; + B55D41452599BB48007C9DBF /* CaptureSessionManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptureSessionManager.swift; sourceTree = ""; }; + B5A9B5232557372800FD595B /* Lobe.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = Lobe.entitlements; sourceTree = ""; }; + B5A9B5262557373E00FD595B /* CloudKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CloudKit.framework; path = System/Library/Frameworks/CloudKit.framework; sourceTree = SDKROOT; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ 9B7B03E42475DBF300D34020 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + B5A9B5272557373E00FD595B /* CloudKit.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + B513D83F25809D62009D91E2 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( @@ -49,7 +86,9 @@ isa = PBXGroup; children = ( 9B7B03E92475DBF300D34020 /* Lobe_iOS */, + B513D84325809D62009D91E2 /* LobeTests */, 9B7B03E82475DBF300D34020 /* Products */, + B5A9B5252557373E00FD595B /* Frameworks */, ); sourceTree = ""; }; @@ -57,6 +96,7 @@ isa = PBXGroup; children = ( 9B7B03E72475DBF300D34020 /* Lobe.app */, + B513D84225809D62009D91E2 /* LobeTests.xctest */, ); name = Products; sourceTree = ""; @@ -64,14 +104,15 @@ 9B7B03E92475DBF300D34020 /* Lobe_iOS */ = { isa = PBXGroup; children = ( + B56240FA257ED11B00709520 /* Views */, + B55C4E1325D4F7820075F68B /* Models */, + B5173D5625774E990065A01F /* PlayViewModel.swift */, + 76ED73F2248987E3003B4F6B /* CaptureSessionViewController.swift */, + B5A9B5232557372800FD595B /* Lobe.entitlements */, 9B7B03EA2475DBF300D34020 /* AppDelegate.swift */, + 9B7B03F52475DBF600D34020 /* LaunchScreen.storyboard */, 9B7B03EC2475DBF300D34020 /* SceneDelegate.swift */, - 9B7B03EE2475DBF300D34020 /* ContentView.swift */, - 76ED73F4248988AE003B4F6B /* UpdateTextViewExternal.swift */, - 76ED73F2248987E3003B4F6B /* MyViewController.swift */, - 76CDE1A3247F2E6D0096E882 /* ImagePicker.swift */, 9B7B03F02475DBF600D34020 /* Assets.xcassets */, - 9B7B03F52475DBF600D34020 /* LaunchScreen.storyboard */, 76A9AF53247F138B002086F7 /* LobeModel.mlmodel */, 9B7B03F82475DBF600D34020 /* Info.plist */, 9B7B03F22475DBF600D34020 /* Preview Content */, @@ -87,6 +128,45 @@ path = "Preview Content"; sourceTree = ""; }; + B513D84325809D62009D91E2 /* LobeTests */ = { + isa = PBXGroup; + children = ( + B513D84425809D62009D91E2 /* LobeTests.swift */, + B513D84625809D62009D91E2 /* Info.plist */, + ); + path = LobeTests; + sourceTree = ""; + }; + B55C4E1325D4F7820075F68B /* Models */ = { + isa = PBXGroup; + children = ( + B55D41452599BB48007C9DBF /* CaptureSessionManager.swift */, + B55A1ED725760BAC003EE8AD /* PredictionLayer.swift */, 
+ B53C73752533C150007A0231 /* Project.swift */, + ); + path = Models; + sourceTree = ""; + }; + B56240FA257ED11B00709520 /* Views */ = { + isa = PBXGroup; + children = ( + 9B7B03EE2475DBF300D34020 /* PlayView.swift */, + 76ED73F4248988AE003B4F6B /* PredictionLabelView.swift */, + 76CDE1A3247F2E6D0096E882 /* ImagePicker.swift */, + B55A1ED22574F368003EE8AD /* CameraView.swift */, + B5173D5A25774F400065A01F /* ImagePreview.swift */, + ); + path = Views; + sourceTree = ""; + }; + B5A9B5252557373E00FD595B /* Frameworks */ = { + isa = PBXGroup; + children = ( + B5A9B5262557373E00FD595B /* CloudKit.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ @@ -107,19 +187,41 @@ productReference = 9B7B03E72475DBF300D34020 /* Lobe.app */; productType = "com.apple.product-type.application"; }; + B513D84125809D62009D91E2 /* LobeTests */ = { + isa = PBXNativeTarget; + buildConfigurationList = B513D84B25809D62009D91E2 /* Build configuration list for PBXNativeTarget "LobeTests" */; + buildPhases = ( + B513D83E25809D62009D91E2 /* Sources */, + B513D83F25809D62009D91E2 /* Frameworks */, + B513D84025809D62009D91E2 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + B513D84825809D62009D91E2 /* PBXTargetDependency */, + ); + name = LobeTests; + productName = LobeTests; + productReference = B513D84225809D62009D91E2 /* LobeTests.xctest */; + productType = "com.apple.product-type.bundle.unit-test"; + }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ 9B7B03DF2475DBF300D34020 /* Project object */ = { isa = PBXProject; attributes = { - LastSwiftUpdateCheck = 1140; - LastUpgradeCheck = 1140; - ORGANIZATIONNAME = "Adam Menges"; + LastSwiftUpdateCheck = 1220; + LastUpgradeCheck = 1200; + ORGANIZATIONNAME = Microsoft; TargetAttributes = { 9B7B03E62475DBF300D34020 = { CreatedOnToolsVersion = 11.4.1; }; + B513D84125809D62009D91E2 = { + CreatedOnToolsVersion = 12.2; + TestTargetID = 9B7B03E62475DBF300D34020; + }; }; }; buildConfigurationList = 9B7B03E22475DBF300D34020 /* Build configuration list for PBXProject "Lobe_iOS" */; @@ -136,6 +238,7 @@ projectRoot = ""; targets = ( 9B7B03E62475DBF300D34020 /* Lobe_iOS */, + B513D84125809D62009D91E2 /* LobeTests */, ); }; /* End PBXProject section */ @@ -151,6 +254,13 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + B513D84025809D62009D91E2 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXResourcesBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ @@ -158,18 +268,40 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + B55D41462599BB48007C9DBF /* CaptureSessionManager.swift in Sources */, 76A9AF54247F138B002086F7 /* LobeModel.mlmodel in Sources */, 9B7B03EB2475DBF300D34020 /* AppDelegate.swift in Sources */, + B5173D5B25774F400065A01F /* ImagePreview.swift in Sources */, + B55A1ED32574F368003EE8AD /* CameraView.swift in Sources */, 76CDE1A4247F2E6D0096E882 /* ImagePicker.swift in Sources */, 9B7B03ED2475DBF300D34020 /* SceneDelegate.swift in Sources */, - 76ED73F5248988AE003B4F6B /* UpdateTextViewExternal.swift in Sources */, - 76ED73F3248987E3003B4F6B /* MyViewController.swift in Sources */, - 9B7B03EF2475DBF300D34020 /* ContentView.swift in Sources */, + 76ED73F5248988AE003B4F6B /* PredictionLabelView.swift in Sources */, + 76ED73F3248987E3003B4F6B /* CaptureSessionViewController.swift in Sources */, + B55A1ED825760BAC003EE8AD /* 
PredictionLayer.swift in Sources */, + B5173D5725774E990065A01F /* PlayViewModel.swift in Sources */, + 9B7B03EF2475DBF300D34020 /* PlayView.swift in Sources */, + B53C73762533C150007A0231 /* Project.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + B513D83E25809D62009D91E2 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + B513D84525809D62009D91E2 /* LobeTests.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXSourcesBuildPhase section */ +/* Begin PBXTargetDependency section */ + B513D84825809D62009D91E2 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 9B7B03E62475DBF300D34020 /* Lobe_iOS */; + targetProxy = B513D84725809D62009D91E2 /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + /* Begin PBXVariantGroup section */ 9B7B03F52475DBF600D34020 /* LaunchScreen.storyboard */ = { isa = PBXVariantGroup; @@ -208,6 +340,7 @@ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; @@ -268,6 +401,7 @@ CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; @@ -300,17 +434,20 @@ isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_ENTITLEMENTS = Lobe_iOS/Lobe.entitlements; + CODE_SIGN_IDENTITY = "Apple Development"; CODE_SIGN_STYLE = Automatic; DEVELOPMENT_ASSET_PATHS = "\"Lobe_iOS/Preview Content\""; - DEVELOPMENT_TEAM = NGV85M44ZU; + DEVELOPMENT_TEAM = UBF8T346G9; ENABLE_PREVIEWS = YES; INFOPLIST_FILE = Lobe_iOS/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", ); - PRODUCT_BUNDLE_IDENTIFIER = "adammenges.Lobe-iOS"; + PRODUCT_BUNDLE_IDENTIFIER = com.microsoft.lobe; PRODUCT_NAME = Lobe; + PROVISIONING_PROFILE_SPECIFIER = ""; SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2"; }; @@ -320,19 +457,64 @@ isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_ENTITLEMENTS = Lobe_iOS/Lobe.entitlements; + CODE_SIGN_IDENTITY = "Apple Development"; CODE_SIGN_STYLE = Automatic; DEVELOPMENT_ASSET_PATHS = "\"Lobe_iOS/Preview Content\""; - DEVELOPMENT_TEAM = NGV85M44ZU; + DEVELOPMENT_TEAM = UBF8T346G9; ENABLE_PREVIEWS = YES; INFOPLIST_FILE = Lobe_iOS/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", ); - PRODUCT_BUNDLE_IDENTIFIER = "adammenges.Lobe-iOS"; + PRODUCT_BUNDLE_IDENTIFIER = com.microsoft.lobe; PRODUCT_NAME = Lobe; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; + B513D84925809D62009D91E2 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = UBF8T346G9; + INFOPLIST_FILE = LobeTests/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 14.2; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + "@loader_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = test.LobeTests; + PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY 
= "1,2"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Lobe.app/Lobe"; + }; + name = Debug; + }; + B513D84A25809D62009D91E2 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = UBF8T346G9; + INFOPLIST_FILE = LobeTests/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 14.2; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + "@loader_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = test.LobeTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Lobe.app/Lobe"; }; name = Release; }; @@ -357,6 +539,15 @@ defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; + B513D84B25809D62009D91E2 /* Build configuration list for PBXNativeTarget "LobeTests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + B513D84925809D62009D91E2 /* Debug */, + B513D84A25809D62009D91E2 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; /* End XCConfigurationList section */ }; rootObject = 9B7B03DF2475DBF300D34020 /* Project object */; diff --git a/Lobe_iOS/AppDelegate.swift b/Lobe_iOS/AppDelegate.swift index 5c0ffa2..833f72f 100644 --- a/Lobe_iOS/AppDelegate.swift +++ b/Lobe_iOS/AppDelegate.swift @@ -1,3 +1,11 @@ +// +// AppDelegate.swift +// Lobe_iOS +// +// Created by Adam Menges on 5/20/20. +// Copyright © 2020 Microsoft. All rights reserved. +// + import UIKit @UIApplicationMain diff --git a/Lobe_iOS/CaptureSessionViewController.swift b/Lobe_iOS/CaptureSessionViewController.swift new file mode 100644 index 0000000..761bd4f --- /dev/null +++ b/Lobe_iOS/CaptureSessionViewController.swift @@ -0,0 +1,109 @@ +// +// CaptureSessionViewController.swift +// Lobe_iOS +// +// Created by Kathy Zhou on 6/4/20. +// Copyright © 2020 Microsoft. All rights reserved. +// + +import AVKit +import Foundation + +/// Defines tap gesture delegate protocol. +protocol CaptureSessionGestureDelegate { + func viewRecognizedDoubleTap() + func viewRecognizedTripleTap(_ view: UIView) +} + +/// View controller for video capture session. It's responsibilities include: +/// 1. Setting camera output to UI view. +/// 2. Handling orientation changes. +/// 3. Managing tap gestures. +class CaptureSessionViewController: UIViewController { + var previewLayer: AVCaptureVideoPreviewLayer? + var tripleTapGesture: UITapGestureRecognizer? + var doubleTapGesture: UITapGestureRecognizer? + var gestureDelegate: CaptureSessionGestureDelegate? + + override func viewDidLoad() { + super.viewDidLoad() + + /// Define gesture event listeners. We don't use SwiftUI since there isn't support for + /// recognizing a double tap gesture when a triple tap gesture is also present. 
+ let doubleTapGesture = UITapGestureRecognizer(target: self, action:#selector(self.handleDoubleTap(_:))) + doubleTapGesture.numberOfTapsRequired = 2 + view.addGestureRecognizer(doubleTapGesture) + + let tripleTapGesture = UITapGestureRecognizer(target: self, action:#selector(self.handleTripleTap(_:))) + tripleTapGesture.numberOfTapsRequired = 3 + view.addGestureRecognizer(tripleTapGesture) + doubleTapGesture.require(toFail: tripleTapGesture) + } + + /// Set video configuration for subview layout + override func viewDidLayoutSubviews() { + super.viewDidLayoutSubviews() + self.configureVideoOrientation(for: self.previewLayer) + } + + /// Update video configuration when device orientation changes + override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) { + super.viewWillTransition(to: size, with: coordinator) + self.configureVideoOrientation(for: self.previewLayer) + } + + /// Configures orientation of preview layer for AVCapture session. + func configureVideoOrientation(for previewLayer: AVCaptureVideoPreviewLayer?) { + if let preview = previewLayer, + let connection = preview.connection { + let orientation = UIDevice.current.orientation + + if connection.isVideoOrientationSupported { + var videoOrientation: AVCaptureVideoOrientation + + switch orientation { + case .portrait: + videoOrientation = .portrait + case .portraitUpsideDown: + videoOrientation = .portraitUpsideDown + case .landscapeLeft: + videoOrientation = .landscapeRight + case .landscapeRight: + videoOrientation = .landscapeLeft + default: + videoOrientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation.asAVCaptureVideoOrientation() ?? .portrait + } + connection.videoOrientation = videoOrientation + } + preview.frame = self.view.bounds + } + } + + /// Double tap flips camera. + @objc func handleDoubleTap(_ sender: UITapGestureRecognizer? = nil) { + self.gestureDelegate?.viewRecognizedDoubleTap() + } + + /// Triple tap creates screen shot. + @objc func handleTripleTap(_ sender: UITapGestureRecognizer? = nil) { + self.gestureDelegate?.viewRecognizedTripleTap(self.view) + } +} + +/// Conversion helper for AVCaptureSession orientation changes. +extension UIInterfaceOrientation { + func asAVCaptureVideoOrientation() -> AVCaptureVideoOrientation { + switch self { + case .portrait: + return .portrait + case .landscapeLeft: + return .landscapeLeft + case .landscapeRight: + return .landscapeRight + case .portraitUpsideDown: + return .portraitUpsideDown + default: + return .portrait + } + } +} diff --git a/Lobe_iOS/ContentView.swift b/Lobe_iOS/ContentView.swift deleted file mode 100644 index 80ed66d..0000000 --- a/Lobe_iOS/ContentView.swift +++ /dev/null @@ -1,152 +0,0 @@ -import SwiftUI -import AVKit -import Vision - -var useCamera: Bool = true - -struct ContentView: View { - - var controller: MyViewController = MyViewController() - @State var showImagePicker: Bool = false - @State private var image: UIImage? - @State var scaling: CGSize = .init(width: 1, height: 1) - @State private var offset = CGSize.zero - - var body: some View { - GeometryReader { geometry in - - VStack { - if (self.image != nil) { - /* Placeholder for displaying an image from the photo library. */ - Image(uiImage: self.image!) - .resizable() - .aspectRatio(self.image!.size, contentMode: .fill) - .scaleEffect(1 / self.scaling.height) - .offset(self.offset) - /* Gesture for swiping down to dismiss the image. 
*/ - .gesture(DragGesture() - .onChanged ({value in - self.scaling = value.translation - self.scaling.height = max(self.scaling.height / 50, 1) - self.offset = value.translation - }) - .onEnded {_ in - self.offset = .zero - if self.scaling.height > 1.5 { - self.image = nil - useCamera = true - self.controller.changeStatus(useCam: useCamera, img: self.controller.camImage!) - } - self.scaling = .init(width: 1, height: 1) - } - ) - .opacity(1 / self.scaling.height < 1 ? 0.5: 1) - } else { - /* Background camera. */ - MyRepresentable(controller: self.controller) - /* Gesture for swiping up the photo library. */ - .gesture( - DragGesture() - .onEnded {value in - if value.translation.height < 0 { - withAnimation{ - self.showImagePicker = true - } - self.controller.changeStatus(useCam: false, img: self.controller.camImage!) - } - } - ) - } - } - .frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, maxHeight: .infinity) - .background(Color.black) - .edgesIgnoringSafeArea(.all) - - HStack { - Spacer() - /* Icon for closing the image*/ - Image("x") - .resizable() - .opacity(self.image != nil ? 1: 0) - .frame(width: geometry.size.width / 15, height: geometry.size.width / 15) - .onTapGesture { - self.image = nil - useCamera = true - self.controller.changeStatus(useCam: useCamera, img: self.controller.camImage!) - } - }.padding() - - VStack { - Spacer() - UpdateTextViewExternal(viewModel: self.controller) - HStack { - - /* Button for openning the photo library. */ - Button(action: { - withAnimation { - self.showImagePicker = true - } - self.controller.changeStatus(useCam: false, img: self.controller.camImage!) - }) { - Image("PhotoLib") - .renderingMode(.original) - .frame(width: geometry.size.width / 3, height: geometry.size.height / 16) - }.opacity(0) // not displaying the button - - /* button for taking screenshot. */ - Button(action: { - self.controller.screenShotMethod() - }) { - Image("Button") - .renderingMode(.original) - .frame(width: geometry.size.width / 3, height: geometry.size.width / 9) - }.opacity(0) // not displaying the button - - /* button for flipping the camera. */ - Button(action: { - self.controller.flipCamera() - }) { - Image("Swap") - .renderingMode(.original) - .frame(width: geometry.size.width / 3, height: geometry.size.height / 16) - }.opacity(0) // not displaying the button - } - .frame(width: geometry.size.width, - height: geometry.size.height / 30, alignment: .bottom) - .opacity(self.image == nil ? 1: 0) // hide the buttons when displaying an image from the photo library - } - - ImagePicker(image: self.$image, isShown: self.$showImagePicker, controller: self.controller, sourceType: .photoLibrary) - .edgesIgnoringSafeArea(.all) - .offset(x: 0, y: self.showImagePicker ? 0: UIApplication.shared.keyWindow?.frame.height ?? 0) - }.statusBar(hidden: true) - } - -} - - -/* Gadget to build colors from Hashtag Color Code Hex. 
*/ -extension UIColor { - convenience init(red: Int, green: Int, blue: Int) { - assert(red >= 0 && red <= 255, "Invalid red component") - assert(green >= 0 && green <= 255, "Invalid green component") - assert(blue >= 0 && blue <= 255, "Invalid blue component") - - self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: 1.0) - } - - convenience init(rgb: Int) { - self.init( - red: (rgb >> 16) & 0xFF, - green: (rgb >> 8) & 0xFF, - blue: rgb & 0xFF - ) - } - -} - -struct ContentView_Previews: PreviewProvider { - static var previews: some View { - ContentView() - } -} diff --git a/Lobe_iOS/ImagePicker.swift b/Lobe_iOS/ImagePicker.swift deleted file mode 100644 index 4c362a4..0000000 --- a/Lobe_iOS/ImagePicker.swift +++ /dev/null @@ -1,54 +0,0 @@ -import Foundation -import SwiftUI - -class ImagePickerCoordinator: NSObject, UINavigationControllerDelegate, UIImagePickerControllerDelegate { - - @Binding var image: UIImage? - @Binding var isShown: Bool - var controller: MyViewController - - init(image: Binding, isShown: Binding, controller: MyViewController) { - _image = image - _isShown = isShown - self.controller = controller - } - func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) { - if let uiImage = info[UIImagePickerController.InfoKey.originalImage] as? UIImage { - image = uiImage - isShown = false - useCamera = false - controller.changeStatus(useCam: false, img: uiImage) - } - } - func imagePickerControllerDidCancel(_ picker: UIImagePickerController) { - isShown = false - self.controller.changeStatus(useCam: true, img: self.controller.camImage!) - } - -} - -/* Image picker. */ -struct ImagePicker: UIViewControllerRepresentable { - - typealias UIViewControllerType = UIImagePickerController - typealias Coordinator = ImagePickerCoordinator - - @Binding var image: UIImage? 
- @Binding var isShown: Bool - var controller: MyViewController - var sourceType: UIImagePickerController.SourceType = .camera - - func updateUIViewController(_ uiViewController: UIImagePickerController, context: UIViewControllerRepresentableContext) { - } - func makeCoordinator() -> ImagePicker.Coordinator { - return ImagePickerCoordinator(image: $image, isShown: $isShown, controller: controller) - } - func makeUIViewController(context: UIViewControllerRepresentableContext) -> UIImagePickerController { - let picker = UIImagePickerController() - picker.sourceType = sourceType - picker.delegate = context.coordinator - picker.modalPresentationStyle = .fullScreen - return picker - } - -} diff --git a/Lobe_iOS/Info.plist b/Lobe_iOS/Info.plist index 8d55f5b..1fbb014 100644 --- a/Lobe_iOS/Info.plist +++ b/Lobe_iOS/Info.plist @@ -2,14 +2,6 @@ - UIAppFonts - - labgrotesque-bold.ttf - - NSPhotoLibraryAddUsageDescription - To save screenshot - NSCameraUsageDescription - This app needs to access the camera CFBundleDevelopmentRegion $(DEVELOPMENT_LANGUAGE) CFBundleExecutable @@ -28,6 +20,19 @@ 1 LSRequiresIPhoneOS + NSAppTransportSecurity + + NSAllowsArbitraryLoads + + + NSCameraUsageDescription + This app needs to access the camera + NSPhotoLibraryAddUsageDescription + To save screenshot + UIAppFonts + + labgrotesque-bold.ttf + UIApplicationSceneManifest UIApplicationSupportsMultipleScenes diff --git a/Lobe_iOS/Lobe.entitlements b/Lobe_iOS/Lobe.entitlements new file mode 100644 index 0000000..1d1012c --- /dev/null +++ b/Lobe_iOS/Lobe.entitlements @@ -0,0 +1,16 @@ + + + + + aps-environment + development + com.apple.developer.icloud-container-identifiers + + com.apple.developer.icloud-services + + CloudDocuments + + com.apple.developer.ubiquity-container-identifiers + + + diff --git a/Lobe_iOS/Models/CaptureSessionManager.swift b/Lobe_iOS/Models/CaptureSessionManager.swift new file mode 100644 index 0000000..b8d843f --- /dev/null +++ b/Lobe_iOS/Models/CaptureSessionManager.swift @@ -0,0 +1,231 @@ +// +// CaptureSessionViewModel.swift +// Lobe_iOS +// +// Created by Elliot Boschwitz on 12/27/20. +// Copyright © 2020 Microsoft. All rights reserved. +// + +import AVKit +import Combine +import SwiftUI +import VideoToolbox + +/// View model for camera view. +class CaptureSessionManager: NSObject { + @Published var previewLayer: AVCaptureVideoPreviewLayer? + @Published var capturedImageOutput: UIImage? + var captureSession: AVCaptureSession? + private var backCam = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices.first + private var frontCam = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .front).devices.first + private var dataOutput: AVCaptureVideoDataOutput? + private var captureDevice: AVCaptureDevice? + private var disposables = Set() + private var totalFrameCount = 0 + + override init() { + self.captureDevice = self.backCam + } + + /// Resets camera feed, which: + /// 1. Creates capture session for specified device. + /// 2. Creates preview layer. + /// 3. Creates new video data output. + /// 4. Starts capture session. + func resetCameraFeed() { + guard let captureDevice = self.captureDevice else { + print("No capture device found on reset camera feed.") + return + } + /// Tear down existing capture session to remove output for buffer delegate. + self.captureSession = nil + self.dataOutput = nil + + /// Create new capture session and preview layer. 
+        let captureSession = self.createCaptureSession(for: captureDevice)
+        let previewLayer = self.createPreviewLayer(for: captureSession)
+        let dataOutput = AVCaptureVideoDataOutput()
+
+        /// Set delegate of video output buffer to self.
+        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
+        captureSession.startRunning()
+        captureSession.addOutput(dataOutput)
+
+        self.captureSession = captureSession
+        self.previewLayer = previewLayer
+        self.dataOutput = dataOutput
+    }
+
+    /// On disable, stop running the capture session and then tear it down.
+    /// Both steps are required to properly shut down the camera session.
+    func tearDown() {
+        self.captureSession?.stopRunning()
+        self.captureSession = nil
+    }
+
+    /// Creates a capture session for the given input device.
+    private func createCaptureSession(for captureDevice: AVCaptureDevice) -> AVCaptureSession {
+        let captureSession = AVCaptureSession()
+
+        do {
+            let input = try AVCaptureDeviceInput(device: captureDevice)
+            captureSession.addInput(input)
+        } catch {
+            print("Could not create AVCaptureDeviceInput in viewDidLoad.")
+        }
+
+        return captureSession
+    }
+
+    /// Sets up the preview layer which gets displayed in the view controller.
+    func createPreviewLayer(for captureSession: AVCaptureSession) -> AVCaptureVideoPreviewLayer {
+        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
+        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
+        return previewLayer
+    }
+
+    /// Toggles between the front and back cameras.
+    func rotateCamera() {
+        self.captureDevice = (captureDevice == backCam) ? frontCam : backCam
+        self.resetCameraFeed()
+    }
+
+    /// Wrapper for taking a screenshot.
+    func takeScreenShot(in view: UIView) {
+        guard let camImage = self.capturedImageOutput else {
+            fatalError("Could not call takeScreenShot")
+        }
+
+        /// Create a `UIImageView` for overlaying the shutter animation over the camera view.
+        /// Remove it from the super view after the image is saved to storage.
+        let imageView = UIImageView(image: camImage)
+        screenShotAnimate(in: view, imageView: imageView)
+        UIImageWriteToSavedPhotosAlbum(camImage, nil, nil, nil)
+        imageView.removeFromSuperview()
+    }
+
+    /// Provides a flash animation when a screenshot is triggered.
+    private func screenShotAnimate(in view: UIView, imageView: UIImageView) {
+        imageView.contentMode = .scaleAspectFit
+        imageView.frame = view.frame
+
+        let black = UIImage(named: "Black")
+        let blackView = UIImageView(image: black)
+        imageView.contentMode = .scaleAspectFill
+        blackView.frame = view.frame
+        view.addSubview(blackView)
+        blackView.alpha = 1
+
+        /// Shutter animation.
+        UIView.animate(withDuration: 0.3, delay: 0, options: UIView.AnimationOptions.curveLinear, animations: {
+            blackView.alpha = 0
+        }, completion: nil)
+    }
+}
+
+extension CaptureSessionManager: AVCaptureVideoDataOutputSampleBufferDelegate {
+    /// Delegate method for `AVCaptureVideoDataOutputSampleBufferDelegate`: formats the image for inference.
+    /// The delegate is set in the capture session view model.
+    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+        /// Skip frames to optimize.
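+        /// (As a rough sketch of the intent: at a typical ~30 fps capture rate, forwarding only every
+        /// 20th frame gives the model roughly one to two frames per second. The frame rate here is an
+        /// assumption, not a measured figure.)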
+ totalFrameCount += 1 + if totalFrameCount % 20 != 0{ return } + + guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), + let image = UIImage(pixelBuffer: pixelBuffer), + let previewLayer = self.previewLayer, + let videoOrientation = previewLayer.connection?.videoOrientation else { + print("Failed creating image at captureOutput.") + return + } + + /// Determine rotation by radians given device orientation and camera device + var radiansToRotate = CGFloat(0) + switch videoOrientation { + case .portrait: + radiansToRotate = .pi / 2 + break + case .portraitUpsideDown: + radiansToRotate = (3 * .pi) / 2 + break + case .landscapeLeft: + if (self.captureDevice == self.backCam) { + radiansToRotate = .pi + } + break + case .landscapeRight: + if (self.captureDevice == self.frontCam) { + radiansToRotate = .pi + } + break + default: + break + } + + /// Rotate image and flip over x-axis if using front-facing cam. + let isUsingFrontCam = self.captureDevice == self.frontCam + guard let rotatedImage = image.rotate(radians: radiansToRotate, flipX: isUsingFrontCam) else { + fatalError("Could not rotate or crop image.") + } + + self.capturedImageOutput = rotatedImage + } +} + +/// Helpers for editing images. +extension UIImage { + var isPortrait: Bool { size.height > size.width } + var isLandscape: Bool { size.width > size.height } + var breadth: CGFloat { min(size.width, size.height) } + var breadthSize: CGSize { .init(width: breadth, height: breadth) } + + func squared(isOpaque: Bool = false) -> UIImage? { + guard let cgImage = cgImage? + .cropping(to: .init(origin: .init(x: isLandscape ? ((size.width-size.height)/2).rounded(.down) : 0, + y: isPortrait ? ((size.height-size.width)/2).rounded(.down) : 0), + size: breadthSize)) else { return nil } + let format = imageRendererFormat + format.opaque = isOpaque + return UIGraphicsImageRenderer(size: breadthSize, format: format).image { _ in + UIImage(cgImage: cgImage, scale: 1, orientation: imageOrientation) + .draw(in: .init(origin: .zero, size: breadthSize)) + } + } + public convenience init?(pixelBuffer: CVPixelBuffer) { + var cgImage: CGImage? + VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage) + + guard let myImage = cgImage else { + return nil + } + + self.init(cgImage: myImage) + } + + func rotate(radians: CGFloat, flipX: Bool = false) -> UIImage? { + var newSize = CGRect(origin: CGPoint.zero, size: self.size).applying(CGAffineTransform(rotationAngle: CGFloat(radians))).size + // Trim off the extremely small float value to prevent core graphics from rounding it up + newSize.width = floor(newSize.width) + newSize.height = floor(newSize.height) + + UIGraphicsBeginImageContextWithOptions(newSize, false, self.scale) + let context = UIGraphicsGetCurrentContext()! 
+
+        // Move origin to middle
+        context.translateBy(x: newSize.width/2, y: newSize.height/2)
+
+        // Flip x-axis if specified (used to correct the front-facing cam)
+        if flipX { context.scaleBy(x: -1, y: 1) }
+
+        // Rotate around middle
+        context.rotate(by: CGFloat(radians))
+
+        // Draw the image at its center
+        self.draw(in: CGRect(x: -self.size.width/2, y: -self.size.height/2, width: self.size.width, height: self.size.height))
+
+        let newImage = UIGraphicsGetImageFromCurrentImageContext()
+        UIGraphicsEndImageContext()
+
+        return newImage
+    }
+}
diff --git a/Lobe_iOS/Models/PredictionLayer.swift b/Lobe_iOS/Models/PredictionLayer.swift
new file mode 100644
index 0000000..a8afe1d
--- /dev/null
+++ b/Lobe_iOS/Models/PredictionLayer.swift
@@ -0,0 +1,79 @@
+//
+// PredictionLayer.swift
+// Lobe_iOS
+//
+// Created by Elliot Boschwitz on 11/30/20.
+// Copyright © 2020 Microsoft. All rights reserved.
+//
+
+import Combine
+import SwiftUI
+import Vision
+
+/// Backend logic for running classification predictions on a given image.
+class PredictionLayer: NSObject {
+    @Published var classificationResult: VNClassificationObservation?
+    var model: VNCoreMLModel?
+
+    /// Used for debugging image output
+    @Published var imageForPrediction: UIImage?
+
+    init(model: VNCoreMLModel?) {
+        self.model = model
+    }
+
+    /// Prediction handler which updates the `classificationResult` publisher.
+    func getPrediction(forImage image: UIImage) {
+        let requestHandler = createPredictionRequestHandler(forImage: image)
+
+        /// Add image to publisher if the environment variable is enabled.
+        /// Used for debugging purposes.
+        if Bool(ProcessInfo.processInfo.environment["SHOW_FORMATTED_IMAGE"] ?? "false") ?? false {
+            self.imageForPrediction = image
+        }
+
+        /// Create model request.
+        let request = createModelRequest(
+            /// Set classification result to publisher
+            onComplete: { [weak self] request in
+                guard let classifications = request.results as? [VNClassificationObservation],
+                      !classifications.isEmpty else {
+                    self?.classificationResult = nil
+                    return
+                }
+                let topClassifications = classifications.prefix(1)
+                self?.classificationResult = topClassifications[0]
+            }, onError: { [weak self] error in
+                print("Error getting predictions: \(error)")
+                self?.classificationResult = nil
+            })
+
+        try? requestHandler.perform([request])
+    }
+
+    /// Creates the request handler and formats the image for prediction processing.
+    private func createPredictionRequestHandler(forImage image: UIImage) -> VNImageRequestHandler {
+        /* Crop to square images and send to the model. */
+        guard let cgImage = image.cgImage else {
+            fatalError("Could not create cgImage in captureOutput")
+        }
+
+        let ciImage = CIImage(cgImage: cgImage)
+        let requestHandler = VNImageRequestHandler(ciImage: ciImage)
+        return requestHandler
+    }
+
+    private func createModelRequest(onComplete: @escaping (VNRequest) -> (), onError: @escaping (Error) -> ()) -> VNCoreMLRequest {
+        guard let model = model else {
+            fatalError("Model not found in prediction layer")
+        }
+
+        let request = VNCoreMLRequest(model: model, completionHandler: { (request, error) in
+            if let error = error {
+                onError(error)
+            }
+            onComplete(request)
+        })
+        return request
+    }
+}
diff --git a/Lobe_iOS/Models/Project.swift b/Lobe_iOS/Models/Project.swift
new file mode 100644
index 0000000..30dcfe6
--- /dev/null
+++ b/Lobe_iOS/Models/Project.swift
@@ -0,0 +1,22 @@
+//
+// Project.swift
+// Lobe_iOS
+//
+// Created by Elliot Boschwitz on 10/11/20.
+// Copyright © 2020 Microsoft. All rights reserved.
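// A minimal sketch of how the PredictionLayer above might be exercised on its own. The names
// `coreMLModel`, `cancellables`, and `photo` are illustrative placeholders, not identifiers
// introduced by this change:
//
//     let layer = PredictionLayer(model: try? VNCoreMLModel(for: coreMLModel))
//     layer.$classificationResult
//         .compactMap { $0 }
//         .receive(on: DispatchQueue.main)
//         .sink { result in print("\(result.identifier): \(result.confidence)") }
//         .store(in: &cancellables)
//     layer.getPrediction(forImage: photo)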
+// + +import Foundation +import Vision + +/// Project class. +struct Project { + var model: VNCoreMLModel? + + /// Initialize Project instance with MLModel. + init(mlModel: MLModel?) { + if let mlModel = mlModel { + self.model = try? VNCoreMLModel(for: mlModel) + } + } +} diff --git a/Lobe_iOS/MyViewController.swift b/Lobe_iOS/MyViewController.swift deleted file mode 100644 index e4de7ec..0000000 --- a/Lobe_iOS/MyViewController.swift +++ /dev/null @@ -1,246 +0,0 @@ -import Foundation -import SwiftUI -import AVKit -import Vision - -struct MyRepresentable: UIViewControllerRepresentable{ - - @State var controller: MyViewController - func makeUIViewController(context: Context) -> MyViewController { - return self.controller - } - func updateUIViewController(_ uiViewController: MyViewController, context: Context) { - - } -} - -/* Camera session; ML request handling. */ -class MyViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, ObservableObject { - - @Published var classificationLabel: String? - var backCam: AVCaptureDevice! - var frontCam: AVCaptureDevice! - var captureDevice: AVCaptureDevice! - var captureSession = AVCaptureSession() - var previewLayer: AVCaptureVideoPreviewLayer? - var useCam: Bool = true - var img: UIImage? - var confidence: Float? - var camImage: UIImage? - var totalFrameCount = 0 - - var tripleTapGesture = UITapGestureRecognizer() - var doubleTapGesture = UITapGestureRecognizer() - - @objc func handleDoubleTap(_ sender: UITapGestureRecognizer? = nil) { - flipCamera() - } - @objc func handleTripleTap(_ sender: UITapGestureRecognizer? = nil) { - screenShotMethod() - } - @objc func screenShotMethod() { - let imageView = UIImageView(image: self.camImage!) - imageView.contentMode = .scaleAspectFit - imageView.frame = view.frame - - let black = UIImage(named: "Black") - let blackView = UIImageView(image: black) - imageView.contentMode = .scaleAspectFill - blackView.frame = view.frame - view.addSubview(blackView) - blackView.alpha = 1 - - /* Shutter animation. */ - UIView.animate(withDuration: 0.3, delay: 0, options: UIView.AnimationOptions.curveLinear, animations: { - blackView.alpha = 0 - }, completion: nil) - - if useCamera{ - UIView.transition(with: view, duration: 1, options: .curveEaseIn, animations: nil) - view.addSubview(imageView) - self.changeStatus(useCam: false, img: camImage!) - } - let layer = UIApplication.shared.keyWindow!.layer - let scale = UIScreen.main.scale - UIGraphicsBeginImageContextWithOptions(layer.frame.size, false, scale); - layer.render(in: UIGraphicsGetCurrentContext()!) - let screenshot = UIGraphicsGetImageFromCurrentImageContext() - UIGraphicsEndImageContext() - UIImageWriteToSavedPhotosAlbum(screenshot!, nil, nil, nil) - if useCamera { - imageView.removeFromSuperview() - self.changeStatus(useCam: true, img: self.camImage!) - } - } - @objc func flipCamera() { - UIView.transition(with: view, duration: 0.5, options: .transitionFlipFromLeft, animations: nil) - if captureDevice == backCam{ - captureDevice = frontCam} - else { - captureDevice = backCam} - captureSession = AVCaptureSession() - guard let input = try? 
AVCaptureDeviceInput(device: self.captureDevice) else {return} - captureSession.addInput(input) - captureSession.startRunning() - setPreviewLayer() - setOutput() - } - override func viewDidLoad() { - super.viewDidLoad() - - doubleTapGesture = UITapGestureRecognizer(target: self, action:#selector(self.handleDoubleTap(_:))) - doubleTapGesture.numberOfTapsRequired = 2 - view.addGestureRecognizer(doubleTapGesture) - - tripleTapGesture = UITapGestureRecognizer(target: self, action:#selector(self.handleTripleTap(_:))) - tripleTapGesture.numberOfTapsRequired = 3 - view.addGestureRecognizer(tripleTapGesture) - doubleTapGesture.require(toFail: tripleTapGesture) - - backCam = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices.first - frontCam = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .front).devices.first - captureDevice = backCam - let input: AVCaptureInput! - if self.captureDevice != nil { - input = try! AVCaptureDeviceInput(device: self.captureDevice) - } else { - return - } - captureSession.addInput(input) - captureSession.startRunning() - setPreviewLayer() - setOutput() - } - func setPreviewLayer() { - previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) - previewLayer!.videoGravity = AVLayerVideoGravity.resizeAspectFill - view.layer.addSublayer(previewLayer!) - previewLayer!.frame = view.frame - } - func setOutput() { - let dataOutput = AVCaptureVideoDataOutput() - dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue")) - captureSession.addOutput(dataOutput) - } - func getDevice(position: AVCaptureDevice.Position) -> AVCaptureDevice? { - let cam = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: position).devices - return cam.first - } - func changeStatus(useCam: Bool, img: UIImage){ - if useCam { - self.useCam = true - self.img = nil - } else { - self.useCam = false - self.img = img - } - } - func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - - /* Skip frames to optimize. */ - totalFrameCount += 1 - if totalFrameCount % 20 != 0{ return } - - guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } - - let curImg = UIImage(pixelBuffer: pixelBuffer) - let rotatedImage = curImg!.rotate(radians: .pi / 2) - /* Crop the captured image to be the size of the screen. */ - self.camImage = rotatedImage.crop(height: (previewLayer?.frame.height)!, width: (previewLayer?.frame.width)!) - - guard let model = try? VNCoreMLModel(for: LobeModel().model) else { return } - let request = VNCoreMLRequest(model: model) { (finishReq, err) in - self.processClassifications(for: finishReq, error: err) - } - - /* Crop to square images and send to the model. */ - if self.useCam { - try? VNImageRequestHandler(ciImage: CIImage(cgImage: (self.camImage?.squared()?.cgImage!)!)).perform([request]) - } else { - try? VNImageRequestHandler(ciImage: CIImage(cgImage: (self.img?.squared()?.cgImage!)!)).perform([request]) - } - } - func processClassifications(for request: VNRequest, error: Error?) { - DispatchQueue.main.async { - guard let results = request.results else { - self.classificationLabel = "Unable to classify image.\n\(error!.localizedDescription)" - return - } - let classifications = results as! 
[VNClassificationObservation] - - if classifications.isEmpty { - self.classificationLabel = "Nothing recognized." - } else { - /* Display top classifications ranked by confidence in the UI. */ - let topClassifications = classifications.prefix(1) - self.classificationLabel = topClassifications[0].identifier - self.confidence = topClassifications[0].confidence - } - } - } -} - -/* Helpers for editing images. */ -import VideoToolbox -extension UIImage { - var isPortrait: Bool { size.height > size.width } - var isLandscape: Bool { size.width > size.height } - var breadth: CGFloat { min(size.width, size.height) } - var breadthSize: CGSize { .init(width: breadth, height: breadth) } - - func squared(isOpaque: Bool = false) -> UIImage? { - guard let cgImage = cgImage? - .cropping(to: .init(origin: .init(x: isLandscape ? ((size.width-size.height)/2).rounded(.down) : 0, - y: isPortrait ? ((size.height-size.width)/2).rounded(.down) : 0), - size: breadthSize)) else { return nil } - let format = imageRendererFormat - format.opaque = isOpaque - return UIGraphicsImageRenderer(size: breadthSize, format: format).image { _ in - UIImage(cgImage: cgImage, scale: 1, orientation: imageOrientation) - .draw(in: .init(origin: .zero, size: breadthSize)) - } - } - func crop(isOpaque: Bool = false, height: CGFloat, width: CGFloat) -> UIImage? { - let newWidth = size.width - let newHeight = height / width * size.width - var screenSize: CGSize { .init(width: newWidth, height: newHeight)} - guard let cgImage = cgImage? - .cropping(to: .init(origin: .init(x: 0, - y: ((size.height - newHeight) / 2)), - size: screenSize)) else { return nil } - let format = imageRendererFormat - format.opaque = isOpaque - return UIGraphicsImageRenderer(size: screenSize, format: format).image { _ in - UIImage(cgImage: cgImage, scale: 1, orientation: imageOrientation) - .draw(in: .init(origin: .zero, size: screenSize)) - } - } - public convenience init?(pixelBuffer: CVPixelBuffer) { - var cgImage: CGImage? - VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage) - - guard let myImage = cgImage else { - return nil - } - - self.init(cgImage: myImage) - } - func rotate(radians: CGFloat) -> UIImage { - let rotatedSize = CGRect(origin: .zero, size: size) - .applying(CGAffineTransform(rotationAngle: CGFloat(radians))) - .integral.size - UIGraphicsBeginImageContext(rotatedSize) - if let context = UIGraphicsGetCurrentContext() { - let origin = CGPoint(x: rotatedSize.width / 2.0, - y: rotatedSize.height / 2.0) - context.translateBy(x: origin.x, y: origin.y) - context.rotate(by: radians) - draw(in: CGRect(x: -origin.y, y: -origin.x, - width: size.width, height: size.height)) - let rotatedImage = UIGraphicsGetImageFromCurrentImageContext() - UIGraphicsEndImageContext() - return rotatedImage ?? self - } - return self - } -} diff --git a/Lobe_iOS/PlayViewModel.swift b/Lobe_iOS/PlayViewModel.swift new file mode 100644 index 0000000..d9cd7d4 --- /dev/null +++ b/Lobe_iOS/PlayViewModel.swift @@ -0,0 +1,75 @@ +// +// PlayViewModel.swift +// Lobe_iOS +// +// Created by Elliot Boschwitz on 12/1/20. +// Copyright © 2020 Microsoft. All rights reserved. +// + +import Combine +import SwiftUI + +enum PlayViewMode { + case Camera + case ImagePreview + case NotLoaded +} + +/// View model for the Play View +class PlayViewModel: ObservableObject { + @Published var classificationLabel: String? + @Published var confidence: Float? 
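+    /// View state: which capture mode the Play screen shows, whether the photo picker sheet is presented, and the image it returns.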
+    @Published var viewMode: PlayViewMode = PlayViewMode.NotLoaded
+    @Published var showImagePicker: Bool = false
+    @Published var imageFromPhotoPicker: UIImage?
+    var captureSessionManager: CaptureSessionManager
+    let project: Project
+    var imagePredicter: PredictionLayer
+    private var disposables = Set<AnyCancellable>()
+
+    init(project: Project) {
+        self.project = project
+        self.imagePredicter = PredictionLayer(model: project.model)
+        self.captureSessionManager = CaptureSessionManager()
+
+        /// Subscribes to two publishers:
+        /// 1. `capturedImageOutput` published from `Camera` mode.
+        /// 2. `imageFromPhotoPicker` published from `ImagePreview` mode.
+        /// If either of the above publishers emits, we send its output to the prediction layer for classification results.
+        self.$imageFromPhotoPicker
+            .merge(with: captureSessionManager.$capturedImageOutput)
+            .compactMap { $0 } // drop nil values
+            .receive(on: DispatchQueue.global(qos: .userInitiated))
+            .sink(receiveValue: { [weak self] image in
+                guard let squaredImage = image.squared() else {
+                    print("Could not create squared image in PlayViewModel.")
+                    return
+                }
+                self?.imagePredicter.getPrediction(forImage: squaredImage)
+            })
+            .store(in: &disposables)
+
+        /// Subscribe to classifier results from the prediction layer.
+        self.imagePredicter.$classificationResult
+            .receive(on: DispatchQueue.main)
+            .sink(receiveValue: { [weak self] classificationResult in
+                guard let _classificationResult = classificationResult else {
+                    self?.classificationLabel = "Loading Results..."
+                    return
+                }
+                self?.classificationLabel = _classificationResult.identifier
+                self?.confidence = _classificationResult.confidence
+            })
+            .store(in: &disposables)
+
+        /// Update the camera session when the view mode is toggled.
+        self.$viewMode
+            .receive(on: DispatchQueue.main)
+            .sink(receiveValue: { [weak self] _viewMode in
+                if _viewMode == .Camera { self?.captureSessionManager.resetCameraFeed() }
+                else { self?.captureSessionManager.tearDown() }
+            })
+            .store(in: &disposables)
+    }
+}
diff --git a/Lobe_iOS/README.md b/Lobe_iOS/README.md
new file mode 100644
index 0000000..c7acc33
--- /dev/null
+++ b/Lobe_iOS/README.md
@@ -0,0 +1,38 @@
+# Files in iOS Bootstrap
+
+This project adheres to [MVVM architecture](https://www.raywenderlich.com/34-design-patterns-by-tutorials-mvvm). MVVM is a design pattern which organizes objects by model, view model, and view. Our app leverages MVVM as follows:
+
+![Code Diagram](https://github.com/lobe/iOS-bootstrap/raw/master/assets/codeDiagram.png)
+
+*Arrows designate subscriptions in their pointed direction, i.e. `PlayViewModel` subscribes to `PredictionLayer`.*
+
+## View
+View files define the look-and-feel of the app. This starter project defines a `PlayView` superview which imports all other view objects. Please consult the README in the [`/Views`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/Views) folder for more information.
+
+## View Model
+[`PlayViewModel`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/PlayViewModel.swift) is the view model which publishes data to the `PlayView` view. View models act as important intermediaries between the view and model by decoupling business logic from views and by subscribing to changes from the model.
+
+`PlayViewModel` publishes changes to the view by subscribing to the following events:
+1. New images (either from video capture or image preview) will trigger a prediction request.
+2. Responses to prediction requests update the UI to display the prediction result.
+3. 
+
+## Model
+The model layer handles all data processing in the app, publishing results to any subscribers. In this case, `PlayViewModel` is the only subscriber to model objects, which are:
+- [`CaptureSessionManager`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/Models/CaptureSessionManager.swift): publishes select frames from the video capture feed.
+- [`PredictionLayer`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/Models/PredictionLayer.swift): uses the imported Core ML model to publish the results from prediction requests (a simplified sketch of this request flow appears at the end of this document).
+- [`Project`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/Models/Project.swift): a struct for managing Core ML models.
+
+## Other Files
+[`CaptureSessionViewController`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/CaptureSessionViewController.swift) is an exception to the MVVM rule. Although we leverage the SwiftUI library whenever possible, we still need the older UIKit library for select purposes relating to video capture handling.
+
+Thankfully, [we can integrate UIKit easily into SwiftUI](https://developer.apple.com/tutorials/swiftui/interfacing-with-uikit) with `UIViewControllerRepresentable`, a protocol for managing view controllers directly in a SwiftUI view. In our app, [`CameraView`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/Views/CameraView.swift) is a `UIViewControllerRepresentable` which creates `CaptureSessionViewController`. This view controller is responsible for:
+1. Setting the view frame to the video feed.
+2. Handling device orientation changes, ensuring the video feed is correctly oriented.
+3. Managing tap gestures. UIKit handles conflicts between multiple tap gesture handlers better than SwiftUI, at the time of this writing. [Click here](https://github.com/lobe/iOS-bootstrap#in-app-gestures) to read more about tap gestures in iOS-bootstrap.
+
+## Useful Links
+
+For further reading and guidance on Swift best practices:
+- [Ray Wenderlich](https://www.raywenderlich.com/4161005-mvvm-with-combine-tutorial-for-ios) has a great tutorial showcasing MVVM with the [Combine](https://developer.apple.com/documentation/combine) library, which is used to define publishers and subscribers between MVVM layers. We use Combine heavily in the view model and model layers.
+- [Design+Code](https://designcode.io/swiftui2-course) has in-depth material for creating a Swift app in iOS 14.
diff --git a/Lobe_iOS/SceneDelegate.swift b/Lobe_iOS/SceneDelegate.swift
index dcee5d0..a33af08 100644
--- a/Lobe_iOS/SceneDelegate.swift
+++ b/Lobe_iOS/SceneDelegate.swift
@@ -1,3 +1,11 @@
+//
+//  SceneDelegate.swift
+//  Lobe_iOS
+//
+//  Created by Adam Menges on 5/20/20.
+//  Copyright © 2020 Microsoft. All rights reserved.
+//
+
 import UIKit
 import SwiftUI
@@ -12,13 +20,18 @@ class SceneDelegate: UIResponder, UIWindowSceneDelegate {
         // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead).
         // Create the SwiftUI view that provides the window contents.
-        let contentView = ContentView()
+        let modelUrl = LobeModel.urlOfModelInThisBundle
+        let model = try? LobeModel(contentsOf: modelUrl).model
+        let project = Project(mlModel: model)
+        let viewModel = PlayViewModel(project: project)
+        let view = PlayView(viewModel: viewModel)
 
         // Use a UIHostingController as window root view controller.
if let windowScene = scene as? UIWindowScene { let window = UIWindow(windowScene: windowScene) - window.rootViewController = UIHostingController(rootView: contentView) + window.rootViewController = UIHostingController(rootView: view) self.window = window + self.window?.tintColor = UIColor(rgb: 0x00DDAD) window.makeKeyAndVisible() } } diff --git a/Lobe_iOS/Views/CameraView.swift b/Lobe_iOS/Views/CameraView.swift new file mode 100644 index 0000000..f933218 --- /dev/null +++ b/Lobe_iOS/Views/CameraView.swift @@ -0,0 +1,56 @@ +// +// CameraView.swift +// Lobe_iOS +// +// Created by Elliot Boschwitz on 11/29/20. +// Copyright © 2020 Microsoft. All rights reserved. +// + +import AVKit +import SwiftUI +import UIKit +import Vision + +struct CameraView: UIViewControllerRepresentable { + var captureSessionManager: CaptureSessionManager + + init(captureSessionManager: CaptureSessionManager) { + self.captureSessionManager = captureSessionManager + } + + func makeUIViewController(context: Context) -> CaptureSessionViewController { + let vc = CaptureSessionViewController() + vc.gestureDelegate = context.coordinator + return vc + } + + /// Update preview layer when state changes for camera device + func updateUIViewController(_ uiViewController: CaptureSessionViewController, context: Context) { + /// Set view with previewlayer + let previewLayer = self.captureSessionManager.previewLayer + uiViewController.previewLayer = previewLayer + uiViewController.configureVideoOrientation(for: previewLayer) + if previewLayer != nil { uiViewController.view.layer.addSublayer(previewLayer!) } + else { print("Preview layer null in updateUIViewController.") } + } + + func makeCoordinator() -> Coordinator { + Coordinator(self) + } + + class Coordinator: NSObject, CaptureSessionGestureDelegate { + var parent: CameraView + + init(_ parent: CameraView) { + self.parent = parent + } + + func viewRecognizedDoubleTap() { + parent.captureSessionManager.rotateCamera() + } + + func viewRecognizedTripleTap(_ view: UIView) { + parent.captureSessionManager.takeScreenShot(in: view) + } + } +} diff --git a/Lobe_iOS/Views/ImagePicker.swift b/Lobe_iOS/Views/ImagePicker.swift new file mode 100644 index 0000000..2029459 --- /dev/null +++ b/Lobe_iOS/Views/ImagePicker.swift @@ -0,0 +1,61 @@ +// +// ImagePicker.swift +// Lobe_iOS +// +// Created by Kathy Zhou on 5/27/20. +// Copyright © 2020 Microsoft. All rights reserved. +// + + +import Foundation +import SwiftUI + + +/* Image picker. */ +struct ImagePicker: UIViewControllerRepresentable { + // dismisses view when document is selected + @Environment(\.presentationMode) var presentationMode + + typealias UIViewControllerType = UIImagePickerController + typealias Coordinator = ImagePickerCoordinator + + @Binding var image: UIImage? 
+ @Binding var viewMode: PlayViewMode + let predictionLayer: PredictionLayer + + var sourceType: UIImagePickerController.SourceType = .camera + + func updateUIViewController(_ uiViewController: UIImagePickerController, context: UIViewControllerRepresentableContext) { + } + + func makeCoordinator() -> ImagePicker.Coordinator { + ImagePickerCoordinator(self) + } + + func makeUIViewController(context: UIViewControllerRepresentableContext) -> UIImagePickerController { + let picker = UIImagePickerController() + picker.sourceType = sourceType + picker.delegate = context.coordinator + picker.modalPresentationStyle = .fullScreen + return picker + } + + class ImagePickerCoordinator: NSObject, UINavigationControllerDelegate, UIImagePickerControllerDelegate { + var parent: ImagePicker + + init(_ parent: ImagePicker) { + self.parent = parent + } + + func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) { + defer { parent.presentationMode.wrappedValue.dismiss() } + + if let uiImage = info[UIImagePickerController.InfoKey.originalImage] as? UIImage { + self.parent.image = uiImage + self.parent.predictionLayer.getPrediction(forImage: uiImage) + self.parent.viewMode = .ImagePreview + } + } + } + +} diff --git a/Lobe_iOS/Views/ImagePreview.swift b/Lobe_iOS/Views/ImagePreview.swift new file mode 100644 index 0000000..90ebc5a --- /dev/null +++ b/Lobe_iOS/Views/ImagePreview.swift @@ -0,0 +1,46 @@ +// +// ImagePreview.swift +// Lobe_iOS +// +// Created by Elliot Boschwitz on 12/1/20. +// Copyright © 2020 Microsoft. All rights reserved. +// + +import SwiftUI + +/// Shows image preview from photo library on top of PlayView. +struct ImagePreview: View { + @Binding var image: UIImage? + @Binding var viewMode: PlayViewMode + @State private var offset = CGSize.zero + @State private var scaling: CGSize = .init(width: 1, height: 1) + + var body: some View { + if let image = self.image { + Image(uiImage: image) + .resizable() + .aspectRatio(image.size, contentMode: .fill) + .scaleEffect(1 / self.scaling.height) + .offset(self.offset) + + /* Gesture for swiping down to dismiss the image. */ + .gesture(DragGesture() + .onChanged ({value in + self.scaling = value.translation + self.scaling.height = max(self.scaling.height / 50, 1) + self.offset = value.translation + }) + .onEnded {_ in + self.offset = .zero + if self.scaling.height > 1.5 { + self.image = nil + self.viewMode = .Camera + } + self.scaling = .init(width: 1, height: 1) + } + ) + .opacity(1 / self.scaling.height < 1 ? 0.5: 1) + } + } +} + diff --git a/Lobe_iOS/Views/PlayView.swift b/Lobe_iOS/Views/PlayView.swift new file mode 100644 index 0000000..9bea368 --- /dev/null +++ b/Lobe_iOS/Views/PlayView.swift @@ -0,0 +1,190 @@ +// +// PlayView.swift +// Lobe_iOS +// +// Created by Adam Menges on 5/20/20. +// Copyright © 2020 Microsoft. All rights reserved. +// + +import AVKit +import SwiftUI + +struct PlayView: View { + @Environment(\.presentationMode) var presentationMode: Binding + @ObservedObject var viewModel: PlayViewModel + + init(viewModel: PlayViewModel) { + self.viewModel = viewModel + } + + var body: some View { + NavigationView { + GeometryReader { geometry in + VStack { + switch(self.viewModel.viewMode) { + // Background camera view. + case .Camera: + ZStack { + CameraView(captureSessionManager: self.viewModel.captureSessionManager) + // Gesture for swiping up the photo library. 
+ .gesture( + DragGesture() + .onEnded {value in + if value.translation.height < 0 { + withAnimation{ + self.viewModel.showImagePicker.toggle() + } + } + } + ) + } + + // Placeholder for displaying an image from the photo library. + case .ImagePreview: + ImagePreview(image: self.$viewModel.imageFromPhotoPicker, viewMode: self.$viewModel.viewMode) + + // TO-DO: loading screen here + case .NotLoaded: + Text("View Loading...") + } + } + .frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, maxHeight: .infinity) + .background(Color.black) + .edgesIgnoringSafeArea(.all) + + VStack { + /// Show processed image that gets used for prediction. + /// Used for debugging purposes + if Bool(ProcessInfo.processInfo.environment["SHOW_FORMATTED_IMAGE"] ?? "false") ?? false { + if let imageForProcessing = self.viewModel.imagePredicter.imageForPrediction { + Image(uiImage: imageForProcessing) + .resizable() + .scaledToFit() + .frame(width: 300, height: 300) + .border(Color.blue, width: 8) + } + } + Spacer() + PredictionLabelView(classificationLabel: self.$viewModel.classificationLabel, confidence: self.$viewModel.confidence) + } + } + .statusBar(hidden: true) + .navigationBarBackButtonHidden(true) + .navigationBarItems(trailing: + HStack { + /// Render `rotateCameraButton` for all modes--this is a workaround for bug where right padding is off for `ImagePreview` mode. + rotateCameraButton + .disabled(self.viewModel.viewMode != .Camera) + .opacity(self.viewModel.viewMode == .Camera ? 1 : 0) + /// Photo picker button if in camera mode, else we show button to toggle to camera mode + if (self.viewModel.viewMode == .Camera) { + openPhotoPickerButton + } else { + showCameraModeButton + } + } + .buttonStyle(PlayViewButtonStyle()) + ) + .sheet(isPresented: self.$viewModel.showImagePicker) { + ImagePicker(image: self.$viewModel.imageFromPhotoPicker, viewMode: self.$viewModel.viewMode, predictionLayer: self.viewModel.imagePredicter, sourceType: .photoLibrary) + .edgesIgnoringSafeArea(.all) + } + .onAppear { + self.viewModel.viewMode = .Camera + } + .onDisappear { + /// Disable capture session + self.viewModel.viewMode = .NotLoaded + } + } + .navigationViewStyle(StackNavigationViewStyle()) + } +} + +extension PlayView { + /// Button style for navigation row + struct PlayViewButtonStyle: ButtonStyle { + func makeBody(configuration: Configuration) -> some View { + configuration.label + .padding(10) + .foregroundColor(.white) + .background(Color.black.opacity(0.35).blur(radius: 20)) + .cornerRadius(8) + } + } + + /// Button for opening photo picker + var openPhotoPickerButton: some View { + Button(action: { + self.viewModel.showImagePicker.toggle() + }) { + Image(systemName: "photo.fill") + } + } + + /// Button for enabling camera mode + var showCameraModeButton: some View { + Button(action: { + self.viewModel.viewMode = .Camera + }) { + Image(systemName: "camera.viewfinder") + } + } + + /// Button for rotating camera + var rotateCameraButton: some View { + Button(action: { self.viewModel.captureSessionManager.rotateCamera() }) { + Image(systemName: "camera.rotate.fill") + } + } +} + +/// Gadget to build colors from Hashtag Color Code Hex. 
+extension UIColor { + convenience init(red: Int, green: Int, blue: Int) { + assert(red >= 0 && red <= 255, "Invalid red component") + assert(green >= 0 && green <= 255, "Invalid green component") + assert(blue >= 0 && blue <= 255, "Invalid blue component") + + self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: 1.0) + } + + convenience init(rgb: Int) { + self.init( + red: (rgb >> 16) & 0xFF, + green: (rgb >> 8) & 0xFF, + blue: rgb & 0xFF + ) + } + +} + +struct PlayView_Previews: PreviewProvider { + struct TestImage: View { + var body: some View { + Image("testing_image") + .frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, maxHeight: .infinity) + .edgesIgnoringSafeArea(.all) + } + } + + static var previews: some View { + let viewModel = PlayViewModel(project: Project(mlModel: nil)) + viewModel.viewMode = .Camera + + return Group { + NavigationView { + ZStack { + TestImage() + PlayView(viewModel: viewModel) + } + } + .previewDevice("iPhone 12") + ZStack { + TestImage() + PlayView(viewModel: viewModel) + } + .previewDevice("iPad Pro (11-inch) (2nd generation)") + } + } +} diff --git a/Lobe_iOS/UpdateTextViewExternal.swift b/Lobe_iOS/Views/PredictionLabelView.swift similarity index 72% rename from Lobe_iOS/UpdateTextViewExternal.swift rename to Lobe_iOS/Views/PredictionLabelView.swift index aefeb6f..96cadec 100644 --- a/Lobe_iOS/UpdateTextViewExternal.swift +++ b/Lobe_iOS/Views/PredictionLabelView.swift @@ -1,3 +1,11 @@ +// +// PredictionLabelView.swift +// Lobe_iOS +// +// Created by Kathy Zhou on 6/4/20. +// Copyright © 2020 Microsoft. All rights reserved. +// + import Foundation import SwiftUI @@ -8,10 +16,10 @@ struct VisualEffectView: UIViewRepresentable { } /* View for displaying the green bar containing the prediction label. */ -struct UpdateTextViewExternal: View { - @ObservedObject var viewModel: MyViewController - @State var showImagePicker: Bool = false - @State private var image: UIImage? +struct PredictionLabelView: View { + @State private var showImagePicker: Bool = false + @Binding var classificationLabel: String? + @Binding var confidence: Float? var body: some View { GeometryReader { geometry in @@ -25,17 +33,17 @@ struct UpdateTextViewExternal: View { Rectangle() .foregroundColor(Color(UIColor(rgb: 0x00DDAD))) - .frame(width: min(CGFloat(self.viewModel.confidence ?? 0) * geometry.size.width / 1.2, geometry.size.width / 1.2)) + .frame(width: min(CGFloat(self.confidence ?? 0) * geometry.size.width / 1.2, geometry.size.width / 1.2)) .animation(.linear) - - Text(self.viewModel.classificationLabel ?? "Loading...") - .padding() - .foregroundColor(.white) + + Text(self.classificationLabel ?? 
"Loading...") .font(.system(size: 28)) + .foregroundColor(.white) + .padding() } } .frame(width: geometry.size.width / 1.2, - height: 65, + height: 75, alignment: .center ) .cornerRadius(17.0) @@ -46,6 +54,7 @@ struct UpdateTextViewExternal: View { } } + struct UpdateTextViewExternal_Previews: PreviewProvider { static var previews: some View { GeometryReader { geometry in @@ -56,7 +65,7 @@ struct UpdateTextViewExternal_Previews: PreviewProvider { .edgesIgnoringSafeArea(.all) .frame(width: geometry.size.width, height: geometry.size.height) - UpdateTextViewExternal(viewModel: MyViewController()).zIndex(0) + PredictionLabelView(classificationLabel: .constant(nil), confidence: .constant(nil)) }.frame(width: geometry.size.width, height: geometry.size.height) } diff --git a/Lobe_iOS/Views/README.md b/Lobe_iOS/Views/README.md new file mode 100644 index 0000000..ffc593f --- /dev/null +++ b/Lobe_iOS/Views/README.md @@ -0,0 +1,8 @@ +# View Objects +The Lobe iOS-bootstrap app organizes view logic into the following objects: + +- [`PlayView`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/Views/PlayView.swift) is the superview object, which handles rendering logic for both `Camera` and `ImagePreview` modes. `PlayView` also formats the location of overlayed buttons and labels in the frame. +- [`CameraView`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/Views/CameraView.swift) is a `UIViewControllerRepresentable`, rather than a `View`. It manages a view controller which sets the video feed to the view frame, described in more detail [here](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS#other-files). +- [`ImagePreview`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/Views/ImagePreview.swift) displays a `UIImage` as selected from the `ImagePicker` photo picker. +- [`ImagePicker`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/Views/ImagePicker.swift) is another `UIViewControllerRepresentable` which integrates `UIImagePickerControllerDelegate`, a UIKit delegate for handling selected images from an image picker. The selected image is used for `ImagePreview` mode. +- [`PredictionLabelView`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS/Views/PredictionLabelView.swift) defines the view for the UI label displaying prediction text and confidence percentage. \ No newline at end of file diff --git a/README.md b/README.md index 9f24786..846e8d3 100644 --- a/README.md +++ b/README.md @@ -4,9 +4,7 @@ iOS Bootstrap takes the machine learning model created in Lobe, and adds it to a project on iOS that uses CoreML and [SwiftUI](https://developer.apple.com/xcode/swiftui/). We help you along the way with everything you need to do to integrate it in your project. -
- -## Installing Your Development Environment +## Installing Development Environment You need to get you setup so you can build, launch, and play with your app. These instructions are written for macOS, the only system you can develop iOS apps on. @@ -42,9 +40,7 @@ Once it's done, double click on the `Lobe_iOS.xcodeproj` file in your project di Now we need to export your custom model from Lobe. If you'd like, you can skip to the [deploying your app](#deploying-your-app) section if you just want to see this app working with the default sample model. -
- -### Step 3 - Exporting your model +### Step 3 - Exporting Model After your machine learning is done training, and you are getting good results, you can export your model by going into the file menu and clicking export. Lobe supports a bunch of industry standard platforms. For this project, we'll select CoreML, the standard for Apple's platforms. @@ -52,9 +48,7 @@ Once you have the CoreML model, rename it to `LobeModel.mlmodel` and drag it int ![Illustration of Finder](https://github.com/lobe/iOS-bootstrap/raw/master/assets/modeldrag.png) -
- -## Step 4 - Deploying your app +### Step 4 - Deploying App Next, we'll want to get this app onto your phone so you can see it working live with your device's camera. To do this, plug in your device via a USB-Lightning cable and, in the open Xcode window, press the play button in the top left corner of the window: @@ -68,31 +62,22 @@ And there you have it! You're app should be running on your device. If Xcode pop And finally, if you'd like to post your app (running your custom image classification model) to the App Store, you're more than welcome to do so. [Follow the instructions here](https://developer.apple.com/app-store/submitting/) to get the process rolling. You'll need to have an Apple Developer account. -
- -## Tips and Tricks - -This app is meant as a starting place for your own project. Below is a high level overview of the project to get you started. Like any good bootstrap app, this project has been kept intentionally simple. There are only two main components in two files, `ContentView.swift` and `MyViewController.swift`. - -#### `ContentView.swift` - -This file contains all the main UI, built using SwiftUI. If you'd like to adjust the placement of any UI elements or add you own, start here. If you'd like a primer on SwiftUI, start with this: [Build a SwiftUI app for iOS 14](https://designcode.io/swiftui2-course) - -#### `MyViewController.swift` +## Miscellaneous Information -This file contains all parts that needed to be done using the old style UIKit. Mainly this is making the camera view. Luckily, this is all ported back to SwiftUI using Apple's `UIViewControllerRepresentable` API. This allows us to make the camera view, and then use it like any other SwiftUI view above. You'll also see the CoreML prediction call here. +### In-App Gestures -#### `UpdateTextViewExternal.swift` +The Lobe iOS bootstrap app supports the following gestures: +- **Swipe Up**: opens an image picker for the device's photo library. The selected image is previewed and used for prediction. +- **Double Tap**: toggles between front and back-facing cameras for the video feed. +- **Triple Tap**: saves a screenshot of the video feed, omitting overlayed UI components in the capture. -Includes the small amount of SwiftUI for the prediction bar at the bottom of the screen. +### Device Support -#### Miscellaneous Pointers +This app works for iPhones and iPads running iOS/iPadOS 13.4 or greater. -- This project contains a sample icon and other assets, feel free to use these or create your own. -- When you're using the app, swiping up on the screen pulls open the image picker. -- Double tapping flips the camera around to the front facing camera. Double tapping again flips the camera back to the front. +## Understanding the Code -
+Follow the README in the [`/Lobe_iOS`](https://github.com/lobe/iOS-bootstrap/tree/master/Lobe_iOS) folder. ## Contributing diff --git a/assets/codeDiagram.png b/assets/codeDiagram.png new file mode 100644 index 0000000..f0c2efd Binary files /dev/null and b/assets/codeDiagram.png differ
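
The `PredictionLayer` referenced throughout this diff is not included in it. As a rough guide to the request flow described in the Model section of `Lobe_iOS/README.md`, the following is a minimal sketch assuming a Vision-based classifier; the type name `SketchPredictionLayer` and its internals are illustrative, inferred only from the calls made above (`getPrediction(forImage:)` and `$classificationResult`), and are not the repository's actual implementation.

```swift
import Combine
import CoreML
import UIKit
import Vision

/// Illustrative sketch of a prediction layer: it wraps a Core ML model in a
/// `VNCoreMLRequest` and publishes the top classification for each image it receives.
final class SketchPredictionLayer {
    @Published var classificationResult: VNClassificationObservation?
    private let visionModel: VNCoreMLModel?

    init(mlModel: MLModel?) {
        self.visionModel = mlModel.flatMap { try? VNCoreMLModel(for: $0) }
    }

    func getPrediction(forImage image: UIImage) {
        guard let visionModel = visionModel, let cgImage = image.cgImage else { return }
        let request = VNCoreMLRequest(model: visionModel) { [weak self] request, _ in
            // Publish the highest-confidence classification, if any.
            self?.classificationResult = (request.results as? [VNClassificationObservation])?.first
        }
        request.imageCropAndScaleOption = .centerCrop
        let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
        try? handler.perform([request])
    }
}
```

In the actual project, `PlayViewModel` subscribes to `$classificationResult` (see `PlayViewModel.swift` in this diff); error handling and background dispatch are omitted here for brevity.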