diff --git a/Example/YoonitFacefyDemo/Podfile.lock b/Example/YoonitFacefyDemo/Podfile.lock
index a02628b..b325cc4 100644
--- a/Example/YoonitFacefyDemo/Podfile.lock
+++ b/Example/YoonitFacefyDemo/Podfile.lock
@@ -53,7 +53,7 @@ PODS:
   - PromisesObjC (1.2.12)
   - Protobuf (3.14.0)
   - YoonitCamera (2.3.0)
-  - YoonitFacefy (1.0.0):
+  - YoonitFacefy (1.0.2):
     - GoogleMLKit/FaceDetection
 
 DEPENDENCIES:
@@ -94,7 +94,7 @@ SPEC CHECKSUMS:
   PromisesObjC: 3113f7f76903778cf4a0586bd1ab89329a0b7b97
   Protobuf: 0cde852566359049847168e51bd1c690e0f70056
   YoonitCamera: a25677c77bc14f22fc9efaffceff60b54cc32049
-  YoonitFacefy: 73aff35fe27d97775f04a3a0f74504eb0947a0f5
+  YoonitFacefy: b627d9a94fa968d90a8b9915895cd5986d1630fa
 
 PODFILE CHECKSUM: 45150530e3052be2fb55a1493c2f00eedc5c1d3b
diff --git a/Example/YoonitFacefyDemo/Pods/Local Podspecs/YoonitFacefy.podspec.json b/Example/YoonitFacefyDemo/Pods/Local Podspecs/YoonitFacefy.podspec.json
index d1d42cc..81ac524 100644
--- a/Example/YoonitFacefyDemo/Pods/Local Podspecs/YoonitFacefy.podspec.json
+++ b/Example/YoonitFacefyDemo/Pods/Local Podspecs/YoonitFacefy.podspec.json
@@ -1,6 +1,6 @@
 {
   "name": "YoonitFacefy",
-  "version": "1.0.0",
+  "version": "1.0.2",
   "summary": "The face detection's module for iOS with a lot of awesome features",
   "description": "\"The face detection's module for iOS with a lot of awesome features\"",
   "homepage": "https://github.com/Yoonit-Labs/ios-yoonit-facefy",
@@ -18,7 +18,7 @@
   },
   "source": {
     "git": "https://github.com/Yoonit-Labs/ios-yoonit-facefy.git",
-    "tag": "1.0.0"
+    "tag": "1.0.2"
   },
   "source_files": [
     "YoonitFacefy/src/**/*",
diff --git a/Example/YoonitFacefyDemo/Pods/Manifest.lock b/Example/YoonitFacefyDemo/Pods/Manifest.lock
index a02628b..b325cc4 100644
--- a/Example/YoonitFacefyDemo/Pods/Manifest.lock
+++ b/Example/YoonitFacefyDemo/Pods/Manifest.lock
@@ -53,7 +53,7 @@ PODS:
   - PromisesObjC (1.2.12)
   - Protobuf (3.14.0)
   - YoonitCamera (2.3.0)
-  - YoonitFacefy (1.0.0):
+  - YoonitFacefy (1.0.2):
     - GoogleMLKit/FaceDetection
 
 DEPENDENCIES:
@@ -94,7 +94,7 @@ SPEC CHECKSUMS:
   PromisesObjC: 3113f7f76903778cf4a0586bd1ab89329a0b7b97
   Protobuf: 0cde852566359049847168e51bd1c690e0f70056
   YoonitCamera: a25677c77bc14f22fc9efaffceff60b54cc32049
-  YoonitFacefy: 73aff35fe27d97775f04a3a0f74504eb0947a0f5
+  YoonitFacefy: b627d9a94fa968d90a8b9915895cd5986d1630fa
 
 PODFILE CHECKSUM: 45150530e3052be2fb55a1493c2f00eedc5c1d3b
diff --git a/Example/YoonitFacefyDemo/YoonitFacefyDemo.xcodeproj/project.pbxproj b/Example/YoonitFacefyDemo/YoonitFacefyDemo.xcodeproj/project.pbxproj
index 2498ced..9861ee9 100644
--- a/Example/YoonitFacefyDemo/YoonitFacefyDemo.xcodeproj/project.pbxproj
+++ b/Example/YoonitFacefyDemo/YoonitFacefyDemo.xcodeproj/project.pbxproj
@@ -15,11 +15,11 @@
         6176EFCB252E496F00F4D4DD /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 6176EFCA252E496F00F4D4DD /* Assets.xcassets */; };
         6176EFCE252E496F00F4D4DD /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 6176EFCC252E496F00F4D4DD /* LaunchScreen.storyboard */; };
         6176EFDE252E4A9100F4D4DD /* FacefyViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6176EFDB252E4A9100F4D4DD /* FacefyViewController.swift */; };
-        9471A16A133B5BC59273DC59 /* Pods_YoonitFacefyDemo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = EC04C950B084B2E078258965 /* Pods_YoonitFacefyDemo.framework */; };
+        E04B1D1FB428FE36ABCC7B91 /* Pods_YoonitFacefyDemo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C72063803094F50B0D23D173 /* Pods_YoonitFacefyDemo.framework */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXFileReference section */
-        5C06E54725D1F09A00E6770F /* Podfile */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = Podfile; sourceTree = "<group>"; };
+        5C06E54725D1F09A00E6770F /* Podfile */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = Podfile; sourceTree = "<group>"; xcLanguageSpecificationIdentifier = xcode.lang.ruby; };
         5C06E5A925D1FBC500E6770F /* YoonitFacefy.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = YoonitFacefy.framework; sourceTree = BUILT_PRODUCTS_DIR; };
         5C6580CB25D5ACA3001171F8 /* GraphicView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GraphicView.swift; sourceTree = "<group>"; };
         6176EFBE252E496D00F4D4DD /* YoonitFacefyDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = YoonitFacefyDemo.app; sourceTree = BUILT_PRODUCTS_DIR; };
@@ -30,9 +30,9 @@
         6176EFCD252E496F00F4D4DD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
         6176EFCF252E496F00F4D4DD /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
         6176EFDB252E4A9100F4D4DD /* FacefyViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FacefyViewController.swift; sourceTree = "<group>"; };
-        9E5010989592DAF42C37B6FB /* Pods-YoonitFacefyDemo.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-YoonitFacefyDemo.release.xcconfig"; path = "Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo.release.xcconfig"; sourceTree = "<group>"; };
-        C34D9F83E1CDDB618E5A3BE8 /* Pods-YoonitFacefyDemo.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-YoonitFacefyDemo.debug.xcconfig"; path = "Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo.debug.xcconfig"; sourceTree = "<group>"; };
-        EC04C950B084B2E078258965 /* Pods_YoonitFacefyDemo.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_YoonitFacefyDemo.framework; sourceTree = BUILT_PRODUCTS_DIR; };
+        C72063803094F50B0D23D173 /* Pods_YoonitFacefyDemo.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_YoonitFacefyDemo.framework; sourceTree = BUILT_PRODUCTS_DIR; };
+        EFDC5B8287CD37C30E835D54 /* Pods-YoonitFacefyDemo.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-YoonitFacefyDemo.debug.xcconfig"; path = "Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo.debug.xcconfig"; sourceTree = "<group>"; };
+        F834A363E2F719CA947B1589 /* Pods-YoonitFacefyDemo.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-YoonitFacefyDemo.release.xcconfig"; path = "Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo.release.xcconfig"; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
@@ -40,7 +40,7 @@
             isa = PBXFrameworksBuildPhase;
             buildActionMask = 2147483647;
             files = (
-                9471A16A133B5BC59273DC59 /* Pods_YoonitFacefyDemo.framework in Frameworks */,
+                E04B1D1FB428FE36ABCC7B91 /* Pods_YoonitFacefyDemo.framework in Frameworks */,
             );
             runOnlyForDeploymentPostprocessing = 0;
         };
@@ -84,8 +84,8 @@
         E43515AC47E0279330A1606A /* Pods */ = {
             isa = PBXGroup;
             children = (
-                C34D9F83E1CDDB618E5A3BE8 /* Pods-YoonitFacefyDemo.debug.xcconfig */,
-                9E5010989592DAF42C37B6FB /* Pods-YoonitFacefyDemo.release.xcconfig */,
+                EFDC5B8287CD37C30E835D54 /* Pods-YoonitFacefyDemo.debug.xcconfig */,
+                F834A363E2F719CA947B1589 /* Pods-YoonitFacefyDemo.release.xcconfig */,
             );
             path = Pods;
             sourceTree = "<group>";
@@ -94,7 +94,7 @@
             isa = PBXGroup;
             children = (
                 5C06E5A925D1FBC500E6770F /* YoonitFacefy.framework */,
-                EC04C950B084B2E078258965 /* Pods_YoonitFacefyDemo.framework */,
+                C72063803094F50B0D23D173 /* Pods_YoonitFacefyDemo.framework */,
             );
             name = Frameworks;
             sourceTree = "<group>";
@@ -106,12 +106,12 @@
             isa = PBXNativeTarget;
             buildConfigurationList = 6176EFD2252E496F00F4D4DD /* Build configuration list for PBXNativeTarget "YoonitFacefyDemo" */;
             buildPhases = (
-                26D4B6F4B8D7E7268557A322 /* [CP] Check Pods Manifest.lock */,
+                BA2A97F47C37B6EF19CC3A4D /* [CP] Check Pods Manifest.lock */,
                 6176EFBA252E496D00F4D4DD /* Sources */,
                 6176EFBB252E496D00F4D4DD /* Frameworks */,
                 6176EFBC252E496D00F4D4DD /* Resources */,
-                645887ACDFC93977CE31A258 /* [CP] Embed Pods Frameworks */,
-                FB2832E61E79E64FF8934907 /* [CP] Copy Pods Resources */,
+                B63DF2774B864E8B757221CC /* [CP] Embed Pods Frameworks */,
+                9E6DB6C3F3022D55C95EC306 /* [CP] Copy Pods Resources */,
             );
             buildRules = (
             );
@@ -169,29 +169,24 @@
 /* End PBXResourcesBuildPhase section */
 
 /* Begin PBXShellScriptBuildPhase section */
-        26D4B6F4B8D7E7268557A322 /* [CP] Check Pods Manifest.lock */ = {
+        9E6DB6C3F3022D55C95EC306 /* [CP] Copy Pods Resources */ = {
             isa = PBXShellScriptBuildPhase;
             buildActionMask = 2147483647;
             files = (
             );
             inputFileListPaths = (
+                "${PODS_ROOT}/Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo-resources-${CONFIGURATION}-input-files.xcfilelist",
             );
-            inputPaths = (
-                "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
-                "${PODS_ROOT}/Manifest.lock",
-            );
-            name = "[CP] Check Pods Manifest.lock";
+            name = "[CP] Copy Pods Resources";
             outputFileListPaths = (
-            );
-            outputPaths = (
-                "$(DERIVED_FILE_DIR)/Pods-YoonitFacefyDemo-checkManifestLockResult.txt",
+                "${PODS_ROOT}/Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo-resources-${CONFIGURATION}-output-files.xcfilelist",
             );
             runOnlyForDeploymentPostprocessing = 0;
             shellPath = /bin/sh;
-            shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n  # print error to STDERR\n  echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n  exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
+            shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo-resources.sh\"\n";
             showEnvVarsInLog = 0;
         };
-        645887ACDFC93977CE31A258 /* [CP] Embed Pods Frameworks */ = {
+        B63DF2774B864E8B757221CC /* [CP] Embed Pods Frameworks */ = {
             isa = PBXShellScriptBuildPhase;
             buildActionMask = 2147483647;
             files = (
@@ -208,21 +203,26 @@
             shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo-frameworks.sh\"\n";
             showEnvVarsInLog = 0;
         };
-        FB2832E61E79E64FF8934907 /* [CP] Copy Pods Resources */ = {
+        BA2A97F47C37B6EF19CC3A4D /* [CP] Check Pods Manifest.lock */ = {
             isa = PBXShellScriptBuildPhase;
             buildActionMask = 2147483647;
             files = (
             );
             inputFileListPaths = (
-                "${PODS_ROOT}/Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo-resources-${CONFIGURATION}-input-files.xcfilelist",
             );
-            name = "[CP] Copy Pods Resources";
+            inputPaths = (
+                "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
+                "${PODS_ROOT}/Manifest.lock",
+            );
+            name = "[CP] Check Pods Manifest.lock";
             outputFileListPaths = (
-                "${PODS_ROOT}/Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo-resources-${CONFIGURATION}-output-files.xcfilelist",
+            );
+            outputPaths = (
+                "$(DERIVED_FILE_DIR)/Pods-YoonitFacefyDemo-checkManifestLockResult.txt",
             );
             runOnlyForDeploymentPostprocessing = 0;
             shellPath = /bin/sh;
-            shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-YoonitFacefyDemo/Pods-YoonitFacefyDemo-resources.sh\"\n";
+            shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n  # print error to STDERR\n  echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n  exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
             showEnvVarsInLog = 0;
         };
 /* End PBXShellScriptBuildPhase section */
@@ -384,7 +384,7 @@
         };
         6176EFD3252E496F00F4D4DD /* Debug */ = {
             isa = XCBuildConfiguration;
-            baseConfigurationReference = C34D9F83E1CDDB618E5A3BE8 /* Pods-YoonitFacefyDemo.debug.xcconfig */;
+            baseConfigurationReference = EFDC5B8287CD37C30E835D54 /* Pods-YoonitFacefyDemo.debug.xcconfig */;
             buildSettings = {
                 ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
                 ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
@@ -404,7 +404,7 @@
                     "$(inherited)",
                     "@executable_path/Frameworks",
                 );
-                MARKETING_VERSION = 1.0.0;
+                MARKETING_VERSION = 1.0.2;
                 PRODUCT_BUNDLE_IDENTIFIER = ai.cyberlabs.YoonitFacefyDemo;
                 PRODUCT_NAME = "$(TARGET_NAME)";
                 SWIFT_VERSION = 5.0;
@@ -414,7 +414,7 @@
         };
         6176EFD4252E496F00F4D4DD /* Release */ = {
             isa = XCBuildConfiguration;
-            baseConfigurationReference = 9E5010989592DAF42C37B6FB /* Pods-YoonitFacefyDemo.release.xcconfig */;
+            baseConfigurationReference = F834A363E2F719CA947B1589 /* Pods-YoonitFacefyDemo.release.xcconfig */;
             buildSettings = {
                 ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
                 ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
@@ -434,7 +434,7 @@
                     "$(inherited)",
                     "@executable_path/Frameworks",
                 );
-                MARKETING_VERSION = 1.0.0;
+                MARKETING_VERSION = 1.0.2;
                 PRODUCT_BUNDLE_IDENTIFIER = ai.cyberlabs.YoonitFacefyDemo;
                 PRODUCT_NAME = "$(TARGET_NAME)";
                 SWIFT_VERSION = 5.0;
diff --git a/Example/YoonitFacefyDemo/YoonitFacefyDemo/Base.lproj/Main.storyboard b/Example/YoonitFacefyDemo/YoonitFacefyDemo/Base.lproj/Main.storyboard
index e64b33b..92e392f 100644
--- a/Example/YoonitFacefyDemo/YoonitFacefyDemo/Base.lproj/Main.storyboard
+++ b/Example/YoonitFacefyDemo/YoonitFacefyDemo/Base.lproj/Main.storyboard
@@ -131,7 +131,7 @@
-
+
diff --git a/Example/YoonitFacefyDemo/YoonitFacefyDemo/FacefyViewController.swift b/Example/YoonitFacefyDemo/YoonitFacefyDemo/FacefyViewController.swift
index 2eb9b97..0986ae8 100644
--- a/Example/YoonitFacefyDemo/YoonitFacefyDemo/FacefyViewController.swift
+++ b/Example/YoonitFacefyDemo/YoonitFacefyDemo/FacefyViewController.swift
@@ -25,7 +25,7 @@ class FacefyViewController:
     @IBOutlet var faceImageView: UIImageView!
     @IBOutlet var leftEyeLabel: UILabel!
    @IBOutlet var rightEyeLabel: UILabel!
-    @IBOutlet var smillingLabel: UILabel!
+    @IBOutlet var smilingLabel: UILabel!
     @IBOutlet var horizontalMovementLabel: UILabel!
     @IBOutlet var verticalMovementLabel: UILabel!
     @IBOutlet var tiltMovementLabel: UILabel!
@@ -53,21 +53,61 @@
         self.facefy?.detect(image!) { faceDetected in
-
+
             if let faceDetected: FaceDetected = faceDetected {
-                if let rightEyeOpenProbability = faceDetected.rightEyeOpenProbability {
-                    self.rightEyeLabel.text =
-                        rightEyeOpenProbability > 0.8 ? "Open" : "Close"
-                }
-                if let leftEyeOpenProbability = faceDetected.leftEyeOpenProbability {
-                    self.leftEyeLabel.text =
-                        leftEyeOpenProbability > 0.8 ? "Open" : "Close"
-                }
-                if let smilingProbability = faceDetected.smilingProbability {
-                    self.smillingLabel.text =
-                        smilingProbability > 0.8 ? "Smilling" : "Not smilling"
"Smilling" : "Not smilling" + + print( + "onFaceDetected" + + "\n x: \(faceDetected.boundingBox.minX), y: \(faceDetected.boundingBox.minY), width: \(faceDetected.boundingBox.width), height: \(faceDetected.boundingBox.height)" + + "\n leftEyeOpenProbability: \(faceDetected.hasLeftEyeOpenProbability) \(faceDetected.leftEyeOpenProbability)" + + "\n rightEyeOpenProbability: \(faceDetected.hasRightEyeOpenProbability) \(faceDetected.rightEyeOpenProbability)" + + "\n smilingProbability: \(faceDetected.hasSmilingProbability) \(faceDetected.smilingProbability)" + + "\n headEulerAngleX: \(faceDetected.hasHeadEulerAngleX) \(faceDetected.headEulerAngleX)" + + "\n headEulerAngleY: \(faceDetected.hasHeadEulerAngleY) \(faceDetected.headEulerAngleY)" + + "\n headEulerAngleZ: \(faceDetected.hasHeadEulerAngleZ) \(faceDetected.headEulerAngleZ)" + ) + + self.handleDisplayProbability( + label: self.leftEyeLabel, + hasValue: faceDetected.hasLeftEyeOpenProbability, + value: faceDetected.leftEyeOpenProbability, + validText: "Open", + invalidText: "Close" + ) + self.handleDisplayProbability( + label: self.rightEyeLabel, + hasValue: faceDetected.hasRightEyeOpenProbability, + value: faceDetected.rightEyeOpenProbability, + validText: "Open", + invalidText: "Close" + ) + self.handleDisplayProbability( + label: self.smilingLabel, + hasValue: faceDetected.hasSmilingProbability, + value: faceDetected.smilingProbability, + validText: "Smiling", + invalidText: "Not Smiling" + ) + + if faceDetected.hasHeadEulerAngleX { + let headEulerAngleX = faceDetected.headEulerAngleX + var headPosition = "" + if headEulerAngleX < -36 { + headPosition = "Super Down" + } else if -36 < headEulerAngleX && headEulerAngleX < -12 { + headPosition = "Down" + } else if -12 < headEulerAngleX && headEulerAngleX < 12 { + headPosition = "Frontal" + } else if 12 < headEulerAngleX && headEulerAngleX < 36 { + headPosition = "Up" + } else if headEulerAngleX > 36 { + headPosition = "Super Up" + } + self.verticalMovementLabel.text = headPosition } - if let headEulerAngleY = faceDetected.headEulerAngleY { + + if faceDetected.hasHeadEulerAngleY { + let headEulerAngleY = faceDetected.headEulerAngleY var headPosition = "" if headEulerAngleY < -36 { headPosition = "Super Right" @@ -82,22 +122,9 @@ class FacefyViewController: } self.horizontalMovementLabel.text = headPosition } - if let headEulerAngleX = faceDetected.headEulerAngleX { - var headPosition = "" - if headEulerAngleX < -36 { - headPosition = "Super Down" - } else if -36 < headEulerAngleX && headEulerAngleX < -12 { - headPosition = "Down" - } else if -12 < headEulerAngleX && headEulerAngleX < 12 { - headPosition = "Frontal" - } else if 12 < headEulerAngleX && headEulerAngleX < 36 { - headPosition = "Up" - } else if headEulerAngleX > 36 { - headPosition = "Super Up" - } - self.verticalMovementLabel.text = headPosition - } - if let headEulerAngleZ = faceDetected.headEulerAngleZ { + + if faceDetected.hasHeadEulerAngleZ { + let headEulerAngleZ = faceDetected.headEulerAngleZ var headPosition = "" if headEulerAngleZ < -36 { headPosition = "Super Left" @@ -111,8 +138,9 @@ class FacefyViewController: headPosition = "Super Right" } self.tiltMovementLabel.text = headPosition - } - if let cgImage = image?.cgImage { + } + + if let cgImage = image?.cgImage { // Crop the face image. self.faceImageView.image = UIImage( cgImage: cgImage.cropping(to: faceDetected.boundingBox)! @@ -129,6 +157,18 @@ class FacefyViewController: self.faceImage = image! 
         }
+    
+    func handleDisplayProbability(
+        label: UILabel,
+        hasValue: Bool,
+        value: Float,
+        validText: String,
+        invalidText: String
+    ) {
+        if hasValue {
+            label.text = value > 0.8 ? validText : invalidText
+        }
+    }
 
     func onFaceDetected(
         _ x: Int,
@@ -173,21 +213,3 @@
         return "\(String(format: "%.2f", self))"
     }
 }
-
-extension UIImage {
-    func flipHorizontally() -> UIImage? {
-        UIGraphicsBeginImageContextWithOptions(self.size, false, self.scale)
-        let context = UIGraphicsGetCurrentContext()!
-
-        context.translateBy(x: self.size.width/2, y: self.size.height/2)
-        context.scaleBy(x: -1.0, y: 1.0)
-        context.translateBy(x: -self.size.width/2, y: -self.size.height/2)
-
-        self.draw(in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
-
-        let newImage = UIGraphicsGetImageFromCurrentImageContext()
-        UIGraphicsEndImageContext()
-
-        return newImage
-    }
-}
diff --git a/Example/YoonitFacefyDemo/YoonitFacefyDemo/GraphicView.swift b/Example/YoonitFacefyDemo/YoonitFacefyDemo/GraphicView.swift
index e1ef89f..0294af5 100644
--- a/Example/YoonitFacefyDemo/YoonitFacefyDemo/GraphicView.swift
+++ b/Example/YoonitFacefyDemo/YoonitFacefyDemo/GraphicView.swift
@@ -4,8 +4,8 @@
 // +-+-+-+-+-+-+
 //
 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | Yoonit Camera lib for iOS applications |
-// | Haroldo Teruya @ Cyberlabs AI 2020 |
+// | Yoonit Facefy lib for iOS applications |
+// | Haroldo Teruya @ Cyberlabs AI 2021 |
 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
 //
diff --git a/Example/YoonitFacefyDemo/YoonitFacefyDemo/Info.plist b/Example/YoonitFacefyDemo/YoonitFacefyDemo/Info.plist
index b37d958..78f2e43 100644
--- a/Example/YoonitFacefyDemo/YoonitFacefyDemo/Info.plist
+++ b/Example/YoonitFacefyDemo/YoonitFacefyDemo/Info.plist
@@ -2,8 +2,6 @@
 <dict>
-	<key>NSCameraUsageDescription</key>
-	<string>Camera usage for Facefy</string>
 	<key>CFBundleDevelopmentRegion</key>
 	<string>$(DEVELOPMENT_LANGUAGE)</string>
 	<key>CFBundleExecutable</key>
@@ -22,8 +20,10 @@
 	<string>1</string>
 	<key>LSRequiresIPhoneOS</key>
 	<true/>
+	<key>NSCameraUsageDescription</key>
+	<string>Camera usage for Facefy</string>
 	<key>NSFacefyUsageDescription</key>
-	<string>The face detection's module for iOS with a lot of awesome features</string>
+	<string>The face detection's module for iOS with a lot of awesome features</string>
 	<key>UIApplicationSceneManifest</key>
 	<dict>
 		<key>UIApplicationSupportsMultipleScenes</key>
diff --git a/Example/YoonitFacefyDemo/YoonitFacefyDemo/SceneDelegate.swift b/Example/YoonitFacefyDemo/YoonitFacefyDemo/SceneDelegate.swift
index 8cdd366..41e6b46 100644
--- a/Example/YoonitFacefyDemo/YoonitFacefyDemo/SceneDelegate.swift
+++ b/Example/YoonitFacefyDemo/YoonitFacefyDemo/SceneDelegate.swift
@@ -9,7 +9,6 @@
 //
-
 import UIKit
 
 class SceneDelegate: UIResponder, UIWindowSceneDelegate {
diff --git a/README.md b/README.md
index 60167da..9deffa9 100644
--- a/README.md
+++ b/README.md
@@ -52,21 +52,28 @@ let facefy: Facefy = Facefy()
 self.facefy.detect(image!)
 { faceDetected in
 
     if let faceDetected: FaceDetected = faceDetected {
-        if let leftEyeOpenProbability = faceDetected.leftEyeOpenProbability {
-            self.leftEyeLabel.text = String(format: "%.2f", leftEyeOpenProbability)
+
+        if faceDetected.hasLeftEyeOpenProbability {
+            print(String(format: "%.2f", faceDetected.leftEyeOpenProbability))
         }
-        if let rightEyeOpenProbability = faceDetected.rightEyeOpenProbability {
-            self.rightEyeLabel.text = String(format: "%.2f", rightEyeOpenProbability)
+        if faceDetected.hasRightEyeOpenProbability {
+            print(String(format: "%.2f", faceDetected.rightEyeOpenProbability))
         }
-        if let smilingProbability = faceDetected.smilingProbability {
-            self.smillingLabel.text = String(format: "%.2f", smilingProbability)
+        if faceDetected.hasSmilingProbability {
+            print(String(format: "%.2f", faceDetected.smilingProbability))
         }
-        if let headEulerAngleY = faceDetected.headEulerAngleY {
-            self.leftRightMovementeLabel.text = String(format: "%.2f", headEulerAngleY)
+        if faceDetected.hasHeadEulerAngleX {
+            print(String(format: "%.2f", faceDetected.headEulerAngleX))
+        }
+        if faceDetected.hasHeadEulerAngleY {
+            print(String(format: "%.2f", faceDetected.headEulerAngleY))
+        }
+        if faceDetected.hasHeadEulerAngleZ {
+            print(String(format: "%.2f", faceDetected.headEulerAngleZ))
         }
-        if let cgImage = image?.cgImage {
-
+        if let cgImage = image?.cgImage {
+            // Crop the face image.
             UIImage(
                 cgImage: cgImage.cropping(to: faceDetected.boundingBox)!
@@ -84,20 +91,26 @@
 
 | Function | Parameters | Return Type | Description |
 | - | - | - | - |
-| detect | `image: InputImage, onFaceDetected: @escaping (FaceDetected) -> Void, onMessage: @escaping (String) -> Void` | void | Detec a face from image and return the result in the [`FaceDetected`](#facedetected) as a closure. |
+| detect | `image: UIImage, onSuccess: @escaping (FaceDetected?) -> Void, onError: @escaping (String) -> Void` | void | Detect a face from image and return the result in the [`FaceDetected`](#facedetected) as a closure. |
 
 ### FaceDetected
 
 | Attribute | Type | Description |
 | - | - | - |
-| leftEyeOpenProbability | `CGFloat?` | The left eye open probability. |
-| rightEyeOpenProbability | `CGFloat?` | The right eye open probability. |
-| smilingProbability | `CGFloat?` | The smilling probability. |
-| headEulerAngleX | `CGFloat?` | The angle in degrees that indicate the vertical head direction. See [Head Movements](#headmovements) |
-| headEulerAngleY | `CGFloat?` | The angle in degrees that indicate the horizontal head direction. See [Head Movements](#headmovements) |
-| headEulerAngleZ | `CGFloat?` | The angle in degrees that indicate the tilt head direction. See [Head Movements](#headmovements) |
+| boundingBox | `CGRect` | The face bounding box related to the image input. |
+| leftEyeOpenProbability | `Float` | The left eye open probability. |
+| hasLeftEyeOpenProbability | `Bool` | Indicates whether a left eye open probability is available. |
+| rightEyeOpenProbability | `Float` | The right eye open probability. |
+| hasRightEyeOpenProbability | `Bool` | Indicates whether a right eye open probability is available. |
+| smilingProbability | `Float` | The smiling probability. |
+| hasSmilingProbability | `Bool` | Indicates whether a smiling probability is available. |
+| headEulerAngleX | `Float` | The angle in degrees that indicate the vertical head direction. See [Head Movements](#headmovements). |
+| hasHeadEulerAngleX | `Bool` | Indicates whether the detector found the head x euler angle. |
+| headEulerAngleY | `Float` | The angle in degrees that indicate the horizontal head direction. See [Head Movements](#headmovements). |
+| hasHeadEulerAngleY | `Bool` | Indicates whether the detector found the head y euler angle. |
+| headEulerAngleZ | `Float` | The angle in degrees that indicate the tilt head direction. See [Head Movements](#headmovements). |
+| hasHeadEulerAngleZ | `Bool` | Indicates whether the detector found the head z euler angle. |
 | contours | `[CGPoint]` | List of points that represents the shape of the detected face. |
-| boundingBox | `CGRect` | The face bounding box. |
 
 #### Head Movements
diff --git a/YoonitFacefy.podspec b/YoonitFacefy.podspec
index 71caad3..fb2880e 100644
--- a/YoonitFacefy.podspec
+++ b/YoonitFacefy.podspec
@@ -9,7 +9,7 @@ Pod::Spec.new do |spec|
   #
 
   spec.name         = "YoonitFacefy"
-  spec.version      = "1.0.1"
+  spec.version      = "1.0.2"
   spec.summary      = "The face detection's module for iOS with a lot of awesome features"
 
   # This description is used to generate tags and improve search results.
diff --git a/YoonitFacefy.xcodeproj/project.pbxproj b/YoonitFacefy.xcodeproj/project.pbxproj
index 33599fe..7c22102 100644
--- a/YoonitFacefy.xcodeproj/project.pbxproj
+++ b/YoonitFacefy.xcodeproj/project.pbxproj
@@ -422,7 +422,7 @@
                     "@executable_path/Frameworks",
                     "@loader_path/Frameworks",
                 );
-                MARKETING_VERSION = 1.0.1;
+                MARKETING_VERSION = 1.0.2;
                 PRODUCT_BUNDLE_IDENTIFIER = ai.cyberlabs.YoonitFacefy;
                 PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
                 SKIP_INSTALL = YES;
@@ -452,7 +452,7 @@
                     "@executable_path/Frameworks",
                     "@loader_path/Frameworks",
                 );
-                MARKETING_VERSION = 1.0.1;
+                MARKETING_VERSION = 1.0.2;
                 PRODUCT_BUNDLE_IDENTIFIER = ai.cyberlabs.YoonitFacefy;
                 PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
                 SKIP_INSTALL = YES;
diff --git a/YoonitFacefy/src/FaceDetected.swift b/YoonitFacefy/src/FaceDetected.swift
index 9fcdd90..f051ed1 100644
--- a/YoonitFacefy/src/FaceDetected.swift
+++ b/YoonitFacefy/src/FaceDetected.swift
@@ -4,41 +4,59 @@
 // +-+-+-+-+-+-+
 //
 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-// | Yoonit Camera lib for iOS applications |
-// | Haroldo Teruya @ Cyberlabs AI 2020 |
+// | Yoonit Facefy lib for iOS applications |
+// | Haroldo Teruya @ Cyberlabs AI 2021 |
 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
 //
 
 import UIKit
 
 public class FaceDetected {
-
-    public var leftEyeOpenProbability: CGFloat? = nil
-    public var rightEyeOpenProbability: CGFloat? = nil
-    public var smilingProbability: CGFloat? = nil
-    public var headEulerAngleX: CGFloat? = nil
-    public var headEulerAngleY: CGFloat? = nil
-    public var headEulerAngleZ: CGFloat? = nil
+
+    public var boundingBox: CGRect
+    public var leftEyeOpenProbability: Float
+    public var hasLeftEyeOpenProbability: Bool
+    public var rightEyeOpenProbability: Float
+    public var hasRightEyeOpenProbability: Bool
+    public var smilingProbability: Float
+    public var hasSmilingProbability: Bool
+    public var headEulerAngleX: Float
+    public var hasHeadEulerAngleX: Bool
+    public var headEulerAngleY: Float
+    public var hasHeadEulerAngleY: Bool
+    public var headEulerAngleZ: Float
+    public var hasHeadEulerAngleZ: Bool
     public var contours: [CGPoint] = []
-    public var boundingBox: CGRect = CGRect(x: 0.0, y: 0.0, width: 0.0, height: 0.0)
 
     init(
-        leftEyeOpenProbability: CGFloat?,
-        rightEyeOpenProbability: CGFloat?,
-        smilingProbability: CGFloat?,
-        headEulerAngleX: CGFloat?,
-        headEulerAngleY: CGFloat?,
-        headEulerAngleZ: CGFloat?,
-        contours: [CGPoint],
-        boundingBox: CGRect
+        boundingBox: CGRect,
+        leftEyeOpenProbability: Float,
+        hasLeftEyeOpenProbability: Bool,
+        rightEyeOpenProbability: Float,
+        hasRightEyeOpenProbability: Bool,
+        smilingProbability: Float,
+        hasSmilingProbability: Bool,
+        headEulerAngleX: Float,
+        hasHeadEulerAngleX: Bool,
+        headEulerAngleY: Float,
+        hasHeadEulerAngleY: Bool,
+        headEulerAngleZ: Float,
+        hasHeadEulerAngleZ: Bool,
+        contours: [CGPoint]
     ) {
+        self.boundingBox = boundingBox
         self.leftEyeOpenProbability = leftEyeOpenProbability
+        self.hasLeftEyeOpenProbability = hasLeftEyeOpenProbability
         self.rightEyeOpenProbability = rightEyeOpenProbability
+        self.hasRightEyeOpenProbability = hasRightEyeOpenProbability
         self.smilingProbability = smilingProbability
+        self.hasSmilingProbability = hasSmilingProbability
         self.headEulerAngleX = headEulerAngleX
+        self.hasHeadEulerAngleX = hasHeadEulerAngleX
         self.headEulerAngleY = headEulerAngleY
+        self.hasHeadEulerAngleY = hasHeadEulerAngleY
         self.headEulerAngleZ = headEulerAngleZ
+        self.hasHeadEulerAngleZ = hasHeadEulerAngleZ
         self.contours = contours
-        self.boundingBox = boundingBox
     }
 }
diff --git a/YoonitFacefy/src/FacefyController.swift b/YoonitFacefy/src/FacefyController.swift
index 6edac2f..3968620 100644
--- a/YoonitFacefy/src/FacefyController.swift
+++ b/YoonitFacefy/src/FacefyController.swift
@@ -53,18 +53,9 @@ public class FacefyController {
             let face: Face = faces.sorted {
                 return $0.frame.width > $1.frame.width
             }[0]
-
-            var faceContours: [CGPoint] = []
-
-            // Get face analysis classification.
-            let leftEyeOpenProbability: CGFloat? = face.hasRightEyeOpenProbability ? face.rightEyeOpenProbability : nil
-            let rightEyeOpenProbability: CGFloat? = face.hasLeftEyeOpenProbability ? face.leftEyeOpenProbability : nil
-            let smilingProbability: CGFloat? = face.hasSmilingProbability ? face.smilingProbability : nil
-            let headEulerAngleX: CGFloat? = face.hasHeadEulerAngleX ? face.headEulerAngleX : nil
-            let headEulerAngleY: CGFloat? = face.hasHeadEulerAngleY ? face.headEulerAngleY : nil
-            let headEulerAngleZ: CGFloat? = face.hasHeadEulerAngleZ ? face.headEulerAngleZ : nil
-
+
             // Get face contours.
+            var faceContours: [CGPoint] = []
             if !face.contours.isEmpty {
                 for faceContour in face.contours {
                     for point in faceContour.points {
@@ -75,14 +66,20 @@
 
             onSuccess(
                 FaceDetected(
-                    leftEyeOpenProbability: leftEyeOpenProbability,
-                    rightEyeOpenProbability: rightEyeOpenProbability,
-                    smilingProbability: smilingProbability,
-                    headEulerAngleX: headEulerAngleX,
-                    headEulerAngleY: headEulerAngleY,
-                    headEulerAngleZ: headEulerAngleZ,
-                    contours: faceContours,
-                    boundingBox: face.frame
+                    boundingBox: face.frame,
+                    leftEyeOpenProbability: Float(face.rightEyeOpenProbability),
+                    hasLeftEyeOpenProbability: face.hasRightEyeOpenProbability,
+                    rightEyeOpenProbability: Float(face.leftEyeOpenProbability),
+                    hasRightEyeOpenProbability: face.hasLeftEyeOpenProbability,
+                    smilingProbability: Float(face.smilingProbability),
+                    hasSmilingProbability: face.hasSmilingProbability,
+                    headEulerAngleX: Float(face.headEulerAngleX),
+                    hasHeadEulerAngleX: face.hasHeadEulerAngleX,
+                    headEulerAngleY: Float(face.headEulerAngleY),
+                    hasHeadEulerAngleY: face.hasHeadEulerAngleY,
+                    headEulerAngleZ: Float(face.headEulerAngleZ),
+                    hasHeadEulerAngleZ: face.hasHeadEulerAngleZ,
+                    contours: faceContours
                 )
             )
         }
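
Below is a minimal usage sketch, not part of the patch, showing how a consumer exercises the FaceDetected API after this change. It mirrors the demo's trailing-closure call style; the function name describeFace and the UIImage value photo are placeholders, and error handling through the documented onError closure is omitted.

import UIKit
import YoonitFacefy

func describeFace(in photo: UIImage) {
    let facefy = Facefy()

    facefy.detect(photo) { faceDetected in
        guard let face = faceDetected else { return }

        // Probabilities are plain Floats now; the paired has* flag reports whether
        // the classifier actually produced a value for this face.
        if face.hasSmilingProbability {
            print("Smiling:", face.smilingProbability > 0.8 ? "yes" : "no")
        }
        if face.hasHeadEulerAngleY {
            print(String(format: "Horizontal head angle: %.2f", face.headEulerAngleY))
        }

        // boundingBox is relative to the input image, so it can be used to crop the face.
        if let cropped = photo.cgImage?.cropping(to: face.boundingBox) {
            print("Cropped face size:", UIImage(cgImage: cropped).size)
        }
    }
}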