diff --git a/.github/workflows/code-analysis.yml b/.github/workflows/code-analysis.yml
index 4338f748..00ed05bd 100644
--- a/.github/workflows/code-analysis.yml
+++ b/.github/workflows/code-analysis.yml
@@ -134,3 +134,9 @@ jobs:
run: brew install ktlint
- name: Run ktlint on Kotlin code
run: ktlint
+
+ # Lint Swift code (iOS plugins) with SwiftLint.
+ - name: Install SwiftLint
+ run: brew install swiftlint
+ - name: Run SwiftLint on Swift code
+ run: swiftlint lint
diff --git a/.run/Example App - Debug.run.xml b/.run/Example App - Debug.run.xml
new file mode 100644
index 00000000..8eba9a03
--- /dev/null
+++ b/.run/Example App - Debug.run.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/.run/Example App - Profile.run.xml b/.run/Example App - Profile.run.xml
new file mode 100644
index 00000000..b6cebf2a
--- /dev/null
+++ b/.run/Example App - Profile.run.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/.run/Example App - Release.run.xml b/.run/Example App - Release.run.xml
new file mode 100644
index 00000000..db79af23
--- /dev/null
+++ b/.run/Example App - Release.run.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/.swiftlint.yml b/.swiftlint.yml
new file mode 100644
index 00000000..3aebd394
--- /dev/null
+++ b/.swiftlint.yml
@@ -0,0 +1,13 @@
+# Exclude generated / third-party paths
+excluded:
+ - packages/example/ios/Flutter/ephemeral
+
+function_body_length:
+ warning: 90
+ error: 120
+function_parameter_count:
+ warning: 6
+ error: 8
+cyclomatic_complexity:
+ warning: 20
+ error: 30
diff --git a/.vscode/launch.json b/.vscode/launch.json
index da673a92..7d425512 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -2,14 +2,14 @@
"version": "0.2.0",
"configurations": [
{
- "name": "Flutter (Example App)",
+ "name": "Example App - Debug",
"request": "launch",
"type": "dart",
"program": "${workspaceFolder}/packages/example/lib/main.dart",
"cwd": "${workspaceFolder}/packages/example"
},
{
- "name": "Flutter (Example App - Profile)",
+ "name": "Example App - Profile",
"request": "launch",
"type": "dart",
"flutterMode": "profile",
@@ -17,7 +17,7 @@
"cwd": "${workspaceFolder}/packages/example"
},
{
- "name": "Flutter (Example App - Release)",
+ "name": "Example App - Release",
"request": "launch",
"type": "dart",
"flutterMode": "release",
diff --git a/packages/example/ios/Podfile.lock b/packages/example/ios/Podfile.lock
index c8edb1f8..3d814d2b 100644
--- a/packages/example/ios/Podfile.lock
+++ b/packages/example/ios/Podfile.lock
@@ -1,5 +1,9 @@
PODS:
+ - camera_avfoundation (0.0.1):
+ - Flutter
- Flutter (1.0.0)
+ - flutter_pdfview (1.0.2):
+ - Flutter
- google_mlkit_barcode_scanning (0.14.2):
- Flutter
- google_mlkit_commons
@@ -152,6 +156,8 @@ PODS:
- GoogleUtilities/Logger
- GoogleUtilities/Privacy
- GTMSessionFetcher/Core (3.5.0)
+ - image_picker_ios (0.0.1):
+ - Flutter
- MLImage (1.0.0-beta8)
- MLKitBarcodeScanning (8.0.0):
- MLKitCommon (~> 14.0)
@@ -272,11 +278,16 @@ PODS:
- nanopb/encode (= 3.30910.0)
- nanopb/decode (3.30910.0)
- nanopb/encode (3.30910.0)
+ - path_provider_foundation (0.0.1):
+ - Flutter
+ - FlutterMacOS
- PromisesObjC (2.4.0)
- SSZipArchive (2.6.0)
DEPENDENCIES:
+ - camera_avfoundation (from `.symlinks/plugins/camera_avfoundation/ios`)
- Flutter (from `Flutter`)
+ - flutter_pdfview (from `.symlinks/plugins/flutter_pdfview/ios`)
- google_mlkit_barcode_scanning (from `.symlinks/plugins/google_mlkit_barcode_scanning/ios`)
- google_mlkit_commons (from `.symlinks/plugins/google_mlkit_commons/ios`)
- google_mlkit_digital_ink_recognition (from `.symlinks/plugins/google_mlkit_digital_ink_recognition/ios`)
@@ -303,6 +314,8 @@ DEPENDENCIES:
- GoogleMLKit/TextRecognitionDevanagari (~> 9.0.0)
- GoogleMLKit/TextRecognitionJapanese (~> 9.0.0)
- GoogleMLKit/TextRecognitionKorean (~> 9.0.0)
+ - image_picker_ios (from `.symlinks/plugins/image_picker_ios/ios`)
+ - path_provider_foundation (from `.symlinks/plugins/path_provider_foundation/darwin`)
SPEC REPOS:
trunk:
@@ -347,8 +360,12 @@ SPEC REPOS:
- SSZipArchive
EXTERNAL SOURCES:
+ camera_avfoundation:
+ :path: ".symlinks/plugins/camera_avfoundation/ios"
Flutter:
:path: Flutter
+ flutter_pdfview:
+ :path: ".symlinks/plugins/flutter_pdfview/ios"
google_mlkit_barcode_scanning:
:path: ".symlinks/plugins/google_mlkit_barcode_scanning/ios"
google_mlkit_commons:
@@ -393,36 +410,43 @@ EXTERNAL SOURCES:
:path: ".symlinks/plugins/google_mlkit_text_recognition/ios"
google_mlkit_translation:
:path: ".symlinks/plugins/google_mlkit_translation/ios"
+ image_picker_ios:
+ :path: ".symlinks/plugins/image_picker_ios/ios"
+ path_provider_foundation:
+ :path: ".symlinks/plugins/path_provider_foundation/darwin"
SPEC CHECKSUMS:
+ camera_avfoundation: 5675ca25298b6f81fa0a325188e7df62cc217741
Flutter: cabc95a1d2626b1b06e7179b784ebcf0c0cde467
- google_mlkit_barcode_scanning: 12d8422d8f7b00726dedf9cac00188a2b98750c2
- google_mlkit_commons: a5e4ffae5bc59ea4c7b9025dc72cb6cb79dc1166
- google_mlkit_digital_ink_recognition: a9f14c9a75bad980ea26f2d3ce277f5963f413ba
- google_mlkit_document_scanner: 107c2c91ea967acb9eb989ff988443b81b8e761a
- google_mlkit_entity_extraction: 45de8519319089085569ef9e10e2500b8d0d55a0
- google_mlkit_face_detection: ee4b72cfae062b4c972204be955d83055a4bfd36
- google_mlkit_face_mesh_detection: 644aad01e609e0962bc38495d1d807e2ae9f5e1b
- google_mlkit_genai_image_description: 84e90c2ad87ae5e2f05cc4d5f1924059c799fc12
- google_mlkit_genai_prompt: f4a41c9548172a86c6723e5c83c1c3295b6ad299
- google_mlkit_genai_proofreading: 09ca9edfaa66e58ff165d2047286c10ba0a831ca
- google_mlkit_genai_rewriting: 7a647b345cf7d9fe8fda004142ca980abbba9724
- google_mlkit_genai_speech_recognition: 783fd846946877dc812a81d629c676b41973ce72
- google_mlkit_genai_summarization: 8d750cfca622746aef09d6521bf2e764e8647ded
- google_mlkit_image_labeling: 6f6fdb11c14600e01898e59a8c4413b255ede272
- google_mlkit_language_id: de6f5cc02967420549c3c3a1624b359217442db9
- google_mlkit_object_detection: 6a81b32faf7a9b700bed7a2caa67254818553257
- google_mlkit_pose_detection: 211eabf55f5ea8d6a9537fdade0a37148fc84d8b
- google_mlkit_selfie_segmentation: 0317616b7e460f242bd13a805b70f4e0ba636336
- google_mlkit_smart_reply: fa236bc7f5f8ed70a894a659ec8d43b5b05374a6
- google_mlkit_subject_segmentation: 864c91cd7c89cedc4b17021794c6c182ba165e0f
- google_mlkit_text_recognition: b3de5adb786ad7a0fe8e13618387b8d2df0f3c70
- google_mlkit_translation: c5a10fefc8f641df87a49d2744b2b1bf0c418064
+ flutter_pdfview: 32bf27bda6fd85b9dd2c09628a824df5081246cf
+ google_mlkit_barcode_scanning: 37a95c619ade2966885d549d337c7ce25cdcd6e1
+ google_mlkit_commons: fdbddbd42f5f4680ce01932017e31d1e0f02b4ef
+ google_mlkit_digital_ink_recognition: 4faaf4dc06be7427ca2e5a2b605e8c4e6f8919b5
+ google_mlkit_document_scanner: 4286ef97b690c747db9db15337207a631e3efa24
+ google_mlkit_entity_extraction: 2500f93887c590c0c68341d9b64523c992346281
+ google_mlkit_face_detection: 4cf4f7bda5e0cfdb4e52c11603aa025a81138a47
+ google_mlkit_face_mesh_detection: 103cfafc7ee649085abc40b07cedb1c9e165e9cb
+ google_mlkit_genai_image_description: 0a468e8016204f093f9f352a92823573f1b49728
+ google_mlkit_genai_prompt: d527fb9e9b6885c373075a058c96b00c552f14c0
+ google_mlkit_genai_proofreading: d4f6e35918c40f08069b4c2c89ce4f2fb9f1b4d9
+ google_mlkit_genai_rewriting: b9821623a69e2cb9e54eedf68200a29dd89b3e92
+ google_mlkit_genai_speech_recognition: 1064074540af10fdc14cf6810a004c38f0a46434
+ google_mlkit_genai_summarization: 40a3ba7e03ab369f1ce7702e0970d6e7a35baa50
+ google_mlkit_image_labeling: 7d5de5ee0fbbc23d95d33f9880e2c761beca6f23
+ google_mlkit_language_id: 892ac40627c56faece8753d0ca5f1402854bf030
+ google_mlkit_object_detection: 960463a8dff3a818a41d704632f9017a3d80a5a6
+ google_mlkit_pose_detection: c378a1e88fc7ed97b64219f72edc2880ac40ca23
+ google_mlkit_selfie_segmentation: 38f73022f0e63f253a050298f6d89c2978548991
+ google_mlkit_smart_reply: ddac7d36acf06981e4e1fb009cfd8e10197af3cc
+ google_mlkit_subject_segmentation: b6d3bfeee5deb973d2832a68022c28cb8c36668f
+ google_mlkit_text_recognition: 00511b4b7987ad04770382e343ad73d40250a049
+ google_mlkit_translation: f163102828e75ce10a4d5b31852599755a516809
GoogleDataTransport: aae35b7ea0c09004c3797d53c8c41f66f219d6a7
GoogleMLKit: b1eee21a41c57704fe72483b15c85cb2c0cd7444
GoogleToolboxForMac: d1a2cbf009c453f4d6ded37c105e2f67a32206d8
GoogleUtilities: 00c88b9a86066ef77f0da2fab05f65d7768ed8e1
GTMSessionFetcher: 5aea5ba6bd522a239e236100971f10cb71b96ab6
+ image_picker_ios: e0ece4aa2a75771a7de3fa735d26d90817041326
MLImage: 0de5c6c2bf9e93b80ef752e2797f0836f03b58c0
MLKitBarcodeScanning: 39de223e7b1b8a8fbf10816a536dd292d8a39343
MLKitCommon: 47d47b50a031d00db62f1b0efe5a1d8b09a3b2e6
@@ -455,6 +479,7 @@ SPEC CHECKSUMS:
MLKitVisionKit: 316cd349468797ef4e7fd785bf658ca838984de3
MLKitXenoCommon: 1a4268c1222a6043047af5bb9435028206c63287
nanopb: fad817b59e0457d11a5dfbde799381cd727c1275
+ path_provider_foundation: bb55f6dbba17d0dccd6737fe6f7f34fbd0376880
PromisesObjC: f5707f49cb48b9636751c5b2e7d227e43fba9f47
SSZipArchive: 8a6ee5677c8e304bebc109e39cf0da91ccef22ea
diff --git a/packages/example/ios/Runner.xcodeproj/project.pbxproj b/packages/example/ios/Runner.xcodeproj/project.pbxproj
index 54cd2fa0..1e5adb77 100644
--- a/packages/example/ios/Runner.xcodeproj/project.pbxproj
+++ b/packages/example/ios/Runner.xcodeproj/project.pbxproj
@@ -17,16 +17,6 @@
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
/* End PBXBuildFile section */
-/* Begin PBXContainerItemProxy section */
- 331C8085294A63A400263BE5 /* PBXContainerItemProxy */ = {
- isa = PBXContainerItemProxy;
- containerPortal = 97C146E61CF9000F007C117D /* Project object */;
- proxyType = 1;
- remoteGlobalIDString = 97C146ED1CF9000F007C117D;
- remoteInfo = Runner;
- };
-/* End PBXContainerItemProxy section */
-
/* Begin PBXCopyFilesBuildPhase section */
9705A1C41CF9048500538489 /* Embed Frameworks */ = {
isa = PBXCopyFilesBuildPhase;
@@ -45,7 +35,6 @@
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; };
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; };
185FF648C8C3525E10052A2C /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; };
- 331C8081294A63A400263BE5 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; };
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = ""; };
74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; };
@@ -122,7 +111,6 @@
isa = PBXGroup;
children = (
97C146EE1CF9000F007C117D /* Runner.app */,
- 331C8081294A63A400263BE5 /* RunnerTests.xctest */,
);
name = Products;
sourceTree = "";
@@ -145,29 +133,13 @@
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
- 331C8080294A63A400263BE5 /* RunnerTests */ = {
- isa = PBXNativeTarget;
- buildConfigurationList = 331C8087294A63A400263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */;
- buildPhases = (
- 331C807D294A63A400263BE5 /* Sources */,
- 331C807F294A63A400263BE5 /* Resources */,
- );
- buildRules = (
- );
- dependencies = (
- 331C8086294A63A400263BE5 /* PBXTargetDependency */,
- );
- name = RunnerTests;
- productName = RunnerTests;
- productReference = 331C8081294A63A400263BE5 /* RunnerTests.xctest */;
- productType = "com.apple.product-type.bundle.unit-test";
- };
97C146ED1CF9000F007C117D /* Runner */ = {
isa = PBXNativeTarget;
buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */;
buildPhases = (
80DABC9846A6DDC8F0D21E5C /* [CP] Check Pods Manifest.lock */,
9740EEB61CF901F6004384FC /* Run Script */,
+ A7B8C9D0E1F2A3B4C5D6E7F8 /* SwiftLint */,
97C146EA1CF9000F007C117D /* Sources */,
97C146EB1CF9000F007C117D /* Frameworks */,
97C146EC1CF9000F007C117D /* Resources */,
@@ -197,10 +169,6 @@
LastUpgradeCheck = 1510;
ORGANIZATIONNAME = "";
TargetAttributes = {
- 331C8080294A63A400263BE5 = {
- CreatedOnToolsVersion = 14.0;
- TestTargetID = 97C146ED1CF9000F007C117D;
- };
97C146ED1CF9000F007C117D = {
CreatedOnToolsVersion = 7.3.1;
LastSwiftMigration = 1100;
@@ -224,19 +192,11 @@
projectRoot = "";
targets = (
97C146ED1CF9000F007C117D /* Runner */,
- 331C8080294A63A400263BE5 /* RunnerTests */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
- 331C807F294A63A400263BE5 /* Resources */ = {
- isa = PBXResourcesBuildPhase;
- buildActionMask = 2147483647;
- files = (
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
97C146EC1CF9000F007C117D /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
@@ -319,18 +279,26 @@
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
- shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
+ shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build\n";
};
-/* End PBXShellScriptBuildPhase section */
-
-/* Begin PBXSourcesBuildPhase section */
- 331C807D294A63A400263BE5 /* Sources */ = {
- isa = PBXSourcesBuildPhase;
+ A7B8C9D0E1F2A3B4C5D6E7F8 /* SwiftLint */ = {
+ isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
+ inputPaths = (
+ );
+ name = SwiftLint;
+ outputPaths = (
+ );
runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "if [[ \"$(uname -m)\" == arm64 ]]; then\n export PATH=\"/opt/homebrew/bin:$PATH\"\nfi\nif ! command -v swiftlint &>/dev/null; then\n echo \"warning: swiftlint not found. Install with: brew install swiftlint\"\n exit 0\nfi\nREPO_ROOT=\"${SRCROOT}/../../..\"\ncd \"$REPO_ROOT\" && swiftlint lint --fix && swiftlint lint --config \"$REPO_ROOT/.swiftlint.yml\"\n";
+ showEnvVarsInLog = 0;
};
+/* End PBXShellScriptBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
97C146EA1CF9000F007C117D /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
@@ -342,14 +310,6 @@
};
/* End PBXSourcesBuildPhase section */
-/* Begin PBXTargetDependency section */
- 331C8086294A63A400263BE5 /* PBXTargetDependency */ = {
- isa = PBXTargetDependency;
- target = 97C146ED1CF9000F007C117D /* Runner */;
- targetProxy = 331C8085294A63A400263BE5 /* PBXContainerItemProxy */;
- };
-/* End PBXTargetDependency section */
-
/* Begin PBXVariantGroup section */
97C146FA1CF9000F007C117D /* Main.storyboard */ = {
isa = PBXVariantGroup;
@@ -394,6 +354,7 @@
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
@@ -402,6 +363,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ DEVELOPMENT_TEAM = "$(DEVELOPMENT_TEAM)";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
@@ -417,6 +379,7 @@
IPHONEOS_DEPLOYMENT_TARGET = 15.5;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
+ STRING_CATALOG_GENERATE_SYMBOLS = YES;
SUPPORTED_PLATFORMS = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
@@ -430,7 +393,6 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
- DEVELOPMENT_TEAM = "$(DEVELOPMENT_TEAM)";
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 15.5;
@@ -446,53 +408,6 @@
};
name = Profile;
};
- 331C8088294A63A400263BE5 /* Debug */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- BUNDLE_LOADER = "$(TEST_HOST)";
- CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 1;
- GENERATE_INFOPLIST_FILE = YES;
- MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = com.google.ml.kit.flutter.example.RunnerTests;
- PRODUCT_NAME = "$(TARGET_NAME)";
- SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
- SWIFT_OPTIMIZATION_LEVEL = "-Onone";
- SWIFT_VERSION = 5.0;
- TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner";
- };
- name = Debug;
- };
- 331C8089294A63A400263BE5 /* Release */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- BUNDLE_LOADER = "$(TEST_HOST)";
- CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 1;
- GENERATE_INFOPLIST_FILE = YES;
- MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = com.google.ml.kit.flutter.example.RunnerTests;
- PRODUCT_NAME = "$(TARGET_NAME)";
- SWIFT_VERSION = 5.0;
- TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner";
- };
- name = Release;
- };
- 331C808A294A63A400263BE5 /* Profile */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- BUNDLE_LOADER = "$(TEST_HOST)";
- CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 1;
- GENERATE_INFOPLIST_FILE = YES;
- MARKETING_VERSION = 1.0;
- PRODUCT_BUNDLE_IDENTIFIER = com.google.ml.kit.flutter.example.RunnerTests;
- PRODUCT_NAME = "$(TARGET_NAME)";
- SWIFT_VERSION = 5.0;
- TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner";
- };
- name = Profile;
- };
97C147031CF9000F007C117D /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
@@ -517,6 +432,7 @@
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
@@ -525,6 +441,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
+ DEVELOPMENT_TEAM = "$(DEVELOPMENT_TEAM)";
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
@@ -547,6 +464,7 @@
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
+ STRING_CATALOG_GENERATE_SYMBOLS = YES;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
@@ -575,6 +493,7 @@
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
@@ -583,6 +502,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ DEVELOPMENT_TEAM = "$(DEVELOPMENT_TEAM)";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
@@ -598,6 +518,7 @@
IPHONEOS_DEPLOYMENT_TARGET = 15.5;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
+ STRING_CATALOG_GENERATE_SYMBOLS = YES;
SUPPORTED_PLATFORMS = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_OPTIMIZATION_LEVEL = "-O";
@@ -613,7 +534,6 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
- DEVELOPMENT_TEAM = "$(DEVELOPMENT_TEAM)";
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 15.5;
@@ -637,7 +557,6 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
- DEVELOPMENT_TEAM = "$(DEVELOPMENT_TEAM)";
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 15.5;
@@ -656,16 +575,6 @@
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
- 331C8087294A63A400263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */ = {
- isa = XCConfigurationList;
- buildConfigurations = (
- 331C8088294A63A400263BE5 /* Debug */,
- 331C8089294A63A400263BE5 /* Release */,
- 331C808A294A63A400263BE5 /* Profile */,
- );
- defaultConfigurationIsVisible = 0;
- defaultConfigurationName = Release;
- };
97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
diff --git a/packages/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/packages/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
index c3fedb29..95d6e55f 100644
--- a/packages/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
+++ b/packages/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
@@ -1,7 +1,7 @@
+ version = "1.7">
diff --git a/packages/google_mlkit_barcode_scanning/ios/Assets/.gitkeep b/packages/google_mlkit_barcode_scanning/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_barcode_scanning/ios/Classes/GoogleMlKitBarcodeScanningPlugin.h b/packages/google_mlkit_barcode_scanning/ios/Classes/GoogleMlKitBarcodeScanningPlugin.h
deleted file mode 100644
index 3b42608f..00000000
--- a/packages/google_mlkit_barcode_scanning/ios/Classes/GoogleMlKitBarcodeScanningPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitBarcodeScanningPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_barcode_scanning/ios/Classes/GoogleMlKitBarcodeScanningPlugin.m b/packages/google_mlkit_barcode_scanning/ios/Classes/GoogleMlKitBarcodeScanningPlugin.m
deleted file mode 100644
index 118493a9..00000000
--- a/packages/google_mlkit_barcode_scanning/ios/Classes/GoogleMlKitBarcodeScanningPlugin.m
+++ /dev/null
@@ -1,271 +0,0 @@
-#import
-#import "GoogleMlKitBarcodeScanningPlugin.h"
-#import
-#import
-
-#define channelName @"google_mlkit_barcode_scanning"
-#define startBarcodeScanner @"vision#startBarcodeScanner"
-#define closeBarcodeScanner @"vision#closeBarcodeScanner"
-
-@implementation GoogleMlKitBarcodeScanningPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitBarcodeScanningPlugin* instance = [[GoogleMlKitBarcodeScanningPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startBarcodeScanner]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:closeBarcodeScanner]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (MLKBarcodeScanner*)initialize:(FlutterMethodCall *)call {
- NSArray *array = call.arguments[@"formats"];
- NSInteger formats = 0;
- for (NSNumber *num in array) {
- formats += [num intValue];
- }
- MLKBarcodeScannerOptions *options = [[MLKBarcodeScannerOptions alloc] initWithFormats: formats];
- return [MLKBarcodeScanner barcodeScannerWithOptions:options];
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- MLKVisionImage *image = [MLKVisionImage visionImageFromData:call.arguments[@"imageData"]];
-
- NSString *uid = call.arguments[@"id"];
- MLKBarcodeScanner *barcodeScanner = [instances objectForKey:uid];
- if (barcodeScanner == NULL) {
- barcodeScanner = [self initialize:call];
- instances[uid] = barcodeScanner;
- }
-
- [barcodeScanner processImage:image
- completion:^(NSArray *barcodes,
- NSError *error) {
- if (error) {
- result(getFlutterError(error));
- return;
- } else if (!barcodes) {
- result(@[]);
- return;
- }
-
- NSMutableArray *array = [NSMutableArray array];
- for (MLKBarcode *barcode in barcodes) {
- [array addObject:[self barcodeToDictionary:barcode]];
- }
- result(array);
- }];
-}
-
-- (NSDictionary *)barcodeToDictionary:(MLKBarcode *)barcode {
- NSMutableDictionary *dictionary = [NSMutableDictionary new];
- [dictionary addEntriesFromDictionary:@{
- @"type" : @(barcode.valueType) ?: [NSNull null],
- @"format" : @(barcode.format) ?: [NSNull null],
- @"rawValue" : barcode.rawValue ?: [NSNull null],
- @"rawBytes" : barcode.rawData ?: [NSNull null],
- @"displayValue" : barcode.displayValue ?: [NSNull null],
- @"rect" : @{
- @"left" : @(barcode.frame.origin.x),
- @"top" : @(barcode.frame.origin.y),
- @"right" : @(barcode.frame.origin.x + barcode.frame.size.width),
- @"bottom" : @(barcode.frame.origin.y + barcode.frame.size.height)
- }
- }];
-
- NSMutableArray *cornerPoints = [NSMutableArray array];
- for (NSValue * point in barcode.cornerPoints) {
- CGPoint newPoint = [point CGPointValue];
- [cornerPoints addObject: @{
- @"x": @(newPoint.x),
- @"y": @(newPoint.y),
- }];
- }
- dictionary[@"points"] = cornerPoints;
-
- switch (barcode.valueType) {
- case MLKBarcodeValueTypeUnknown:
- case MLKBarcodeValueTypeISBN:
- case MLKBarcodeValueTypeProduct:
- case MLKBarcodeValueTypeText:
- break;
- case MLKBarcodeValueTypeWiFi:
- [dictionary addEntriesFromDictionary:[self wifiToDictionary:barcode.wifi]];
- break;
- case MLKBarcodeValueTypeURL:
- [dictionary addEntriesFromDictionary:[self urlToDictionary:barcode.URL]];
- break;
- case MLKBarcodeValueTypeEmail:
- [dictionary addEntriesFromDictionary:[self emailToDictionary:barcode.email]];
- break;
- case MLKBarcodeValueTypePhone:
- [dictionary addEntriesFromDictionary:[self phoneToDictionary:barcode.phone]];
- break;
- case MLKBarcodeValueTypeSMS:
- [dictionary addEntriesFromDictionary:[self smsToDictionary:barcode.sms]];
- break;
- case MLKBarcodeValueTypeGeographicCoordinates:
- [dictionary addEntriesFromDictionary:[self geoPointToDictionary:barcode.geoPoint]];
- break;
- case MLKBarcodeValueTypeDriversLicense:
- [dictionary addEntriesFromDictionary:[self driverLicenseToDictionary:barcode.driverLicense]];
- break;
- case MLKBarcodeValueTypeContactInfo:
- [dictionary addEntriesFromDictionary:[self contactInfoToDictionary:barcode.contactInfo]];
- break;
- case MLKBarcodeValueTypeCalendarEvent:
- [dictionary addEntriesFromDictionary:[self calendarEventToDictionary:barcode.calendarEvent]];
- break;
- }
-
- return dictionary;
-}
-
-- (NSDictionary *)wifiToDictionary:(MLKBarcodeWiFi *)wifi {
- return @{
- @"ssid" : wifi.ssid ?: [NSNull null],
- @"password" : wifi.password ?: [NSNull null],
- @"encryption" : @(wifi.type)
- };
-}
-
-- (NSDictionary *)urlToDictionary:(MLKBarcodeURLBookmark *)url {
- return @{
- @"title" : url.title ?: [NSNull null],
- @"url" : url.url ?: [NSNull null]
- };
-}
-
-- (NSDictionary *)emailToDictionary:(MLKBarcodeEmail *)email {
- return @{
- @"address" : email.address ?: [NSNull null],
- @"body" : email.body ?: [NSNull null],
- @"subject" : email.subject ?: [NSNull null],
- @"emailType" : @(email.type)
- };
-}
-
-- (NSDictionary *)phoneToDictionary:(MLKBarcodePhone *)phone {
- return @{
- @"number" : phone.number ?: [NSNull null],
- @"phoneType" : @(phone.type)
- };
-}
-
-- (NSDictionary *)smsToDictionary:(MLKBarcodeSMS *)sms {
- return @{
- @"number" : sms.phoneNumber ?: [NSNull null],
- @"message" : sms.message ?: [NSNull null]
- };
-}
-
-- (NSDictionary *)geoPointToDictionary:(MLKBarcodeGeoPoint *)geo {
- return @{
- @"longitude" : @(geo.longitude),
- @"latitude" : @(geo.latitude)
- };
-}
-
-- (NSDictionary *)driverLicenseToDictionary:(MLKBarcodeDriverLicense *)license {
- return @{
- @"firstName" : license.firstName ?: [NSNull null],
- @"middleName" : license.middleName ?: [NSNull null],
- @"lastName" : license.lastName ?: [NSNull null],
- @"gender" : license.gender ?: [NSNull null],
- @"addressCity" : license.addressCity ?: [NSNull null],
- @"addressStreet" : license.addressStreet ?: [NSNull null],
- @"addressState" : license.addressState ?: [NSNull null],
- @"addressZip" : license.addressZip ?: [NSNull null],
- @"birthDate" : license.birthDate ?: [NSNull null],
- @"documentType" : license.documentType ?: [NSNull null],
- @"licenseNumber" : license.licenseNumber ?: [NSNull null],
- @"expiryDate" : license.expiryDate ?: [NSNull null],
- @"issueDate" : license.issuingDate ?: [NSNull null],
- @"country" : license.issuingCountry ?: [NSNull null]
- };
-}
-
-- (NSDictionary *)contactInfoToDictionary:(MLKBarcodeContactInfo *)contact {
- NSMutableArray *addresses = [NSMutableArray array];
- [contact.addresses enumerateObjectsUsingBlock:^(MLKBarcodeAddress *_Nonnull address,
- NSUInteger idx, BOOL *_Nonnull stop) {
- NSMutableArray *addressLines = [NSMutableArray array];
- [address.addressLines enumerateObjectsUsingBlock:^(NSString *_Nonnull addressLine,
- NSUInteger idx, BOOL *_Nonnull stop) {
- [addressLines addObject:addressLine];
- }];
- [addresses addObject:@{@"addressLines" : addressLines, @"addressType" : @(address.type)}];
- }];
-
- NSMutableArray *emails = [NSMutableArray array];
- [contact.emails enumerateObjectsUsingBlock:^(MLKBarcodeEmail *_Nonnull email,
- NSUInteger idx, BOOL *_Nonnull stop) {
- [emails addObject:@{
- @"address" : email.address ?: [NSNull null],
- @"body" : email.body ?: [NSNull null],
- @"subject" : email.subject ?: [NSNull null],
- @"emailType" : @(email.type)
- }];
- }];
-
- NSMutableArray *phones = [NSMutableArray array];
- [contact.phones enumerateObjectsUsingBlock:^(MLKBarcodePhone *_Nonnull phone,
- NSUInteger idx, BOOL *_Nonnull stop) {
- [phones addObject:@{@"number" : phone.number ?: [NSNull null], @"phoneType" : @(phone.type)}];
- }];
-
- NSMutableArray *urls = [NSMutableArray array];
- [contact.urls
- enumerateObjectsUsingBlock:^(NSString *_Nonnull url, NSUInteger idx, BOOL *_Nonnull stop) {
- [urls addObject:url];
- }];
- return @{
- @"addresses" : addresses,
- @"emails" : emails,
- @"phones" : phones,
- @"urls" : urls,
- @"formattedName" : contact.name.formattedName ?: [NSNull null],
- @"firstName" : contact.name.first ?: [NSNull null],
- @"lastName" : contact.name.last ?: [NSNull null],
- @"middleName" : contact.name.middle ?: [NSNull null],
- @"prefix" : contact.name.prefix ?: [NSNull null],
- @"pronunciation" : contact.name.pronunciation ?: [NSNull null],
- @"suffix" : contact.name.suffix ?: [NSNull null],
- @"jobTitle" : contact.jobTitle ?: [NSNull null],
- @"organization" : contact.organization ?: [NSNull null]
- };
-}
-
-- (NSDictionary *)calendarEventToDictionary:(MLKBarcodeCalendarEvent *)calendar {
- return @{
- @"description" : calendar.eventDescription ?: [NSNull null],
- @"location" : calendar.location ?: [NSNull null],
- @"organizer" : calendar.organizer ?: [NSNull null],
- @"status" : calendar.status ?: [NSNull null],
- @"summary" : calendar.summary ?: [NSNull null],
- @"start" : @(calendar.start.timeIntervalSince1970),
- @"end" : @(calendar.end.timeIntervalSince1970)
- };
-}
-
-@end
diff --git a/packages/google_mlkit_barcode_scanning/ios/Classes/GoogleMlKitBarcodeScanningPlugin.swift b/packages/google_mlkit_barcode_scanning/ios/Classes/GoogleMlKitBarcodeScanningPlugin.swift
new file mode 100644
index 00000000..a88e5f96
--- /dev/null
+++ b/packages/google_mlkit_barcode_scanning/ios/Classes/GoogleMlKitBarcodeScanningPlugin.swift
@@ -0,0 +1,244 @@
+import Flutter
+import MLKitVision
+import MLKitBarcodeScanning
+import google_mlkit_commons
+
+@objc
+public class GoogleMlKitBarcodeScanningPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: BarcodeScanner] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_barcode_scanning",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitBarcodeScanningPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "vision#startBarcodeScanner":
+ handleDetection(call: call, result: result)
+ case "vision#closeBarcodeScanner":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func initialize(call: FlutterMethodCall) -> BarcodeScanner? {
+ guard let args = call.arguments as? [String: Any],
+ let array = args["formats"] as? [NSNumber] else { return nil }
+ var formats: Int = 0
+ for num in array {
+ formats += num.intValue
+ }
+ let options = BarcodeScannerOptions(formats: BarcodeFormat(rawValue: formats))
+ return BarcodeScanner.barcodeScanner(options: options)
+ }
+
+ private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let imageData = args["imageData"] as? [String: Any],
+ let uid = args["id"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+ return
+ }
+ guard let image = VisionImage.visionImage(from: imageData) else {
+ result(FlutterError(code: "invalid_image", message: "Invalid or missing image data", details: nil))
+ return
+ }
+
+ let barcodeScanner: BarcodeScanner
+ if let existing = instances[uid] {
+ barcodeScanner = existing
+ } else {
+ guard let newScanner = initialize(call: call) else {
+ result(FlutterError(code: "invalid_args", message: "Invalid options", details: nil))
+ return
+ }
+ barcodeScanner = newScanner
+ instances[uid] = barcodeScanner
+ }
+
+ barcodeScanner.process(image) { barcodes, error in
+ if let error = error as NSError? {
+ result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+ return
+ }
+ guard let barcodes = barcodes else {
+ result([])
+ return
+ }
+ result(barcodes.map { self.barcodeToDictionary($0) })
+ }
+ }
+
+ private func barcodeToDictionary(_ barcode: Barcode) -> [String: Any] {
+ var dictionary: [String: Any] = [
+ "type": barcode.valueType.rawValue,
+ "format": barcode.format.rawValue,
+ "rect": [
+ "left": Double(barcode.frame.origin.x),
+ "top": Double(barcode.frame.origin.y),
+ "right": Double(barcode.frame.origin.x + barcode.frame.size.width),
+ "bottom": Double(barcode.frame.origin.y + barcode.frame.size.height)
+ ] as [String: Double]
+ ]
+ dictionary["rawValue"] = barcode.rawValue ?? NSNull()
+ dictionary["rawBytes"] = barcode.rawData ?? NSNull()
+ dictionary["displayValue"] = barcode.displayValue ?? NSNull()
+ let points = (barcode.cornerPoints ?? []).map { point -> [String: Double] in
+ let cgPoint = point.cgPointValue
+ return ["x": Double(cgPoint.x), "y": Double(cgPoint.y)]
+ }
+ dictionary["points"] = points
+
+ switch barcode.valueType {
+ case .wiFi:
+ if let wifi = barcode.wifi { dictionary.merge(wifiToDictionary(wifi)) { _, new in new } }
+ case .URL:
+ if let url = barcode.url { dictionary.merge(urlToDictionary(url)) { _, new in new } }
+ case .email:
+ if let email = barcode.email { dictionary.merge(emailToDictionary(email)) { _, new in new } }
+ case .phone:
+ if let phone = barcode.phone { dictionary.merge(phoneToDictionary(phone)) { _, new in new } }
+ case .SMS:
+ if let sms = barcode.sms { dictionary.merge(smsToDictionary(sms)) { _, new in new } }
+ case .geographicCoordinates:
+ if let geo = barcode.geoPoint { dictionary.merge(geoPointToDictionary(geo)) { _, new in new } }
+ case .driversLicense:
+ if let license = barcode.driverLicense { dictionary.merge(driverLicenseToDictionary(license)) { _, new in new } }
+ case .contactInfo:
+ if let contact = barcode.contactInfo {
+ dictionary.merge(contactInfoToDictionary(contact)) { _, new in new }
+ }
+ case .calendarEvent:
+ if let calendar = barcode.calendarEvent {
+ dictionary.merge(calendarEventToDictionary(calendar)) { _, new in new }
+ }
+ default:
+ break
+ }
+ return dictionary
+ }
+
+ private func wifiToDictionary(_ wifi: BarcodeWifi) -> [String: Any] {
+ [
+ "ssid": wifi.ssid ?? NSNull(),
+ "password": wifi.password ?? NSNull(),
+ "encryption": wifi.type.rawValue
+ ]
+ }
+
+ private func urlToDictionary(_ url: BarcodeURLBookmark) -> [String: Any] {
+ [
+ "title": url.title ?? NSNull(),
+ "url": url.url ?? NSNull()
+ ]
+ }
+
+ private func emailToDictionary(_ email: BarcodeEmail) -> [String: Any] {
+ [
+ "address": email.address ?? NSNull(),
+ "body": email.body ?? NSNull(),
+ "subject": email.subject ?? NSNull(),
+ "emailType": email.type.rawValue
+ ]
+ }
+
+ private func phoneToDictionary(_ phone: BarcodePhone) -> [String: Any] {
+ [
+ "number": phone.number ?? NSNull(),
+ "phoneType": phone.type.rawValue
+ ]
+ }
+
+ private func smsToDictionary(_ sms: BarcodeSMS) -> [String: Any] {
+ [
+ "number": sms.phoneNumber ?? NSNull(),
+ "message": sms.message ?? NSNull()
+ ]
+ }
+
+ private func geoPointToDictionary(_ geo: BarcodeGeoPoint) -> [String: Any] {
+ [
+ "longitude": geo.longitude,
+ "latitude": geo.latitude
+ ]
+ }
+
+ private func driverLicenseToDictionary(_ license: BarcodeDriverLicense) -> [String: Any] {
+ [
+ "firstName": license.firstName ?? NSNull(),
+ "middleName": license.middleName ?? NSNull(),
+ "lastName": license.lastName ?? NSNull(),
+ "gender": license.gender ?? NSNull(),
+ "addressCity": license.addressCity ?? NSNull(),
+ "addressStreet": license.addressStreet ?? NSNull(),
+ "addressState": license.addressState ?? NSNull(),
+ "addressZip": license.addressZip ?? NSNull(),
+ "birthDate": license.birthDate ?? NSNull(),
+ "documentType": license.documentType ?? NSNull(),
+ "licenseNumber": license.licenseNumber ?? NSNull(),
+ "expiryDate": license.expiryDate ?? NSNull(),
+ "issueDate": license.issuingDate ?? NSNull(),
+ "country": license.issuingCountry ?? NSNull()
+ ]
+ }
+
+ private func contactInfoToDictionary(_ contact: BarcodeContactInfo) -> [String: Any] {
+ let addresses: [[String: Any]] = (contact.addresses ?? []).map { address in
+ [
+ "addressLines": address.addressLines ?? NSNull(),
+ "addressType": address.type.rawValue
+ ] as [String: Any]
+ }
+ let emails = (contact.emails ?? []).map { (email: BarcodeEmail) -> [String: Any] in
+ [
+ "address": email.address ?? NSNull(),
+ "body": email.body ?? NSNull(),
+ "subject": email.subject ?? NSNull(),
+ "emailType": email.type.rawValue
+ ] as [String: Any]
+ }
+ let phones = (contact.phones ?? []).map { (phone: BarcodePhone) -> [String: Any] in
+ [
+ "number": phone.number ?? NSNull(),
+ "phoneType": phone.type.rawValue
+ ] as [String: Any]
+ }
+ let name = contact.name
+ return [
+ "addresses": addresses,
+ "emails": emails,
+ "phones": phones,
+ "urls": contact.urls ?? NSNull(),
+ "formattedName": name?.formattedName ?? NSNull(),
+ "firstName": name?.first ?? NSNull(),
+ "lastName": name?.last ?? NSNull(),
+ "middleName": name?.middle ?? NSNull(),
+ "prefix": name?.prefix ?? NSNull(),
+ "pronunciation": name?.pronunciation ?? NSNull(),
+ "suffix": name?.suffix ?? NSNull(),
+ "jobTitle": contact.jobTitle ?? NSNull(),
+ "organization": contact.organization ?? NSNull()
+ ]
+ }
+
+ private func calendarEventToDictionary(_ calendar: BarcodeCalendarEvent) -> [String: Any] {
+ [
+ "description": calendar.eventDescription ?? NSNull(),
+ "location": calendar.location ?? NSNull(),
+ "organizer": calendar.organizer ?? NSNull(),
+ "status": calendar.status ?? NSNull(),
+ "summary": calendar.summary ?? NSNull(),
+ "start": calendar.start.map { NSNumber(value: $0.timeIntervalSince1970) } ?? NSNull(),
+ "end": calendar.end.map { NSNumber(value: $0.timeIntervalSince1970) } ?? NSNull()
+ ]
+ }
+}
diff --git a/packages/google_mlkit_barcode_scanning/ios/google_mlkit_barcode_scanning.podspec b/packages/google_mlkit_barcode_scanning/ios/google_mlkit_barcode_scanning.podspec
index 0fa21e68..c11671e8 100644
--- a/packages/google_mlkit_barcode_scanning/ios/google_mlkit_barcode_scanning.podspec
+++ b/packages/google_mlkit_barcode_scanning/ios/google_mlkit_barcode_scanning.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/BarcodeScanning', '~> 9.0.0'
s.dependency 'google_mlkit_commons'
diff --git a/packages/google_mlkit_commons/ios/Assets/.gitkeep b/packages/google_mlkit_commons/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_commons/ios/Classes/GenericModelManager.h b/packages/google_mlkit_commons/ios/Classes/GenericModelManager.h
deleted file mode 100644
index 05d77402..00000000
--- a/packages/google_mlkit_commons/ios/Classes/GenericModelManager.h
+++ /dev/null
@@ -1,6 +0,0 @@
-#import
-#import
-
-@interface GenericModelManager : NSObject
-- (void)manageModel:(MLKRemoteModel*)model call:(FlutterMethodCall*)call result:(FlutterResult)result;
-@end
diff --git a/packages/google_mlkit_commons/ios/Classes/GenericModelManager.m b/packages/google_mlkit_commons/ios/Classes/GenericModelManager.m
deleted file mode 100644
index 97928e28..00000000
--- a/packages/google_mlkit_commons/ios/Classes/GenericModelManager.m
+++ /dev/null
@@ -1,50 +0,0 @@
-#import "GenericModelManager.h"
-
-@implementation GenericModelManager {
- FlutterResult downloadInkResult;
-}
-
-- (void)manageModel:(MLKRemoteModel*)model call:(FlutterMethodCall*)call result:(FlutterResult)result {
- MLKModelManager *modelManager = [MLKModelManager modelManager];
- NSString *task = call.arguments[@"task"];
- if ([task isEqualToString:@"download"]) {
- MLKModelDownloadConditions *downloadConditions = [[MLKModelDownloadConditions alloc]
- initWithAllowsCellularAccess:YES
- allowsBackgroundDownloading:YES];
- [modelManager downloadModel:model conditions:downloadConditions];
- [[NSNotificationCenter defaultCenter] addObserver:self
- selector:@selector(receiveTestNotification:)
- name:MLKModelDownloadDidSucceedNotification
- object:nil];
- [[NSNotificationCenter defaultCenter] addObserver:self
- selector:@selector(receiveTestNotification:)
- name:MLKModelDownloadDidFailNotification
- object:nil];
- downloadInkResult = result;
- } else if ([task isEqualToString:@"delete"]) {
- [modelManager deleteDownloadedModel:model completion:^(NSError * _Nullable error) {
- if (error == NULL) {
- result(@"success");
- } else {
- result(@"error");
- }
- }];
- } else if ([task isEqualToString:@"check"]) {
- BOOL isModelDownloaded = [modelManager isModelDownloaded:model];
- result(@(isModelDownloaded));
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (void) receiveTestNotification:(NSNotification *) notification {
- if ([notification.name isEqualToString:MLKModelDownloadDidSucceedNotification]) {
- downloadInkResult(@"success");
- [[NSNotificationCenter defaultCenter] removeObserver:self];
- } else if ([notification.name isEqualToString:MLKModelDownloadDidFailNotification]) {
- downloadInkResult(@"error");
- [[NSNotificationCenter defaultCenter] removeObserver:self];
- }
-}
-
-@end
diff --git a/packages/google_mlkit_commons/ios/Classes/GenericModelManager.swift b/packages/google_mlkit_commons/ios/Classes/GenericModelManager.swift
new file mode 100644
index 00000000..c0d6ce91
--- /dev/null
+++ b/packages/google_mlkit_commons/ios/Classes/GenericModelManager.swift
@@ -0,0 +1,139 @@
+import Flutter
+import MLKitCommon
+
+@objc
+public class GenericModelManager: NSObject {
+ /// Tracks observer tokens and callback per in-flight download so we remove observers and avoid overwriting callbacks.
+ private var pendingDownloads: [ObjectIdentifier: PendingDownload] = [:]
+ /// Observer tokens by key so we can always remove them in completeDownload,
+ /// even when pending was already consumed (e.g. duplicate notification).
+ private var observerTokensByKey: [ObjectIdentifier: (success: NSObjectProtocol, fail: NSObjectProtocol)] = [:]
+ private let pendingDownloadsLock = NSLock()
+
+ @objc(manageModel:call:result:)
+ public func manage(
+ model: RemoteModel,
+ call: FlutterMethodCall,
+ result: @escaping FlutterResult
+ ) {
+ let modelManager = ModelManager.modelManager()
+ guard let task = call.arguments as? [String: Any],
+ let taskName = task["task"] as? String else {
+ result(FlutterMethodNotImplemented)
+ return
+ }
+
+ switch taskName {
+ case "download":
+ if modelManager.isModelDownloaded(model) {
+ result("success")
+ return
+ }
+ let modelObject = model as AnyObject
+ let key = ObjectIdentifier(modelObject)
+ do {
+ pendingDownloadsLock.lock()
+ defer { pendingDownloadsLock.unlock() }
+ if pendingDownloads[key] != nil {
+ result(FlutterError(
+ code: "download_in_progress",
+ message: "A download for this model is already in progress",
+ details: nil
+ ))
+ return
+ }
+ let successToken = NotificationCenter.default.addObserver(
+ forName: Notification.Name.mlkitModelDownloadDidSucceed,
+ object: model,
+ queue: .main
+ ) { [weak self] _ in
+ self?.completeDownload(key: key, success: true)
+ }
+ let failToken = NotificationCenter.default.addObserver(
+ forName: Notification.Name.mlkitModelDownloadDidFail,
+ object: model,
+ queue: .main
+ ) { [weak self] _ in
+ self?.completeDownload(key: key, success: false)
+ }
+ pendingDownloads[key] = PendingDownload(
+ successToken: successToken,
+ failToken: failToken,
+ result: result
+ )
+ observerTokensByKey[key] = (success: successToken, fail: failToken)
+ }
+ // Call download after releasing the lock so a synchronous notification cannot deadlock
+ // (completeDownload needs the lock). pendingDownloads is already populated above.
+ let conditions = ModelDownloadConditions(
+ allowsCellularAccess: true,
+ allowsBackgroundDownloading: true
+ )
+ modelManager.download(model, conditions: conditions)
+
+ case "delete":
+ modelManager.deleteDownloadedModel(model) { error in
+ if error == nil {
+ result("success")
+ } else {
+ result("error")
+ }
+ }
+
+ case "check":
+ let isDownloaded = modelManager.isModelDownloaded(model)
+ result(isDownloaded)
+
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ /// Removes observers for this download and invokes the stored callback once. Called on main queue.
+ /// Always removes observer tokens when present so we don't leak them if both success and fail fire (race).
+ private func completeDownload(key: ObjectIdentifier, success: Bool) {
+ pendingDownloadsLock.lock()
+ let pending = pendingDownloads.removeValue(forKey: key)
+ let tokens = observerTokensByKey.removeValue(forKey: key)
+ pendingDownloadsLock.unlock()
+ if let tokens = tokens {
+ NotificationCenter.default.removeObserver(tokens.success)
+ NotificationCenter.default.removeObserver(tokens.fail)
+ }
+ if let pending = pending {
+ pending.result(success ? "success" : "error")
+ }
+ }
+
+ deinit {
+ pendingDownloadsLock.lock()
+ let copyPending = pendingDownloads
+ let copyTokens = observerTokensByKey
+ pendingDownloads.removeAll()
+ observerTokensByKey.removeAll()
+ pendingDownloadsLock.unlock()
+ for (_, tokens) in copyTokens {
+ NotificationCenter.default.removeObserver(tokens.success)
+ NotificationCenter.default.removeObserver(tokens.fail)
+ }
+ for pending in copyPending.values {
+ pending.result(FlutterError(
+ code: "cancelled",
+ message: "Model manager deallocated during download",
+ details: nil
+ ))
+ }
+ }
+}
+
+private final class PendingDownload {
+ let successToken: NSObjectProtocol
+ let failToken: NSObjectProtocol
+ let result: FlutterResult
+
+ init(successToken: NSObjectProtocol, failToken: NSObjectProtocol, result: @escaping FlutterResult) {
+ self.successToken = successToken
+ self.failToken = failToken
+ self.result = result
+ }
+}
diff --git a/packages/google_mlkit_commons/ios/Classes/GoogleMlKitCommonsPlugin.h b/packages/google_mlkit_commons/ios/Classes/GoogleMlKitCommonsPlugin.h
deleted file mode 100644
index cdae2a63..00000000
--- a/packages/google_mlkit_commons/ios/Classes/GoogleMlKitCommonsPlugin.h
+++ /dev/null
@@ -1,16 +0,0 @@
-#import
-#import
-#import "GenericModelManager.h"
-
-@interface GoogleMlKitCommonsPlugin : NSObject
-@end
-
-@interface MLKVisionImage(FlutterPlugin)
-+ (MLKVisionImage *)visionImageFromData:(NSDictionary *)imageData;
-@end
-
-static FlutterError *getFlutterError(NSError *error) {
- return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code]
- message:error.domain
- details:error.localizedDescription];
-}
diff --git a/packages/google_mlkit_commons/ios/Classes/GoogleMlKitCommonsPlugin.m b/packages/google_mlkit_commons/ios/Classes/GoogleMlKitCommonsPlugin.m
deleted file mode 100644
index 1eb6e73e..00000000
--- a/packages/google_mlkit_commons/ios/Classes/GoogleMlKitCommonsPlugin.m
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GoogleMlKitCommonsPlugin.h"
-
-#define channelName @"google_mlkit_commons"
-
-@implementation GoogleMlKitCommonsPlugin
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitCommonsPlugin* instance = [[GoogleMlKitCommonsPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- result(FlutterMethodNotImplemented);
-}
-
-@end
diff --git a/packages/google_mlkit_commons/ios/Classes/GoogleMlKitCommonsPlugin.swift b/packages/google_mlkit_commons/ios/Classes/GoogleMlKitCommonsPlugin.swift
new file mode 100644
index 00000000..c91ba711
--- /dev/null
+++ b/packages/google_mlkit_commons/ios/Classes/GoogleMlKitCommonsPlugin.swift
@@ -0,0 +1,18 @@
+import Flutter
+import MLKitVision
+
+@objc
+public class GoogleMlKitCommonsPlugin: NSObject, FlutterPlugin {
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_commons",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitCommonsPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ result(FlutterMethodNotImplemented)
+ }
+}
diff --git a/packages/google_mlkit_commons/ios/Classes/MLKVisionImage+FlutterPlugin.m b/packages/google_mlkit_commons/ios/Classes/MLKVisionImage+FlutterPlugin.m
deleted file mode 100644
index 466f5a2c..00000000
--- a/packages/google_mlkit_commons/ios/Classes/MLKVisionImage+FlutterPlugin.m
+++ /dev/null
@@ -1,134 +0,0 @@
-#import "GoogleMlKitCommonsPlugin.h"
-#import
-
-@implementation MLKVisionImage(FlutterPlugin)
-
-+ (MLKVisionImage *)visionImageFromData:(NSDictionary *)imageData {
- NSString *imageType = imageData[@"type"];
- if ([@"file" isEqualToString:imageType]) {
- return [self filePathToVisionImage:imageData[@"path"]];
- } else if ([@"bytes" isEqualToString:imageType]) {
- return [self bytesToVisionImage:imageData];
- } else if ([@"bitmap" isEqualToString:imageType]) {
- return [self bitmapToVisionImage:imageData];
- } else {
- NSString *errorReason = [NSString stringWithFormat:@"No image type for: %@", imageType];
- @throw [NSException exceptionWithName:NSInvalidArgumentException
- reason:errorReason
- userInfo:nil];
- }
-}
-
-+ (MLKVisionImage *)filePathToVisionImage:(NSString *)filePath {
- UIImage *image = [UIImage imageWithContentsOfFile:filePath];
- MLKVisionImage *visionImage = [[MLKVisionImage alloc] initWithImage:image];
- visionImage.orientation = image.imageOrientation;
- return visionImage;
-}
-
-+ (MLKVisionImage *)bytesToVisionImage:(NSDictionary *)imageData {
- FlutterStandardTypedData *byteData = imageData[@"bytes"];
- NSData *imageBytes = byteData.data;
- NSDictionary *metadata = imageData[@"metadata"];
- NSNumber *width = metadata[@"width"];
- NSNumber *height = metadata[@"height"];
- NSNumber *rawFormat = metadata[@"image_format"];
- NSNumber *bytesPerRow = metadata[@"bytes_per_row"];
- CVPixelBufferRef pxBuffer = [self bytesToPixelBuffer:width.unsignedLongValue
- height:height.unsignedLongValue
- format:FOUR_CHAR_CODE(rawFormat.unsignedIntValue)
- baseAddress:(void *)imageBytes.bytes
- bytesPerRow:bytesPerRow.unsignedLongValue];
- return [self pixelBufferToVisionImage:pxBuffer];
-}
-
-+ (CVPixelBufferRef)bytesToPixelBuffer:(size_t)width
- height:(size_t)height
- format:(FourCharCode)format
- baseAddress:(void *)baseAddress
- bytesPerRow:(size_t)bytesPerRow {
- CVPixelBufferRef pxBuffer = NULL;
- CVPixelBufferCreateWithBytes(kCFAllocatorDefault, width, height, format, baseAddress, bytesPerRow,
- NULL, NULL, NULL, &pxBuffer);
- return pxBuffer;
-}
-
-+ (MLKVisionImage *)pixelBufferToVisionImage:(CVPixelBufferRef)pixelBufferRef {
- CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef];
-
- CIContext *temporaryContext = [CIContext contextWithOptions:nil];
- CGImageRef videoImage =
- [temporaryContext createCGImage:ciImage
- fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBufferRef),
- CVPixelBufferGetHeight(pixelBufferRef))];
-
- UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
- CVPixelBufferRelease(pixelBufferRef);
- CGImageRelease(videoImage);
- return [[MLKVisionImage alloc] initWithImage:uiImage];
-}
-
-+ (MLKVisionImage *)bitmapToVisionImage:(NSDictionary *)imageDict {
- // Get the bitmap data
- FlutterStandardTypedData *bitmapData = imageDict[@"bitmapData"];
-
- if (bitmapData == nil) {
- NSString *errorReason = @"Bitmap data is nil";
- @throw [NSException exceptionWithName:NSInvalidArgumentException
- reason:errorReason
- userInfo:nil];
- }
-
- // Try to get metadata if available
- NSDictionary *metadata = imageDict[@"metadata"];
- if (metadata != nil) {
- NSNumber *width = metadata[@"width"];
- NSNumber *height = metadata[@"height"];
-
- if (width != nil && height != nil) {
- // Create bitmap context from raw RGBA data
- CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
- uint8_t *rawData = (uint8_t*)[bitmapData.data bytes];
- size_t bytesPerPixel = 4;
- size_t bytesPerRow = bytesPerPixel * width.intValue;
- size_t bitsPerComponent = 8;
-
- CGContextRef context = CGBitmapContextCreate(rawData, width.intValue, height.intValue,
- bitsPerComponent, bytesPerRow, colorSpace,
- kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
-
- if (context) {
- CGImageRef imageRef = CGBitmapContextCreateImage(context);
- UIImage *image = [UIImage imageWithCGImage:imageRef];
-
- CGImageRelease(imageRef);
- CGContextRelease(context);
- CGColorSpaceRelease(colorSpace);
-
- if (image) {
- MLKVisionImage *visionImage = [[MLKVisionImage alloc] initWithImage:image];
- visionImage.orientation = image.imageOrientation;
- return visionImage;
- }
- }
-
- CGColorSpaceRelease(colorSpace);
- }
- }
-
- // Fallback: try to create UIImage directly from data
- UIImage *image = [UIImage imageWithData:bitmapData.data];
-
- if (image == nil) {
- NSString *errorReason = @"Failed to create UIImage from bitmap data";
- @throw [NSException exceptionWithName:NSInvalidArgumentException
- reason:errorReason
- userInfo:nil];
- }
-
- MLKVisionImage *visionImage = [[MLKVisionImage alloc] initWithImage:image];
- visionImage.orientation = image.imageOrientation;
- return visionImage;
-}
-
-@end
diff --git a/packages/google_mlkit_commons/ios/Classes/MLKVisionImage+FlutterPlugin.swift b/packages/google_mlkit_commons/ios/Classes/MLKVisionImage+FlutterPlugin.swift
new file mode 100644
index 00000000..d2265806
--- /dev/null
+++ b/packages/google_mlkit_commons/ios/Classes/MLKVisionImage+FlutterPlugin.swift
@@ -0,0 +1,179 @@
+import Flutter
+import MLKitVision
+import UIKit
+import CoreGraphics
+import CoreVideo
+
+// MARK: - VisionImage from Flutter imageData
+// CVPixelBuffer and CGImage from CVPixelBufferCreateWithBytes / createCGImage are Core Foundation
+// types. In Swift they are memory-managed by ARC (CVPixelBufferRelease/CGImageRelease are
+// explicitly unavailable). The Objective-C version's explicit releases are therefore not needed here.
+
+extension VisionImage {
+ /// Creates a VisionImage from method-channel imageData. Returns nil for invalid/missing data instead of crashing.
+ @objc(visionImageFromData:)
+ public static func visionImage(from imageData: [String: Any]) -> VisionImage? {
+ guard let imageType = imageData["type"] as? String else {
+ return nil
+ }
+ switch imageType {
+ case "file":
+ guard let path = imageData["path"] as? String else {
+ return nil
+ }
+ return filePathToVisionImage(path)
+ case "bytes":
+ return bytesToVisionImage(imageData)
+ case "bitmap":
+ return bitmapToVisionImage(imageData)
+ default:
+ return nil
+ }
+ }
+
+ private static func filePathToVisionImage(_ filePath: String) -> VisionImage? {
+ guard let image = UIImage(contentsOfFile: filePath) else {
+ return nil
+ }
+ let visionImage = VisionImage(image: image)
+ visionImage.orientation = image.imageOrientation
+ return visionImage
+ }
+
+ private static func bytesToVisionImage(_ imageData: [String: Any]) -> VisionImage? {
+ guard let byteData = imageData["bytes"] as? FlutterStandardTypedData else {
+ return nil
+ }
+ let imageBytes = byteData.data
+ guard let metadata = imageData["metadata"] as? [String: Any],
+ let width = metadata["width"] as? NSNumber,
+ let height = metadata["height"] as? NSNumber,
+ let rawFormat = metadata["image_format"] as? NSNumber,
+ let bytesPerRow = metadata["bytes_per_row"] as? NSNumber else {
+ return nil
+ }
+ let widthVal = Int(truncating: width)
+ let heightVal = Int(truncating: height)
+ let bytesPerRowVal = Int(truncating: bytesPerRow)
+ let bufferSize = bytesPerRowVal * heightVal
+ guard bufferSize > 0, imageBytes.count >= bufferSize else {
+ return nil
+ }
+ let copy = UnsafeMutableRawPointer.allocate(byteCount: bufferSize, alignment: 1)
+ let copyBuffer = UnsafeMutableBufferPointer(start: copy.assumingMemoryBound(to: UInt8.self), count: bufferSize)
+ imageBytes.copyBytes(to: copyBuffer)
+ guard let pxBuffer = bytesToPixelBuffer(
+ width: widthVal,
+ height: heightVal,
+ format: OSType(truncating: rawFormat),
+ baseAddress: copy,
+ bytesPerRow: bytesPerRowVal,
+ releaseCallback: Self.releasePixelBufferBytes
+ ) else {
+ copy.deallocate()
+ return nil
+ }
+ // pixelBufferToVisionImage creates a VisionImage from the buffer; on success we return it.
+ // On nil, pxBuffer goes out of scope here and ARC releases the CVPixelBuffer, which
+ // invokes releasePixelBufferBytes and deallocates copy—no leak.
+ if let visionImage = pixelBufferToVisionImage(pxBuffer) {
+ return visionImage
+ }
+ return nil
+ }
+
+ private static let releasePixelBufferBytes: CVPixelBufferReleaseBytesCallback = { _, baseAddress in
+ guard let baseAddress = baseAddress else { return }
+ UnsafeMutableRawPointer(mutating: baseAddress).deallocate()
+ }
+
+ private static func bytesToPixelBuffer(
+ width: Int,
+ height: Int,
+ format: OSType,
+ baseAddress: UnsafeMutableRawPointer,
+ bytesPerRow: Int,
+ releaseCallback: CVPixelBufferReleaseBytesCallback?
+ ) -> CVPixelBuffer? {
+ var pxBuffer: CVPixelBuffer?
+ CVPixelBufferCreateWithBytes(
+ kCFAllocatorDefault,
+ width,
+ height,
+ format,
+ baseAddress,
+ bytesPerRow,
+ releaseCallback,
+ nil,
+ nil,
+ &pxBuffer
+ )
+ return pxBuffer
+ }
+
+ private static func pixelBufferToVisionImage(_ pixelBufferRef: CVPixelBuffer) -> VisionImage? {
+ let ciImage = CIImage(cvPixelBuffer: pixelBufferRef)
+ let context = CIContext(options: nil)
+ let width = CVPixelBufferGetWidth(pixelBufferRef)
+ let height = CVPixelBufferGetHeight(pixelBufferRef)
+ guard let cgImage = context.createCGImage(
+ ciImage,
+ from: CGRect(x: 0, y: 0, width: width, height: height)
+ ) else {
+ return nil
+ }
+ // Swift ARC manages CGImage; UIImage(cgImage:) retains it.
+ let uiImage = UIImage(cgImage: cgImage)
+ return VisionImage(image: uiImage)
+ }
+
+ private static func bitmapToVisionImage(_ imageDict: [String: Any]) -> VisionImage? {
+ guard let bitmapData = imageDict["bitmapData"] as? FlutterStandardTypedData else {
+ return nil
+ }
+
+ if let metadata = imageDict["metadata"] as? [String: Any],
+ let width = metadata["width"] as? NSNumber,
+ let height = metadata["height"] as? NSNumber {
+ var result: VisionImage?
+ let colorSpace = CGColorSpaceCreateDeviceRGB()
+ let bytesPerPixel = 4
+ let bytesPerRow = bytesPerPixel * width.intValue
+ let bitsPerComponent = 8
+
+ bitmapData.data.withUnsafeBytes { rawBuffer in
+ guard let rawData = rawBuffer.baseAddress?.assumingMemoryBound(to: UInt8.self) else {
+ return
+ }
+ guard let context = CGContext(
+ data: UnsafeMutableRawPointer(mutating: rawData),
+ width: width.intValue,
+ height: height.intValue,
+ bitsPerComponent: bitsPerComponent,
+ bytesPerRow: bytesPerRow,
+ space: colorSpace,
+ bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue | CGBitmapInfo.byteOrder32Big.rawValue
+ ) else {
+ return
+ }
+ guard let imageRef = context.makeImage() else {
+ return
+ }
+ let image = UIImage(cgImage: imageRef)
+ let visionImage = VisionImage(image: image)
+ visionImage.orientation = image.imageOrientation
+ result = visionImage
+ }
+ if let result = result {
+ return result
+ }
+ }
+
+ guard let image = UIImage(data: bitmapData.data) else {
+ return nil
+ }
+ let visionImage = VisionImage(image: image)
+ visionImage.orientation = image.imageOrientation
+ return visionImage
+ }
+}
diff --git a/packages/google_mlkit_commons/ios/google_mlkit_commons.podspec b/packages/google_mlkit_commons/ios/google_mlkit_commons.podspec
index 4927ec62..16f46bd3 100644
--- a/packages/google_mlkit_commons/ios/google_mlkit_commons.podspec
+++ b/packages/google_mlkit_commons/ios/google_mlkit_commons.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'MLKitVision', '~> 10.0.0'
s.platform = :ios, '15.5'
diff --git a/packages/google_mlkit_digital_ink_recognition/ios/Assets/.gitkeep b/packages/google_mlkit_digital_ink_recognition/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_digital_ink_recognition/ios/Classes/GoogleMlKitDigitalInkRecognitionPlugin.h b/packages/google_mlkit_digital_ink_recognition/ios/Classes/GoogleMlKitDigitalInkRecognitionPlugin.h
deleted file mode 100644
index fb228ecd..00000000
--- a/packages/google_mlkit_digital_ink_recognition/ios/Classes/GoogleMlKitDigitalInkRecognitionPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitDigitalInkRecognitionPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_digital_ink_recognition/ios/Classes/GoogleMlKitDigitalInkRecognitionPlugin.m b/packages/google_mlkit_digital_ink_recognition/ios/Classes/GoogleMlKitDigitalInkRecognitionPlugin.m
deleted file mode 100644
index 181a27e3..00000000
--- a/packages/google_mlkit_digital_ink_recognition/ios/Classes/GoogleMlKitDigitalInkRecognitionPlugin.m
+++ /dev/null
@@ -1,146 +0,0 @@
-#import "GoogleMlKitDigitalInkRecognitionPlugin.h"
-#import
-#import
-#import
-
-#define channelName @"google_mlkit_digital_ink_recognizer"
-#define startDigitalInkRecognizer @"vision#startDigitalInkRecognizer"
-#define closeDigitalInkRecognizer @"vision#closeDigitalInkRecognizer"
-#define manageInkModels @"vision#manageInkModels"
-
-@implementation GoogleMlKitDigitalInkRecognitionPlugin {
- NSMutableDictionary *instances;
- GenericModelManager *genericModelManager;
-}
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitDigitalInkRecognitionPlugin* instance = [[GoogleMlKitDigitalInkRecognitionPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startDigitalInkRecognizer]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:manageInkModels]) {
- [self manageModel:call result:result];
- } else if ([call.method isEqualToString:closeDigitalInkRecognizer]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- NSString *modelTag = call.arguments[@"model"];
-
- MLKDigitalInkRecognitionModelIdentifier *identifier = [MLKDigitalInkRecognitionModelIdentifier modelIdentifierForLanguageTag:modelTag];
- MLKDigitalInkRecognitionModel *model = [[MLKDigitalInkRecognitionModel alloc] initWithModelIdentifier:identifier];
- MLKModelManager *modelManager = [MLKModelManager modelManager];
-
- BOOL isModelDownloaded = [modelManager isModelDownloaded:model];
-
- if (!isModelDownloaded) {
- FlutterError *error = [FlutterError errorWithCode:@"Error Model has not been downloaded yet"
- message:@"Model has not been downloaded yet"
- details:@"Model has not been downloaded yet"];
- result(error);
- return;
- }
-
- NSString *uid = call.arguments[@"id"];
- MLKDigitalInkRecognizer *recognizer = [instances objectForKey:uid];
- if (recognizer == NULL) {
- MLKDigitalInkRecognizerOptions *options = [[MLKDigitalInkRecognizerOptions alloc] initWithModel:model];
- recognizer = [MLKDigitalInkRecognizer digitalInkRecognizerWithOptions:options];
- instances[uid] = recognizer;
- }
-
- NSMutableArray *strokes = [NSMutableArray array];
- NSArray *strokeList = call.arguments[@"ink"][@"strokes"];
- for (NSDictionary *strokeMap in strokeList) {
- NSMutableArray *stroke = [NSMutableArray array];
- NSArray *pointsList = strokeMap[@"points"];
- for (NSDictionary *pointMap in pointsList) {
- NSNumber *x = pointMap[@"x"];
- NSNumber *y = pointMap[@"y"];
- NSNumber *t = pointMap[@"t"];
- MLKStrokePoint *strokePoint = [[MLKStrokePoint alloc] initWithX:x.floatValue y:y.floatValue t:t.longValue];
- [stroke addObject:strokePoint];
- }
- [strokes addObject:[[MLKStroke alloc] initWithPoints:stroke]];
- }
- MLKInk *ink = [[MLKInk alloc] initWithStrokes:strokes];
-
- MLKDigitalInkRecognitionContext *context;
- NSDictionary *contextMap = call.arguments[@"context"];
- if ([contextMap isKindOfClass: [NSDictionary class]]) {
- NSString *preContext = contextMap[@"preContext"];
- if ([preContext isKindOfClass: [NSNull class]]) {
- preContext = @"";
- }
- MLKWritingArea *writingArea;
- NSDictionary *writingAreaMap = contextMap[@"writingArea"];
- if ([writingAreaMap isKindOfClass: [NSDictionary class]]) {
- NSNumber *width = writingAreaMap[@"width"];
- NSNumber *height = writingAreaMap[@"height"];
- writingArea = [[MLKWritingArea alloc] initWithWidth:width.floatValue height:height.floatValue];
- }
- context = [[MLKDigitalInkRecognitionContext alloc] initWithPreContext:preContext writingArea:writingArea];
- }
-
- if (context != NULL) {
- [recognizer recognizeInk:ink
- context:context
- completion:^(MLKDigitalInkRecognitionResult * _Nullable recognitionResult,
- NSError * _Nullable error) {
- [self process:recognitionResult error:error result:result];
- }];
- } else {
- [recognizer recognizeInk:ink
- completion:^(MLKDigitalInkRecognitionResult * _Nullable recognitionResult,
- NSError * _Nullable error) {
- [self process:recognitionResult error:error result:result];
- }];
- }
-}
-
-- (void )process:(MLKDigitalInkRecognitionResult *)recognitionResult
- error:(NSError *)error
- result:(FlutterResult)result {
- if (error) {
- result(getFlutterError(error));
- return;
- } else if (!recognitionResult) {
- result(NULL);
- return;
- }
- NSMutableArray *candidates = [NSMutableArray new];
- for(MLKDigitalInkRecognitionCandidate *candidate in recognitionResult.candidates) {
- NSDictionary *dictionary = @{@"text": candidate.text,
- @"score": @(candidate.score.doubleValue)};
- [candidates addObject:dictionary];
- }
- result(candidates);
-}
-
-- (void)manageModel:(FlutterMethodCall *)call result:(FlutterResult)result {
- NSString *modelTag = call.arguments[@"model"];
- MLKDigitalInkRecognitionModelIdentifier *identifier = [MLKDigitalInkRecognitionModelIdentifier modelIdentifierForLanguageTag:modelTag];
- MLKDigitalInkRecognitionModel *model = [[MLKDigitalInkRecognitionModel alloc] initWithModelIdentifier:identifier];
- genericModelManager = [[GenericModelManager alloc] init];
- [genericModelManager manageModel:model call:call result:result];
-}
-
-@end
diff --git a/packages/google_mlkit_digital_ink_recognition/ios/Classes/GoogleMlKitDigitalInkRecognitionPlugin.swift b/packages/google_mlkit_digital_ink_recognition/ios/Classes/GoogleMlKitDigitalInkRecognitionPlugin.swift
new file mode 100644
index 00000000..b12328ed
--- /dev/null
+++ b/packages/google_mlkit_digital_ink_recognition/ios/Classes/GoogleMlKitDigitalInkRecognitionPlugin.swift
@@ -0,0 +1,162 @@
+import Flutter
+import MLKitCommon
+import MLKitDigitalInkRecognition
+import google_mlkit_commons
+
+// Flutter plugin that bridges Google MLKit digital-ink recognition to Dart
+// over the "google_mlkit_digital_ink_recognizer" method channel.
+@objc
+public class GoogleMlKitDigitalInkRecognitionPlugin: NSObject, FlutterPlugin {
+  // Live recognizers keyed by the Dart-side instance id.
+  private var instances: [String: DigitalInkRecognizer] = [:]
+  private var genericModelManager: GenericModelManager?
+
+  public static func register(with registrar: FlutterPluginRegistrar) {
+    let channel = FlutterMethodChannel(
+      name: "google_mlkit_digital_ink_recognizer",
+      binaryMessenger: registrar.messenger()
+    )
+    let instance = GoogleMlKitDigitalInkRecognitionPlugin()
+    registrar.addMethodCallDelegate(instance, channel: channel)
+  }
+
+  public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+    switch call.method {
+    case "vision#startDigitalInkRecognizer":
+      handleDetection(call: call, result: result)
+    case "vision#manageInkModels":
+      manageModel(call: call, result: result)
+    case "vision#closeDigitalInkRecognizer":
+      if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+        instances.removeValue(forKey: uid)
+      }
+      result(nil)
+    default:
+      result(FlutterMethodNotImplemented)
+    }
+  }
+
+  // Validates arguments, resolves the model/recognizer, and starts recognition.
+  private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+    guard let args = call.arguments as? [String: Any],
+          let modelTag = args["model"] as? String,
+          let uid = args["id"] as? String else {
+      result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+      return
+    }
+    guard let identifier = DigitalInkRecognitionModelIdentifier(forLanguageTag: modelTag) else {
+      result(FlutterError(code: "invalid_model", message: "Invalid language tag: \(modelTag)", details: nil))
+      return
+    }
+    let model = DigitalInkRecognitionModel(modelIdentifier: identifier)
+    guard ModelManager.modelManager().isModelDownloaded(model) else {
+      result(FlutterError(
+        code: "Error Model has not been downloaded yet",
+        message: "Model has not been downloaded yet",
+        details: "Model has not been downloaded yet"
+      ))
+      return
+    }
+    guard let inkMap = args["ink"] as? [String: Any],
+          let strokesData = inkMap["strokes"] as? [[String: Any]] else {
+      result(FlutterError(code: "invalid_args", message: "Missing ink data", details: nil))
+      return
+    }
+    let recognizer = recognizerFor(uid: uid, model: model)
+    let ink = Ink(strokes: parseStrokes(strokesData))
+    let completion: (DigitalInkRecognitionResult?, Error?) -> Void = { recognitionResult, error in
+      self.processResult(recognitionResult: recognitionResult, error: error, result: result)
+    }
+    if let context = parseContext(args["context"] as? [String: Any]) {
+      recognizer.recognize(ink: ink, context: context, completion: completion)
+    } else {
+      recognizer.recognize(ink: ink, completion: completion)
+    }
+  }
+
+  // Returns the cached recognizer for uid, creating and caching one if needed.
+  private func recognizerFor(uid: String, model: DigitalInkRecognitionModel) -> DigitalInkRecognizer {
+    if let existing = instances[uid] {
+      return existing
+    }
+    let options = DigitalInkRecognizerOptions(model: model)
+    let recognizer = DigitalInkRecognizer.digitalInkRecognizer(options: options)
+    instances[uid] = recognizer
+    return recognizer
+  }
+
+  // Converts the method-channel stroke maps into MLKit strokes.
+  // Points with missing coordinates fall back to 0 (matches prior behavior).
+  private func parseStrokes(_ strokesData: [[String: Any]]) -> [Stroke] {
+    return strokesData.map { strokeMap -> Stroke in
+      guard let pointsList = strokeMap["points"] as? [[String: Any]] else {
+        return Stroke(points: [])
+      }
+      let points = pointsList.map { pointMap -> StrokePoint in
+        let coordX = (pointMap["x"] as? NSNumber)?.floatValue ?? 0
+        let coordY = (pointMap["y"] as? NSNumber)?.floatValue ?? 0
+        let timeMs = (pointMap["t"] as? NSNumber)?.intValue ?? 0
+        return StrokePoint(x: coordX, y: coordY, t: timeMs)
+      }
+      return Stroke(points: points)
+    }
+  }
+
+  // Builds an optional recognition context (preContext plus writing area).
+  private func parseContext(_ contextMap: [String: Any]?) -> DigitalInkRecognitionContext? {
+    guard let ctx = contextMap else { return nil }
+    let preContext = ctx["preContext"] as? String ?? ""
+    var writingArea: WritingArea?
+    if let writingAreaMap = ctx["writingArea"] as? [String: Any],
+       let width = writingAreaMap["width"] as? NSNumber,
+       let height = writingAreaMap["height"] as? NSNumber {
+      writingArea = WritingArea(width: width.floatValue, height: height.floatValue)
+    }
+    return DigitalInkRecognitionContext(preContext: preContext, writingArea: writingArea)
+  }
+
+  // Maps the MLKit completion payload onto the Flutter result.
+  private func processResult(
+    recognitionResult: DigitalInkRecognitionResult?,
+    error: Error?,
+    result: FlutterResult
+  ) {
+    if let error = error {
+      let nsError = error as NSError
+      result(FlutterError(
+        code: "Error \(nsError.code)",
+        message: nsError.domain,
+        details: nsError.localizedDescription
+      ))
+      return
+    }
+    guard let recognitionResult = recognitionResult else {
+      result(nil)
+      return
+    }
+    let candidates = recognitionResult.candidates.map { candidate in
+      [
+        "text": candidate.text,
+        "score": candidate.score?.doubleValue ?? 0
+      ] as [String: Any]
+    }
+    result(candidates)
+  }
+
+  // Downloads/deletes/queries the remote model via the shared manager.
+  private func manageModel(call: FlutterMethodCall, result: @escaping FlutterResult) {
+    guard let args = call.arguments as? [String: Any],
+          let modelTag = args["model"] as? String else {
+      result(FlutterError(code: "invalid_args", message: "Missing model argument", details: nil))
+      return
+    }
+    guard let identifier = DigitalInkRecognitionModelIdentifier(forLanguageTag: modelTag) else {
+      result(FlutterError(code: "invalid_model", message: "Invalid language tag: \(modelTag)", details: nil))
+      return
+    }
+    let model = DigitalInkRecognitionModel(modelIdentifier: identifier)
+    let manager = GenericModelManager()
+    genericModelManager = manager
+    manager.manage(model: model, call: call, result: result)
+  }
+}
diff --git a/packages/google_mlkit_digital_ink_recognition/ios/google_mlkit_digital_ink_recognition.podspec b/packages/google_mlkit_digital_ink_recognition/ios/google_mlkit_digital_ink_recognition.podspec
index 57462718..49807eed 100644
--- a/packages/google_mlkit_digital_ink_recognition/ios/google_mlkit_digital_ink_recognition.podspec
+++ b/packages/google_mlkit_digital_ink_recognition/ios/google_mlkit_digital_ink_recognition.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/DigitalInkRecognition', '~> 9.0.0'
s.dependency 'google_mlkit_commons'
diff --git a/packages/google_mlkit_document_scanner/ios/Assets/.gitkeep b/packages/google_mlkit_document_scanner/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_document_scanner/ios/Classes/GoogleMlKitDocumentScannerPlugin.h b/packages/google_mlkit_document_scanner/ios/Classes/GoogleMlKitDocumentScannerPlugin.h
deleted file mode 100644
index 337a01a4..00000000
--- a/packages/google_mlkit_document_scanner/ios/Classes/GoogleMlKitDocumentScannerPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitDocumentScannerPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_document_scanner/ios/Classes/GoogleMlKitDocumentScannerPlugin.m b/packages/google_mlkit_document_scanner/ios/Classes/GoogleMlKitDocumentScannerPlugin.m
deleted file mode 100644
index 4b10dcab..00000000
--- a/packages/google_mlkit_document_scanner/ios/Classes/GoogleMlKitDocumentScannerPlugin.m
+++ /dev/null
@@ -1,20 +0,0 @@
-#import "GoogleMlKitDocumentScannerPlugin.h"
-
-@implementation GoogleMlKitDocumentScannerPlugin
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:@"google_mlkit_document_scanner"
- binaryMessenger:[registrar messenger]];
- GoogleMlKitDocumentScannerPlugin* instance = [[GoogleMlKitDocumentScannerPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
- if ([@"getPlatformVersion" isEqualToString:call.method]) {
- result([@"iOS " stringByAppendingString:[[UIDevice currentDevice] systemVersion]]);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-@end
diff --git a/packages/google_mlkit_document_scanner/ios/Classes/GoogleMlKitDocumentScannerPlugin.swift b/packages/google_mlkit_document_scanner/ios/Classes/GoogleMlKitDocumentScannerPlugin.swift
new file mode 100644
index 00000000..fb9595ab
--- /dev/null
+++ b/packages/google_mlkit_document_scanner/ios/Classes/GoogleMlKitDocumentScannerPlugin.swift
@@ -0,0 +1,22 @@
+import Flutter
+import UIKit
+
+@objc
+public class GoogleMlKitDocumentScannerPlugin: NSObject, FlutterPlugin {
+  // Registers the plugin on the "google_mlkit_document_scanner" channel.
+  public static func register(with registrar: FlutterPluginRegistrar) {
+    let methodChannel = FlutterMethodChannel(
+      name: "google_mlkit_document_scanner",
+      binaryMessenger: registrar.messenger()
+    )
+    registrar.addMethodCallDelegate(GoogleMlKitDocumentScannerPlugin(), channel: methodChannel)
+  }
+
+  public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+    guard call.method == "getPlatformVersion" else {
+      result(FlutterMethodNotImplemented)
+      return
+    }
+    result("iOS " + UIDevice.current.systemVersion)
+  }
+}
diff --git a/packages/google_mlkit_document_scanner/ios/google_mlkit_document_scanner.podspec b/packages/google_mlkit_document_scanner/ios/google_mlkit_document_scanner.podspec
index 98c46549..dac076d0 100644
--- a/packages/google_mlkit_document_scanner/ios/google_mlkit_document_scanner.podspec
+++ b/packages/google_mlkit_document_scanner/ios/google_mlkit_document_scanner.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.author = 'Multiple Authors'
+ s.author = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
# s.dependency 'GoogleMLKit/DocumentScanner', '~> 5.0.0'
s.platform = :ios, '15.5'
diff --git a/packages/google_mlkit_entity_extraction/ios/Assets/.gitkeep b/packages/google_mlkit_entity_extraction/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_entity_extraction/ios/Classes/GoogleMlKitEntityExtractionPlugin.h b/packages/google_mlkit_entity_extraction/ios/Classes/GoogleMlKitEntityExtractionPlugin.h
deleted file mode 100644
index 850c6b83..00000000
--- a/packages/google_mlkit_entity_extraction/ios/Classes/GoogleMlKitEntityExtractionPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitEntityExtractionPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_entity_extraction/ios/Classes/GoogleMlKitEntityExtractionPlugin.m b/packages/google_mlkit_entity_extraction/ios/Classes/GoogleMlKitEntityExtractionPlugin.m
deleted file mode 100644
index 7cc0e196..00000000
--- a/packages/google_mlkit_entity_extraction/ios/Classes/GoogleMlKitEntityExtractionPlugin.m
+++ /dev/null
@@ -1,213 +0,0 @@
-#import "GoogleMlKitEntityExtractionPlugin.h"
-#import
-#import
-
-#define channelName @"google_mlkit_entity_extractor"
-#define startEntityExtractor @"nlp#startEntityExtractor"
-#define closeEntityExtractor @"nlp#closeEntityExtractor"
-#define manageEntityExtractionModels @"nlp#manageEntityExtractionModels"
-
-@implementation GoogleMlKitEntityExtractionPlugin {
- NSMutableDictionary *instances;
- GenericModelManager *genericModelManager;
-}
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitEntityExtractionPlugin* instance = [[GoogleMlKitEntityExtractionPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startEntityExtractor]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:manageEntityExtractionModels]) {
- [self manageModel:call result:result];
- } else if ([call.method isEqualToString:closeEntityExtractor]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- NSString *text = call.arguments[@"text"];
-
- NSString *uid = call.arguments[@"id"];
- MLKEntityExtractor *entityExtractor = [instances objectForKey:uid];
- if (entityExtractor == NULL) {
- NSString *language = call.arguments[@"language"];
- MLKEntityExtractorOptions *options = [[MLKEntityExtractorOptions alloc] initWithModelIdentifier:language];
- entityExtractor = [MLKEntityExtractor entityExtractorWithOptions:options];
- instances[uid] = entityExtractor;
- }
-
- MLKEntityExtractionParams *params = [[MLKEntityExtractionParams alloc] init];
- NSDictionary *parameters = call.arguments[@"parameters"];
-
- NSString *timezone = parameters[@"timezone"];
- if ([timezone isKindOfClass: [NSString class]] && timezone.length > 0) {
- params.referenceTimeZone = [NSTimeZone timeZoneWithAbbreviation:timezone];
- }
-
- NSString *time = parameters[@"time"];
- if ([time isKindOfClass: [NSNumber class]]) {
- // NSTimeInterval should is expressed in seconds, not milliseconds
- params.referenceTime = [NSDate dateWithTimeIntervalSince1970: time.doubleValue / 1000];
- }
-
- NSString *locale = parameters[@"locale"];
- if ([locale isKindOfClass: [NSString class]] && locale.length > 0) {
- params.preferredLocale = [NSLocale localeWithLocaleIdentifier:locale];
- }
-
- NSArray *filtersValues = parameters[@"filters"];
- if ([filtersValues isKindOfClass: [NSArray class]] && filtersValues.count > 0) {
- NSMutableSet *filters = [NSMutableSet set];
- for(NSNumber *number in filtersValues) {
- int value = number.intValue;
- switch(value) {
- case 1:
- [filters addObject:MLKEntityExtractionEntityTypeAddress];
- break;
- case 2:
- [filters addObject:MLKEntityExtractionEntityTypeDateTime];
- break;
- case 3:
- [filters addObject:MLKEntityExtractionEntityTypeEmail];
- break;
- case 4:
- [filters addObject:MLKEntityExtractionEntityTypeFlightNumber];
- break;
- case 5:
- [filters addObject:MLKEntityExtractionEntityTypeIBAN];
- break;
- case 6:
- [filters addObject:MLKEntityExtractionEntityTypeISBN];
- break;
- case 7:
- [filters addObject:MLKEntityExtractionEntityTypePaymentCard];
- break;
- case 8:
- [filters addObject:MLKEntityExtractionEntityTypePhone];
- break;
- case 9:
- [filters addObject:MLKEntityExtractionEntityTypeTrackingNumber];
- break;
- case 10:
- [filters addObject:MLKEntityExtractionEntityTypeURL];
- break;
- case 11:
- [filters addObject:MLKEntityExtractionEntityTypeMoney];
- break;
- default:
- break;
- }
- }
- params.typesFilter = filters;
- }
-
- [entityExtractor downloadModelIfNeededWithCompletion:^(NSError *_Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- }
- // Model downloaded successfully. Okay to annotate.
-
- [entityExtractor annotateText:text
- withParams:params
- completion:^(NSArray *_Nullable annotations,
- NSError *_Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- } else if (!annotations) {
- result(NULL);
- return;
- }
-
- NSMutableArray *allAnnotations = [NSMutableArray array];
- for (MLKEntityAnnotation *annotation in annotations) {
- NSMutableDictionary *data = [NSMutableDictionary dictionary];
- data[@"text"] = [text substringWithRange:annotation.range];
- data[@"start"] = @((int)annotation.range.location);
- data[@"end"] = @((int)(annotation.range.location + annotation.range.length));
-
- NSMutableArray *allEntities = [NSMutableArray array];
- NSArray *entities = annotation.entities;
- for (MLKEntity *entity in entities) {
- NSMutableDictionary *entityData = [NSMutableDictionary dictionary];
- int type = 0;
-
- if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypeAddress]) {
- type = 1;
- } else if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypeDateTime]) {
- type = 2;
- entityData[@"dateTimeGranularity"] = @(entity.dateTimeEntity.dateTimeGranularity);
- // result is expected in milliseconds, not seconds.
- entityData[@"timestamp"] = @(entity.dateTimeEntity.dateTime.timeIntervalSince1970 * 1000);
- } else if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypeEmail]) {
- type = 3;
- } else if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypeFlightNumber]) {
- type = 4;
- entityData[@"code"] = entity.flightNumberEntity.airlineCode;
- entityData[@"number"] = entity.flightNumberEntity.flightNumber;
- } else if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypeIBAN]) {
- type = 5;
- entityData[@"iban"] = entity.IBANEntity.IBAN;
- entityData[@"code"] = entity.IBANEntity.countryCode;
- } else if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypeISBN]) {
- type = 6;
- entityData[@"isbn"] = entity.ISBNEntity.ISBN;
- } else if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypePaymentCard]) {
- type = 7;
- entityData[@"network"] = @(entity.paymentCardEntity.paymentCardNetwork);
- entityData[@"number"] = entity.paymentCardEntity.paymentCardNumber;
- } else if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypePhone]) {
- type = 8;
- } else if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypeTrackingNumber]) {
- type = 9;
- entityData[@"carrier"] = @(entity.trackingNumberEntity.parcelCarrier);
- entityData[@"number"] = entity.trackingNumberEntity.parcelTrackingNumber;
- } else if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypeURL]) {
- type = 10;
- } else if ([entity.entityType isEqualToString: MLKEntityExtractionEntityTypeMoney]) {
- type = 11;
- entityData[@"fraction"] = @(entity.moneyEntity.fractionalPart);
- entityData[@"integer"] = @(entity.moneyEntity.integerPart);
- entityData[@"unnormalized"] = entity.moneyEntity.unnormalizedCurrency;
- }
-
- entityData[@"type"] = @(type);
- entityData[@"raw"] = [NSString stringWithFormat:@"%@", entity];
-
- [allEntities addObject:entityData];
- }
- data[@"entities"] = allEntities;
- [allAnnotations addObject:data];
- }
-
- result(allAnnotations);
- }];
- }];
-}
-
-- (void)manageModel:(FlutterMethodCall *)call result:(FlutterResult)result {
- NSString *modelTag = call.arguments[@"model"];
- MLKEntityExtractionRemoteModel *model = [MLKEntityExtractionRemoteModel entityExtractorRemoteModelWithIdentifier:modelTag];
- genericModelManager = [[GenericModelManager alloc] init];
- [genericModelManager manageModel:model call:call result:result];
-}
-
-@end
diff --git a/packages/google_mlkit_entity_extraction/ios/Classes/GoogleMlKitEntityExtractionPlugin.swift b/packages/google_mlkit_entity_extraction/ios/Classes/GoogleMlKitEntityExtractionPlugin.swift
new file mode 100644
index 00000000..f4b82c0f
--- /dev/null
+++ b/packages/google_mlkit_entity_extraction/ios/Classes/GoogleMlKitEntityExtractionPlugin.swift
@@ -0,0 +1,184 @@
+import Flutter
+import MLKitEntityExtraction
+import google_mlkit_commons
+
+@objc
+public class GoogleMlKitEntityExtractionPlugin: NSObject, FlutterPlugin {
+  // Live extractors keyed by the Dart-side instance id.
+  private var instances: [String: EntityExtractor] = [:]
+  private var genericModelManager: GenericModelManager?
+
+  public static func register(with registrar: FlutterPluginRegistrar) {
+    let channel = FlutterMethodChannel(
+      name: "google_mlkit_entity_extractor",
+      binaryMessenger: registrar.messenger()
+    )
+    let instance = GoogleMlKitEntityExtractionPlugin()
+    registrar.addMethodCallDelegate(instance, channel: channel)
+  }
+
+  public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+    switch call.method {
+    case "nlp#startEntityExtractor":
+      handleDetection(call: call, result: result)
+    case "nlp#manageEntityExtractionModels":
+      manageModel(call: call, result: result)
+    case "nlp#closeEntityExtractor":
+      if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+        instances.removeValue(forKey: uid)
+      }
+      result(nil)
+    default:
+      result(FlutterMethodNotImplemented)
+    }
+  }
+
+  private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+    guard let args = call.arguments as? [String: Any],
+          let text = args["text"] as? String,
+          let uid = args["id"] as? String else {
+      result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+      return
+    }
+
+    let entityExtractor: EntityExtractor
+    if let existing = instances[uid] {
+      entityExtractor = existing
+    } else {
+      guard let language = args["language"] as? String else {
+        result(FlutterError(code: "invalid_args", message: "Missing language", details: nil))
+        return
+      }
+      // EntityExtractionModelIdentifier(rawValue:) is non-failable; invalid tags may fail at runtime.
+      let modelIdentifier = EntityExtractionModelIdentifier(rawValue: language)
+      let options = EntityExtractorOptions(modelIdentifier: modelIdentifier)
+      entityExtractor = EntityExtractor.entityExtractor(options: options)
+      instances[uid] = entityExtractor
+    }
+
+    let params = EntityExtractionParams()
+    // Dart may send null for "parameters"; `as?` yields nil for both nil and NSNull, so default to empty.
+    let parameters = args["parameters"] as? [String: Any] ?? [:]
+    if let timezone = parameters["timezone"] as? String, !timezone.isEmpty {
+      params.referenceTimeZone = TimeZone(abbreviation: timezone)
+    }
+    if let time = parameters["time"] as? NSNumber {
+      // Dart sends milliseconds; TimeInterval is seconds.
+      params.referenceTime = Date(timeIntervalSince1970: time.doubleValue / 1000)
+    }
+    if let locale = parameters["locale"] as? String, !locale.isEmpty {
+      params.preferredLocale = Locale(identifier: locale)
+    }
+    if let filtersValues = parameters["filters"] as? [NSNumber], !filtersValues.isEmpty {
+      let filters: Set = Set(filtersValues.compactMap { numberToEntityType($0.intValue) })
+      params.typesFilter = filters
+    }
+
+    entityExtractor.downloadModelIfNeeded { error in
+      if let error = error as NSError? {
+        result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+        return
+      }
+      entityExtractor.annotateText(text, params: params) { annotations, error in
+        if let error = error as NSError? {
+          result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+          return
+        }
+        guard let annotations = annotations else {
+          result(nil)
+          return
+        }
+        let allAnnotations = annotations.map { annotation -> [String: Any] in
+          let range = annotation.range
+          let substring = (text as NSString).substring(with: range)
+          let entities = annotation.entities.map { entity in self.entityToDictionary(text: text, entity: entity) }
+          return [
+            "text": substring,
+            "start": range.location,
+            "end": range.location + range.length,
+            "entities": entities
+          ]
+        }
+        result(allAnnotations)
+      }
+    }
+  }
+
+  private func numberToEntityType(_ value: Int) -> EntityType? {
+    switch value {
+    case 1: return .address
+    case 2: return .dateTime
+    case 3: return .email
+    case 4: return .flightNumber
+    case 5: return .IBAN
+    case 6: return .ISBN
+    case 7: return .paymentCard
+    case 8: return .phone
+    case 9: return .trackingNumber
+    case 10: return .URL
+    case 11: return .money
+    default: return nil
+    }
+  }
+
+  private func entityToDictionary(text: String, entity: Entity) -> [String: Any] {
+    var entityData: [String: Any] = [
+      "type": entityTypeToNumber(entity.entityType),
+      "raw": String(describing: entity)
+    ]
+    if entity.entityType == .dateTime, let dateTimeEntity = entity.dateTimeEntity {
+      entityData["dateTimeGranularity"] = dateTimeEntity.dateTimeGranularity.rawValue
+      entityData["timestamp"] = dateTimeEntity.dateTime.timeIntervalSince1970 * 1000
+    } else if entity.entityType == .flightNumber, let flightEntity = entity.flightNumberEntity {
+      entityData["code"] = flightEntity.airlineCode
+      entityData["number"] = flightEntity.flightNumber
+    } else if entity.entityType == .IBAN, let ibanEntity = entity.ibanEntity {
+      entityData["iban"] = ibanEntity.iban
+      entityData["code"] = ibanEntity.countryCode
+    } else if entity.entityType == .ISBN, let isbnEntity = entity.isbnEntity {
+      entityData["isbn"] = isbnEntity.isbn
+    } else if entity.entityType == .paymentCard, let cardEntity = entity.paymentCardEntity {
+      entityData["network"] = cardEntity.paymentCardNetwork.rawValue
+      entityData["number"] = cardEntity.paymentCardNumber
+    } else if entity.entityType == .trackingNumber, let trackingEntity = entity.trackingNumberEntity {
+      entityData["carrier"] = trackingEntity.parcelCarrier.rawValue
+      entityData["number"] = trackingEntity.parcelTrackingNumber
+    } else if entity.entityType == .money, let moneyEntity = entity.moneyEntity {
+      entityData["fraction"] = moneyEntity.fractionalPart
+      entityData["integer"] = moneyEntity.integerPart
+      entityData["unnormalized"] = moneyEntity.unnormalizedCurrency
+    }
+    return entityData
+  }
+
+  private func entityTypeToNumber(_ type: EntityType) -> Int {
+    switch type {
+    case .address: return 1
+    case .dateTime: return 2
+    case .email: return 3
+    case .flightNumber: return 4
+    case .IBAN: return 5
+    case .ISBN: return 6
+    case .paymentCard: return 7
+    case .phone: return 8
+    case .trackingNumber: return 9
+    case .URL: return 10
+    case .money: return 11
+    default: return 0
+    }
+  }
+
+  private func manageModel(call: FlutterMethodCall, result: @escaping FlutterResult) {
+    guard let args = call.arguments as? [String: Any],
+          let modelTag = args["model"] as? String else {
+      result(FlutterError(code: "invalid_args", message: "Missing model argument", details: nil))
+      return
+    }
+    // EntityExtractionModelIdentifier(rawValue:) is non-failable; invalid tags may fail when the model is used.
+    let modelIdentifier = EntityExtractionModelIdentifier(rawValue: modelTag)
+    let model = EntityExtractorRemoteModel.entityExtractorRemoteModel(identifier: modelIdentifier)
+    let manager = GenericModelManager()
+    genericModelManager = manager
+    manager.manage(model: model, call: call, result: result)
+  }
+}
diff --git a/packages/google_mlkit_entity_extraction/ios/google_mlkit_entity_extraction.podspec b/packages/google_mlkit_entity_extraction/ios/google_mlkit_entity_extraction.podspec
index f5e8a35e..b33a4ff5 100644
--- a/packages/google_mlkit_entity_extraction/ios/google_mlkit_entity_extraction.podspec
+++ b/packages/google_mlkit_entity_extraction/ios/google_mlkit_entity_extraction.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/EntityExtraction', '~> 9.0.0'
s.dependency 'google_mlkit_commons'
diff --git a/packages/google_mlkit_face_detection/ios/Assets/.gitkeep b/packages/google_mlkit_face_detection/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_face_detection/ios/Classes/GoogleMlKitFaceDetectionPlugin.h b/packages/google_mlkit_face_detection/ios/Classes/GoogleMlKitFaceDetectionPlugin.h
deleted file mode 100644
index a97068dd..00000000
--- a/packages/google_mlkit_face_detection/ios/Classes/GoogleMlKitFaceDetectionPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import <Flutter/Flutter.h>
-
-@interface GoogleMlKitFaceDetectionPlugin : NSObject <FlutterPlugin>
-@end
diff --git a/packages/google_mlkit_face_detection/ios/Classes/GoogleMlKitFaceDetectionPlugin.m b/packages/google_mlkit_face_detection/ios/Classes/GoogleMlKitFaceDetectionPlugin.m
deleted file mode 100644
index ceec85bb..00000000
--- a/packages/google_mlkit_face_detection/ios/Classes/GoogleMlKitFaceDetectionPlugin.m
+++ /dev/null
@@ -1,196 +0,0 @@
-#import "GoogleMlKitFaceDetectionPlugin.h"
-#import <MLKitFaceDetection/MLKitFaceDetection.h>
-#import <google_mlkit_commons/GoogleMlKitCommonsPlugin.h>
-
-#define channelName @"google_mlkit_face_detector"
-#define startFaceDetector @"vision#startFaceDetector"
-#define closeFaceDetector @"vision#closeFaceDetector"
-
-@implementation GoogleMlKitFaceDetectionPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitFaceDetectionPlugin* instance = [[GoogleMlKitFaceDetectionPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startFaceDetector]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:closeFaceDetector]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (MLKFaceDetector*)initialize:(FlutterMethodCall *)call {
- NSDictionary *dictionary = call.arguments[@"options"];
-
- MLKFaceDetectorOptions *options = [[MLKFaceDetectorOptions alloc] init];
- BOOL enableClassification = [[dictionary objectForKey:@"enableClassification"] boolValue];
- options.classificationMode = enableClassification ? MLKFaceDetectorClassificationModeAll : MLKFaceDetectorClassificationModeNone;
-
- BOOL enableLandmarks = [[dictionary objectForKey:@"enableLandmarks"] boolValue];
- options.landmarkMode = enableLandmarks ? MLKFaceDetectorLandmarkModeAll : MLKFaceDetectorLandmarkModeNone;
-
- BOOL enableContours = [[dictionary objectForKey:@"enableContours"] boolValue];
- options.contourMode = enableContours ? MLKFaceDetectorContourModeAll : MLKFaceDetectorContourModeNone;
-
- BOOL enableTracking = [[dictionary objectForKey:@"enableTracking"] boolValue];
- options.trackingEnabled = enableTracking;
-
- NSNumber *minFaceSize = dictionary[@"minFaceSize"];
- options.minFaceSize = minFaceSize.floatValue;
-
- NSString *mode = dictionary[@"mode"];
- options.performanceMode = [mode isEqualToString:@"accurate"] ? MLKFaceDetectorPerformanceModeAccurate : MLKFaceDetectorPerformanceModeFast;
-
- return [MLKFaceDetector faceDetectorWithOptions:options];
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- MLKVisionImage *image = [MLKVisionImage visionImageFromData:call.arguments[@"imageData"]];
-
- NSString *uid = call.arguments[@"id"];
- MLKFaceDetector *detector = [instances objectForKey:uid];
- if (detector == NULL) {
- detector = [self initialize:call];
- instances[uid] = detector;
- }
-
- [detector processImage:image
- completion:^(NSArray *_Nullable faces,
- NSError *_Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- } else if (!faces) {
- result(@[]);
- return;
- }
-
- NSMutableArray *faceData = [NSMutableArray array];
- for (MLKFace *face in faces) {
- id smileProb = face.hasSmilingProbability ? @(face.smilingProbability) : [NSNull null];
- id leftProb =
- face.hasLeftEyeOpenProbability ? @(face.leftEyeOpenProbability) : [NSNull null];
- id rightProb =
- face.hasRightEyeOpenProbability ? @(face.rightEyeOpenProbability) : [NSNull null];
-
- NSDictionary *data = @{
- @"rect" : @{
- @"left" : @(face.frame.origin.x),
- @"top" : @(face.frame.origin.y),
- @"right" : @(face.frame.origin.x + face.frame.size.width),
- @"bottom" : @(face.frame.origin.y + face.frame.size.height)
- },
- @"headEulerAngleX" : face.hasHeadEulerAngleX ? @(face.headEulerAngleX)
- : [NSNull null],
- @"headEulerAngleY" : face.hasHeadEulerAngleY ? @(face.headEulerAngleY)
- : [NSNull null],
- @"headEulerAngleZ" : face.hasHeadEulerAngleZ ? @(face.headEulerAngleZ)
- : [NSNull null],
- @"smilingProbability" : smileProb,
- @"leftEyeOpenProbability" : leftProb,
- @"rightEyeOpenProbability" : rightProb,
- @"trackingId" : face.hasTrackingID ? @(face.trackingID) : [NSNull null],
- @"landmarks" : @{
- @"bottomMouth" : [self getLandmarkPosition:face
- landmark:MLKFaceLandmarkTypeMouthBottom],
- @"rightMouth" : [self getLandmarkPosition:face
- landmark:MLKFaceLandmarkTypeMouthRight],
- @"leftMouth" : [self getLandmarkPosition:face
- landmark:MLKFaceLandmarkTypeMouthLeft],
- @"rightEye" : [self getLandmarkPosition:face
- landmark:MLKFaceLandmarkTypeRightEye],
- @"leftEye" : [self getLandmarkPosition:face
- landmark:MLKFaceLandmarkTypeLeftEye],
- @"rightEar" : [self getLandmarkPosition:face
- landmark:MLKFaceLandmarkTypeRightEar],
- @"leftEar" : [self getLandmarkPosition:face
- landmark:MLKFaceLandmarkTypeLeftEar],
- @"rightCheek" : [self getLandmarkPosition:face
- landmark:MLKFaceLandmarkTypeRightCheek],
- @"leftCheek" : [self getLandmarkPosition:face
- landmark:MLKFaceLandmarkTypeLeftCheek],
- @"noseBase" : [self getLandmarkPosition:face
- landmark:MLKFaceLandmarkTypeNoseBase],
- },
- @"contours" : @{
- @"face" : [self getContourPoints:face contour:MLKFaceContourTypeFace],
- @"leftEyebrowTop" :
- [self getContourPoints:face contour:MLKFaceContourTypeLeftEyebrowTop],
- @"leftEyebrowBottom" :
- [self getContourPoints:face
- contour:MLKFaceContourTypeLeftEyebrowBottom],
- @"rightEyebrowTop" :
- [self getContourPoints:face contour:MLKFaceContourTypeRightEyebrowTop],
- @"rightEyebrowBottom" :
- [self getContourPoints:face
- contour:MLKFaceContourTypeRightEyebrowBottom],
- @"leftEye" : [self getContourPoints:face contour:MLKFaceContourTypeLeftEye],
- @"rightEye" : [self getContourPoints:face
- contour:MLKFaceContourTypeRightEye],
- @"upperLipTop" : [self getContourPoints:face
- contour:MLKFaceContourTypeUpperLipTop],
- @"upperLipBottom" :
- [self getContourPoints:face contour:MLKFaceContourTypeUpperLipBottom],
- @"lowerLipTop" : [self getContourPoints:face
- contour:MLKFaceContourTypeLowerLipTop],
- @"lowerLipBottom" :
- [self getContourPoints:face contour:MLKFaceContourTypeLowerLipBottom],
- @"noseBridge" : [self getContourPoints:face
- contour:MLKFaceContourTypeNoseBridge],
- @"noseBottom" : [self getContourPoints:face
- contour:MLKFaceContourTypeNoseBottom],
- @"leftCheek" : [self getContourPoints:face
- contour:MLKFaceContourTypeLeftCheek],
- @"rightCheek" : [self getContourPoints:face
- contour:MLKFaceContourTypeRightCheek],
- }
- };
- [faceData addObject:data];
- }
-
- result(faceData);
- }];
-}
-
-- (id)getLandmarkPosition:(MLKFace *)face landmark:(MLKFaceLandmarkType)landmarkType {
- MLKFaceLandmark *landmark = [face landmarkOfType:landmarkType];
- if (landmark) {
- return @[ @(landmark.position.x), @(landmark.position.y) ];
- }
- return [NSNull null];
-}
-
-- (id)getContourPoints:(MLKFace *)face contour:(MLKFaceContourType)contourType {
- MLKFaceContour *contour = [face contourOfType:contourType];
- if (contour) {
- NSArray *contourPoints = contour.points;
- NSMutableArray *result = [[NSMutableArray alloc] initWithCapacity:[contourPoints count]];
- for (int i = 0; i < [contourPoints count]; i++) {
- MLKVisionPoint *point = [contourPoints objectAtIndex:i];
- [result insertObject:@[ @(point.x), @(point.y) ] atIndex:i];
- }
- return [result copy];
- }
-
- return [NSNull null];
-}
-
-@end
diff --git a/packages/google_mlkit_face_detection/ios/Classes/GoogleMlKitFaceDetectionPlugin.swift b/packages/google_mlkit_face_detection/ios/Classes/GoogleMlKitFaceDetectionPlugin.swift
new file mode 100644
index 00000000..4e4ecc1c
--- /dev/null
+++ b/packages/google_mlkit_face_detection/ios/Classes/GoogleMlKitFaceDetectionPlugin.swift
@@ -0,0 +1,148 @@
+import Flutter
+import MLKitVision
+import MLKitFaceDetection
+import google_mlkit_commons
+
+@objc
+public class GoogleMlKitFaceDetectionPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: FaceDetector] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_face_detector",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitFaceDetectionPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "vision#startFaceDetector":
+ handleDetection(call: call, result: result)
+ case "vision#closeFaceDetector":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func initialize(call: FlutterMethodCall) -> FaceDetector? {
+ guard let args = call.arguments as? [String: Any],
+ let dictionary = args["options"] as? [String: Any] else {
+ return nil
+ }
+ let options = FaceDetectorOptions()
+ options.classificationMode = (dictionary["enableClassification"] as? NSNumber)?.boolValue == true ? .all : .none
+ options.landmarkMode = (dictionary["enableLandmarks"] as? NSNumber)?.boolValue == true ? .all : .none
+ options.contourMode = (dictionary["enableContours"] as? NSNumber)?.boolValue == true ? .all : .none
+ options.isTrackingEnabled = (dictionary["enableTracking"] as? NSNumber)?.boolValue ?? false
+ if let minFaceSize = dictionary["minFaceSize"] as? NSNumber {
+ options.minFaceSize = CGFloat(minFaceSize.floatValue)
+ }
+ let mode = dictionary["mode"] as? String ?? "fast"
+ options.performanceMode = mode == "accurate" ? .accurate : .fast
+ return FaceDetector.faceDetector(options: options)
+ }
+
+ private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let imageData = args["imageData"] as? [String: Any],
+ let uid = args["id"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+ return
+ }
+ guard let image = VisionImage.visionImage(from: imageData) else {
+ result(FlutterError(code: "invalid_image", message: "Invalid or missing image data", details: nil))
+ return
+ }
+
+ let detector: FaceDetector
+ if let existing = instances[uid] {
+ detector = existing
+ } else {
+ guard let newDetector = initialize(call: call) else {
+ result(FlutterError(code: "invalid_args", message: "Invalid options", details: nil))
+ return
+ }
+ detector = newDetector
+ instances[uid] = detector
+ }
+
+ detector.process(image) { faces, error in
+ if let error = error as NSError? {
+ result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+ return
+ }
+ guard let faces = faces else {
+ result([])
+ return
+ }
+ let faceData = faces.map { face -> [String: Any] in
+ self.faceToDictionary(face)
+ }
+ result(faceData)
+ }
+ }
+
+ private func faceToDictionary(_ face: Face) -> [String: Any] {
+ let data: [String: Any] = [
+ "rect": [
+ "left": face.frame.origin.x,
+ "top": face.frame.origin.y,
+ "right": face.frame.origin.x + face.frame.size.width,
+ "bottom": face.frame.origin.y + face.frame.size.height
+ ],
+ "headEulerAngleX": face.hasHeadEulerAngleX ? face.headEulerAngleX as Any : NSNull(),
+ "headEulerAngleY": face.hasHeadEulerAngleY ? face.headEulerAngleY as Any : NSNull(),
+ "headEulerAngleZ": face.hasHeadEulerAngleZ ? face.headEulerAngleZ as Any : NSNull(),
+ "smilingProbability": face.hasSmilingProbability ? face.smilingProbability as Any : NSNull(),
+ "leftEyeOpenProbability": face.hasLeftEyeOpenProbability ? face.leftEyeOpenProbability as Any : NSNull(),
+ "rightEyeOpenProbability": face.hasRightEyeOpenProbability ? face.rightEyeOpenProbability as Any : NSNull(),
+ "trackingId": face.hasTrackingID ? face.trackingID as Any : NSNull(),
+ "landmarks": [
+ "bottomMouth": getLandmarkPosition(face, landmark: .mouthBottom),
+ "rightMouth": getLandmarkPosition(face, landmark: .mouthRight),
+ "leftMouth": getLandmarkPosition(face, landmark: .mouthLeft),
+ "rightEye": getLandmarkPosition(face, landmark: .rightEye),
+ "leftEye": getLandmarkPosition(face, landmark: .leftEye),
+ "rightEar": getLandmarkPosition(face, landmark: .rightEar),
+ "leftEar": getLandmarkPosition(face, landmark: .leftEar),
+ "rightCheek": getLandmarkPosition(face, landmark: .rightCheek),
+ "leftCheek": getLandmarkPosition(face, landmark: .leftCheek),
+ "noseBase": getLandmarkPosition(face, landmark: .noseBase)
+ ],
+ "contours": [
+ "face": getContourPoints(face, contour: .face),
+ "leftEyebrowTop": getContourPoints(face, contour: .leftEyebrowTop),
+ "leftEyebrowBottom": getContourPoints(face, contour: .leftEyebrowBottom),
+ "rightEyebrowTop": getContourPoints(face, contour: .rightEyebrowTop),
+ "rightEyebrowBottom": getContourPoints(face, contour: .rightEyebrowBottom),
+ "leftEye": getContourPoints(face, contour: .leftEye),
+ "rightEye": getContourPoints(face, contour: .rightEye),
+ "upperLipTop": getContourPoints(face, contour: .upperLipTop),
+ "upperLipBottom": getContourPoints(face, contour: .upperLipBottom),
+ "lowerLipTop": getContourPoints(face, contour: .lowerLipTop),
+ "lowerLipBottom": getContourPoints(face, contour: .lowerLipBottom),
+ "noseBridge": getContourPoints(face, contour: .noseBridge),
+ "noseBottom": getContourPoints(face, contour: .noseBottom),
+ "leftCheek": getContourPoints(face, contour: .leftCheek),
+ "rightCheek": getContourPoints(face, contour: .rightCheek)
+ ]
+ ]
+ return data
+ }
+
+ private func getLandmarkPosition(_ face: Face, landmark: FaceLandmarkType) -> Any {
+ guard let landmarkObj = face.landmark(ofType: landmark) else { return NSNull() }
+ return [NSNumber(value: landmarkObj.position.x), NSNumber(value: landmarkObj.position.y)]
+ }
+
+ private func getContourPoints(_ face: Face, contour: FaceContourType) -> Any {
+ guard let contourObj = face.contour(ofType: contour) else { return NSNull() }
+ return contourObj.points.map { [NSNumber(value: $0.x), NSNumber(value: $0.y)] }
+ }
+}
diff --git a/packages/google_mlkit_face_detection/ios/google_mlkit_face_detection.podspec b/packages/google_mlkit_face_detection/ios/google_mlkit_face_detection.podspec
index 826b7c84..1fe87008 100644
--- a/packages/google_mlkit_face_detection/ios/google_mlkit_face_detection.podspec
+++ b/packages/google_mlkit_face_detection/ios/google_mlkit_face_detection.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/FaceDetection', '~> 9.0.0'
s.dependency 'google_mlkit_commons'
diff --git a/packages/google_mlkit_face_mesh_detection/ios/Assets/.gitkeep b/packages/google_mlkit_face_mesh_detection/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_face_mesh_detection/ios/Classes/GoogleMlKitFaceMeshDetectionPlugin.h b/packages/google_mlkit_face_mesh_detection/ios/Classes/GoogleMlKitFaceMeshDetectionPlugin.h
deleted file mode 100644
index 7677fc0f..00000000
--- a/packages/google_mlkit_face_mesh_detection/ios/Classes/GoogleMlKitFaceMeshDetectionPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import <Flutter/Flutter.h>
-
-@interface GoogleMlKitFaceMeshDetectionPlugin : NSObject <FlutterPlugin>
-@end
diff --git a/packages/google_mlkit_face_mesh_detection/ios/Classes/GoogleMlKitFaceMeshDetectionPlugin.m b/packages/google_mlkit_face_mesh_detection/ios/Classes/GoogleMlKitFaceMeshDetectionPlugin.m
deleted file mode 100644
index df513652..00000000
--- a/packages/google_mlkit_face_mesh_detection/ios/Classes/GoogleMlKitFaceMeshDetectionPlugin.m
+++ /dev/null
@@ -1,45 +0,0 @@
-#import "GoogleMlKitFaceMeshDetectionPlugin.h"
-#import <Flutter/Flutter.h>
-
-#define channelName @"google_mlkit_face_mesh_detector"
-#define startFaceMeshDetector @"vision#startFaceMeshDetector"
-#define closeFaceMeshDetector @"vision#closeFaceMeshDetector"
-
-@implementation GoogleMlKitFaceMeshDetectionPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitFaceMeshDetectionPlugin* instance = [[GoogleMlKitFaceMeshDetectionPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startFaceMeshDetector]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:closeFaceMeshDetector]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- // TODO: waiting for Google to release Face Mesh api for iOS
- // https://developers.google.com/ml-kit/vision/face-mesh-detection
- result(FlutterMethodNotImplemented);
-}
-
-@end
diff --git a/packages/google_mlkit_face_mesh_detection/ios/Classes/GoogleMlKitFaceMeshDetectionPlugin.swift b/packages/google_mlkit_face_mesh_detection/ios/Classes/GoogleMlKitFaceMeshDetectionPlugin.swift
new file mode 100644
index 00000000..d9bc5772
--- /dev/null
+++ b/packages/google_mlkit_face_mesh_detection/ios/Classes/GoogleMlKitFaceMeshDetectionPlugin.swift
@@ -0,0 +1,36 @@
+import Flutter
+import google_mlkit_commons
+
+@objc
+public class GoogleMlKitFaceMeshDetectionPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: Any] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_face_mesh_detector",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitFaceMeshDetectionPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "vision#startFaceMeshDetector":
+ handleDetection(call: call, result: result)
+ case "vision#closeFaceMeshDetector":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ // swiftlint:disable:next todo
+ // TODO: waiting for Google to release Face Mesh API for iOS
+ result(FlutterMethodNotImplemented)
+ }
+}
diff --git a/packages/google_mlkit_face_mesh_detection/ios/google_mlkit_face_mesh_detection.podspec b/packages/google_mlkit_face_mesh_detection/ios/google_mlkit_face_mesh_detection.podspec
index 131727eb..1a543fe6 100644
--- a/packages/google_mlkit_face_mesh_detection/ios/google_mlkit_face_mesh_detection.podspec
+++ b/packages/google_mlkit_face_mesh_detection/ios/google_mlkit_face_mesh_detection.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
# s.dependency 'GoogleMLKit/FaceMeshDetection', '~> 5.0.0'
s.dependency 'google_mlkit_commons'
diff --git a/packages/google_mlkit_genai_image_description/ios/Assets/.gitkeep b/packages/google_mlkit_genai_image_description/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_genai_image_description/ios/Classes/GoogleMlKitGenaiImageDescriptionPlugin.h b/packages/google_mlkit_genai_image_description/ios/Classes/GoogleMlKitGenaiImageDescriptionPlugin.h
deleted file mode 100644
index 5ed8db57..00000000
--- a/packages/google_mlkit_genai_image_description/ios/Classes/GoogleMlKitGenaiImageDescriptionPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import <Flutter/Flutter.h>
-
-@interface GoogleMlKitGenaiImageDescriptionPlugin : NSObject <FlutterPlugin>
-@end
diff --git a/packages/google_mlkit_genai_image_description/ios/Classes/GoogleMlKitGenaiImageDescriptionPlugin.m b/packages/google_mlkit_genai_image_description/ios/Classes/GoogleMlKitGenaiImageDescriptionPlugin.m
deleted file mode 100644
index e8f55e62..00000000
--- a/packages/google_mlkit_genai_image_description/ios/Classes/GoogleMlKitGenaiImageDescriptionPlugin.m
+++ /dev/null
@@ -1,58 +0,0 @@
-#import <Flutter/Flutter.h>
-#import "GoogleMlKitGenaiImageDescriptionPlugin.h"
-
-#define channelName @"google_mlkit_genai_image_description"
-#define checkFeatureStatus @"genai#checkFeatureStatus"
-#define downloadFeature @"genai#downloadFeature"
-#define runInference @"genai#runInference"
-#define runInferenceStreaming @"genai#runInferenceStreaming"
-#define closeImageDescriber @"genai#closeImageDescriber"
-
-@implementation GoogleMlKitGenaiImageDescriptionPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitGenaiImageDescriptionPlugin* instance = [[GoogleMlKitGenaiImageDescriptionPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:checkFeatureStatus]) {
- // iOS implementation would go here
- // Note: GenAI APIs are currently Android-only
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:downloadFeature]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:runInference]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:runInferenceStreaming]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:closeImageDescriber]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-@end
diff --git a/packages/google_mlkit_genai_image_description/ios/Classes/GoogleMlKitGenaiImageDescriptionPlugin.swift b/packages/google_mlkit_genai_image_description/ios/Classes/GoogleMlKitGenaiImageDescriptionPlugin.swift
new file mode 100644
index 00000000..7b96b6d4
--- /dev/null
+++ b/packages/google_mlkit_genai_image_description/ios/Classes/GoogleMlKitGenaiImageDescriptionPlugin.swift
@@ -0,0 +1,34 @@
+import Flutter
+
+@objc
+public class GoogleMlKitGenaiImageDescriptionPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: Any] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_genai_image_description",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitGenaiImageDescriptionPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ let unimplemented = FlutterError(
+ code: "UNIMPLEMENTED",
+ message: "GenAI APIs are currently only available on Android",
+ details: nil
+ )
+ switch call.method {
+ case "genai#checkFeatureStatus", "genai#downloadFeature", "genai#runInference", "genai#runInferenceStreaming":
+ result(unimplemented)
+ case "genai#closeImageDescriber":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+}
diff --git a/packages/google_mlkit_genai_image_description/ios/google_mlkit_genai_image_description.podspec b/packages/google_mlkit_genai_image_description/ios/google_mlkit_genai_image_description.podspec
index c1401a94..4af76e93 100644
--- a/packages/google_mlkit_genai_image_description/ios/google_mlkit_genai_image_description.podspec
+++ b/packages/google_mlkit_genai_image_description/ios/google_mlkit_genai_image_description.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.platform = :ios, '15.5'
s.ios.deployment_target = '15.5'
diff --git a/packages/google_mlkit_genai_prompt/ios/Assets/.gitkeep b/packages/google_mlkit_genai_prompt/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_genai_prompt/ios/Classes/GoogleMlKitGenaiPromptPlugin.h b/packages/google_mlkit_genai_prompt/ios/Classes/GoogleMlKitGenaiPromptPlugin.h
deleted file mode 100644
index 259499ba..00000000
--- a/packages/google_mlkit_genai_prompt/ios/Classes/GoogleMlKitGenaiPromptPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import <Flutter/Flutter.h>
-
-@interface GoogleMlKitGenaiPromptPlugin : NSObject <FlutterPlugin>
-@end
diff --git a/packages/google_mlkit_genai_prompt/ios/Classes/GoogleMlKitGenaiPromptPlugin.m b/packages/google_mlkit_genai_prompt/ios/Classes/GoogleMlKitGenaiPromptPlugin.m
deleted file mode 100644
index 836f06d1..00000000
--- a/packages/google_mlkit_genai_prompt/ios/Classes/GoogleMlKitGenaiPromptPlugin.m
+++ /dev/null
@@ -1,58 +0,0 @@
-#import <Flutter/Flutter.h>
-#import "GoogleMlKitGenaiPromptPlugin.h"
-
-#define channelName @"google_mlkit_genai_prompt"
-#define checkFeatureStatus @"genai#checkFeatureStatus"
-#define downloadFeature @"genai#downloadFeature"
-#define runInference @"genai#runInference"
-#define runInferenceStreaming @"genai#runInferenceStreaming"
-#define closePrompt @"genai#closePrompt"
-
-@implementation GoogleMlKitGenaiPromptPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitGenaiPromptPlugin* instance = [[GoogleMlKitGenaiPromptPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:checkFeatureStatus]) {
- // iOS implementation would go here
- // Note: GenAI APIs are currently Android-only
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:downloadFeature]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:runInference]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:runInferenceStreaming]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:closePrompt]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-@end
diff --git a/packages/google_mlkit_genai_prompt/ios/Classes/GoogleMlKitGenaiPromptPlugin.swift b/packages/google_mlkit_genai_prompt/ios/Classes/GoogleMlKitGenaiPromptPlugin.swift
new file mode 100644
index 00000000..df1569be
--- /dev/null
+++ b/packages/google_mlkit_genai_prompt/ios/Classes/GoogleMlKitGenaiPromptPlugin.swift
@@ -0,0 +1,34 @@
+import Flutter
+
+@objc
+public class GoogleMlKitGenaiPromptPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: Any] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_genai_prompt",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitGenaiPromptPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ let unimplemented = FlutterError(
+ code: "UNIMPLEMENTED",
+ message: "GenAI APIs are currently only available on Android",
+ details: nil
+ )
+ switch call.method {
+ case "genai#checkFeatureStatus", "genai#downloadFeature", "genai#runInference", "genai#runInferenceStreaming":
+ result(unimplemented)
+ case "genai#closePrompt":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+}
diff --git a/packages/google_mlkit_genai_prompt/ios/google_mlkit_genai_prompt.podspec b/packages/google_mlkit_genai_prompt/ios/google_mlkit_genai_prompt.podspec
index c1401a94..4af76e93 100644
--- a/packages/google_mlkit_genai_prompt/ios/google_mlkit_genai_prompt.podspec
+++ b/packages/google_mlkit_genai_prompt/ios/google_mlkit_genai_prompt.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.platform = :ios, '15.5'
s.ios.deployment_target = '15.5'
diff --git a/packages/google_mlkit_genai_proofreading/ios/Assets/.gitkeep b/packages/google_mlkit_genai_proofreading/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_genai_proofreading/ios/Classes/GoogleMlKitGenaiProofreadingPlugin.h b/packages/google_mlkit_genai_proofreading/ios/Classes/GoogleMlKitGenaiProofreadingPlugin.h
deleted file mode 100644
index a212a74c..00000000
--- a/packages/google_mlkit_genai_proofreading/ios/Classes/GoogleMlKitGenaiProofreadingPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitGenaiProofreadingPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_genai_proofreading/ios/Classes/GoogleMlKitGenaiProofreadingPlugin.m b/packages/google_mlkit_genai_proofreading/ios/Classes/GoogleMlKitGenaiProofreadingPlugin.m
deleted file mode 100644
index 65dc95da..00000000
--- a/packages/google_mlkit_genai_proofreading/ios/Classes/GoogleMlKitGenaiProofreadingPlugin.m
+++ /dev/null
@@ -1,58 +0,0 @@
-#import
-#import "GoogleMlKitGenaiProofreadingPlugin.h"
-
-#define channelName @"google_mlkit_genai_proofreading"
-#define checkFeatureStatus @"genai#checkFeatureStatus"
-#define downloadFeature @"genai#downloadFeature"
-#define runInference @"genai#runInference"
-#define runInferenceStreaming @"genai#runInferenceStreaming"
-#define closeProofreader @"genai#closeProofreader"
-
-@implementation GoogleMlKitGenaiProofreadingPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitGenaiProofreadingPlugin* instance = [[GoogleMlKitGenaiProofreadingPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:checkFeatureStatus]) {
- // iOS implementation would go here
- // Note: GenAI APIs are currently Android-only
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:downloadFeature]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:runInference]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:runInferenceStreaming]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:closeProofreader]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-@end
diff --git a/packages/google_mlkit_genai_proofreading/ios/Classes/GoogleMlKitGenaiProofreadingPlugin.swift b/packages/google_mlkit_genai_proofreading/ios/Classes/GoogleMlKitGenaiProofreadingPlugin.swift
new file mode 100644
index 00000000..a4920d58
--- /dev/null
+++ b/packages/google_mlkit_genai_proofreading/ios/Classes/GoogleMlKitGenaiProofreadingPlugin.swift
@@ -0,0 +1,34 @@
+import Flutter
+
+@objc
+public class GoogleMlKitGenaiProofreadingPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: Any] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_genai_proofreading",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitGenaiProofreadingPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ let unimplemented = FlutterError(
+ code: "UNIMPLEMENTED",
+ message: "GenAI APIs are currently only available on Android",
+ details: nil
+ )
+ switch call.method {
+ case "genai#checkFeatureStatus", "genai#downloadFeature", "genai#runInference", "genai#runInferenceStreaming":
+ result(unimplemented)
+ case "genai#closeProofreader":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+}
diff --git a/packages/google_mlkit_genai_proofreading/ios/google_mlkit_genai_proofreading.podspec b/packages/google_mlkit_genai_proofreading/ios/google_mlkit_genai_proofreading.podspec
index c1401a94..4af76e93 100644
--- a/packages/google_mlkit_genai_proofreading/ios/google_mlkit_genai_proofreading.podspec
+++ b/packages/google_mlkit_genai_proofreading/ios/google_mlkit_genai_proofreading.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.platform = :ios, '15.5'
s.ios.deployment_target = '15.5'
diff --git a/packages/google_mlkit_genai_rewriting/ios/Assets/.gitkeep b/packages/google_mlkit_genai_rewriting/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_genai_rewriting/ios/Classes/GoogleMlKitGenaiRewritingPlugin.h b/packages/google_mlkit_genai_rewriting/ios/Classes/GoogleMlKitGenaiRewritingPlugin.h
deleted file mode 100644
index f58bb291..00000000
--- a/packages/google_mlkit_genai_rewriting/ios/Classes/GoogleMlKitGenaiRewritingPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitGenaiRewritingPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_genai_rewriting/ios/Classes/GoogleMlKitGenaiRewritingPlugin.m b/packages/google_mlkit_genai_rewriting/ios/Classes/GoogleMlKitGenaiRewritingPlugin.m
deleted file mode 100644
index 0a021bab..00000000
--- a/packages/google_mlkit_genai_rewriting/ios/Classes/GoogleMlKitGenaiRewritingPlugin.m
+++ /dev/null
@@ -1,58 +0,0 @@
-#import
-#import "GoogleMlKitGenaiRewritingPlugin.h"
-
-#define channelName @"google_mlkit_genai_rewriting"
-#define checkFeatureStatus @"genai#checkFeatureStatus"
-#define downloadFeature @"genai#downloadFeature"
-#define runInference @"genai#runInference"
-#define runInferenceStreaming @"genai#runInferenceStreaming"
-#define closeRewriter @"genai#closeRewriter"
-
-@implementation GoogleMlKitGenaiRewritingPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitGenaiRewritingPlugin* instance = [[GoogleMlKitGenaiRewritingPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:checkFeatureStatus]) {
- // iOS implementation would go here
- // Note: GenAI APIs are currently Android-only
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:downloadFeature]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:runInference]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:runInferenceStreaming]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:closeRewriter]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-@end
diff --git a/packages/google_mlkit_genai_rewriting/ios/Classes/GoogleMlKitGenaiRewritingPlugin.swift b/packages/google_mlkit_genai_rewriting/ios/Classes/GoogleMlKitGenaiRewritingPlugin.swift
new file mode 100644
index 00000000..1230b3ef
--- /dev/null
+++ b/packages/google_mlkit_genai_rewriting/ios/Classes/GoogleMlKitGenaiRewritingPlugin.swift
@@ -0,0 +1,34 @@
+import Flutter
+
+@objc
+public class GoogleMlKitGenaiRewritingPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: Any] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_genai_rewriting",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitGenaiRewritingPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ let unimplemented = FlutterError(
+ code: "UNIMPLEMENTED",
+ message: "GenAI APIs are currently only available on Android",
+ details: nil
+ )
+ switch call.method {
+ case "genai#checkFeatureStatus", "genai#downloadFeature", "genai#runInference", "genai#runInferenceStreaming":
+ result(unimplemented)
+ case "genai#closeRewriter":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+}
diff --git a/packages/google_mlkit_genai_rewriting/ios/google_mlkit_genai_rewriting.podspec b/packages/google_mlkit_genai_rewriting/ios/google_mlkit_genai_rewriting.podspec
index c1401a94..4af76e93 100644
--- a/packages/google_mlkit_genai_rewriting/ios/google_mlkit_genai_rewriting.podspec
+++ b/packages/google_mlkit_genai_rewriting/ios/google_mlkit_genai_rewriting.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.platform = :ios, '15.5'
s.ios.deployment_target = '15.5'
diff --git a/packages/google_mlkit_genai_speech_recognition/ios/Assets/.gitkeep b/packages/google_mlkit_genai_speech_recognition/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_genai_speech_recognition/ios/Classes/GoogleMlKitGenaiSpeechRecognitionPlugin.h b/packages/google_mlkit_genai_speech_recognition/ios/Classes/GoogleMlKitGenaiSpeechRecognitionPlugin.h
deleted file mode 100644
index 5f335d35..00000000
--- a/packages/google_mlkit_genai_speech_recognition/ios/Classes/GoogleMlKitGenaiSpeechRecognitionPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitGenaiSpeechRecognitionPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_genai_speech_recognition/ios/Classes/GoogleMlKitGenaiSpeechRecognitionPlugin.m b/packages/google_mlkit_genai_speech_recognition/ios/Classes/GoogleMlKitGenaiSpeechRecognitionPlugin.m
deleted file mode 100644
index 0929b823..00000000
--- a/packages/google_mlkit_genai_speech_recognition/ios/Classes/GoogleMlKitGenaiSpeechRecognitionPlugin.m
+++ /dev/null
@@ -1,53 +0,0 @@
-#import
-#import "GoogleMlKitGenaiSpeechRecognitionPlugin.h"
-
-#define channelName @"google_mlkit_genai_speech_recognition"
-#define checkStatus @"genai#checkStatus"
-#define startRecognition @"genai#startRecognition"
-#define stopRecognition @"genai#stopRecognition"
-#define closeSpeechRecognizer @"genai#closeSpeechRecognizer"
-
-@implementation GoogleMlKitGenaiSpeechRecognitionPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitGenaiSpeechRecognitionPlugin* instance = [[GoogleMlKitGenaiSpeechRecognitionPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:checkStatus]) {
- // iOS implementation would go here
- // Note: GenAI APIs are currently Android-only
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:startRecognition]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:stopRecognition]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:closeSpeechRecognizer]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-@end
diff --git a/packages/google_mlkit_genai_speech_recognition/ios/Classes/GoogleMlKitGenaiSpeechRecognitionPlugin.swift b/packages/google_mlkit_genai_speech_recognition/ios/Classes/GoogleMlKitGenaiSpeechRecognitionPlugin.swift
new file mode 100644
index 00000000..59a13ea0
--- /dev/null
+++ b/packages/google_mlkit_genai_speech_recognition/ios/Classes/GoogleMlKitGenaiSpeechRecognitionPlugin.swift
@@ -0,0 +1,34 @@
+import Flutter
+
+@objc
+public class GoogleMlKitGenaiSpeechRecognitionPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: Any] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_genai_speech_recognition",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitGenaiSpeechRecognitionPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ let unimplemented = FlutterError(
+ code: "UNIMPLEMENTED",
+ message: "GenAI APIs are currently only available on Android",
+ details: nil
+ )
+ switch call.method {
+ case "genai#checkStatus", "genai#startRecognition", "genai#stopRecognition":
+ result(unimplemented)
+ case "genai#closeSpeechRecognizer":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+}
diff --git a/packages/google_mlkit_genai_speech_recognition/ios/google_mlkit_genai_speech_recognition.podspec b/packages/google_mlkit_genai_speech_recognition/ios/google_mlkit_genai_speech_recognition.podspec
index c1401a94..4af76e93 100644
--- a/packages/google_mlkit_genai_speech_recognition/ios/google_mlkit_genai_speech_recognition.podspec
+++ b/packages/google_mlkit_genai_speech_recognition/ios/google_mlkit_genai_speech_recognition.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.platform = :ios, '15.5'
s.ios.deployment_target = '15.5'
diff --git a/packages/google_mlkit_genai_summarization/ios/Assets/.gitkeep b/packages/google_mlkit_genai_summarization/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_genai_summarization/ios/Classes/GoogleMlKitGenaiSummarizationPlugin.h b/packages/google_mlkit_genai_summarization/ios/Classes/GoogleMlKitGenaiSummarizationPlugin.h
deleted file mode 100644
index c16b9822..00000000
--- a/packages/google_mlkit_genai_summarization/ios/Classes/GoogleMlKitGenaiSummarizationPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitGenaiSummarizationPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_genai_summarization/ios/Classes/GoogleMlKitGenaiSummarizationPlugin.m b/packages/google_mlkit_genai_summarization/ios/Classes/GoogleMlKitGenaiSummarizationPlugin.m
deleted file mode 100644
index 4511fb5d..00000000
--- a/packages/google_mlkit_genai_summarization/ios/Classes/GoogleMlKitGenaiSummarizationPlugin.m
+++ /dev/null
@@ -1,58 +0,0 @@
-#import
-#import "GoogleMlKitGenaiSummarizationPlugin.h"
-
-#define channelName @"google_mlkit_genai_summarization"
-#define checkFeatureStatus @"genai#checkFeatureStatus"
-#define downloadFeature @"genai#downloadFeature"
-#define runInference @"genai#runInference"
-#define runInferenceStreaming @"genai#runInferenceStreaming"
-#define closeSummarizer @"genai#closeSummarizer"
-
-@implementation GoogleMlKitGenaiSummarizationPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitGenaiSummarizationPlugin* instance = [[GoogleMlKitGenaiSummarizationPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:checkFeatureStatus]) {
- // iOS implementation would go here
- // Note: GenAI APIs are currently Android-only
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:downloadFeature]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:runInference]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:runInferenceStreaming]) {
- result([FlutterError errorWithCode:@"UNIMPLEMENTED"
- message:@"GenAI APIs are currently only available on Android"
- details:nil]);
- } else if ([call.method isEqualToString:closeSummarizer]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-@end
diff --git a/packages/google_mlkit_genai_summarization/ios/Classes/GoogleMlKitGenaiSummarizationPlugin.swift b/packages/google_mlkit_genai_summarization/ios/Classes/GoogleMlKitGenaiSummarizationPlugin.swift
new file mode 100644
index 00000000..eae388a7
--- /dev/null
+++ b/packages/google_mlkit_genai_summarization/ios/Classes/GoogleMlKitGenaiSummarizationPlugin.swift
@@ -0,0 +1,34 @@
+import Flutter
+
+@objc
+public class GoogleMlKitGenaiSummarizationPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: Any] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_genai_summarization",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitGenaiSummarizationPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ let unimplemented = FlutterError(
+ code: "UNIMPLEMENTED",
+ message: "GenAI APIs are currently only available on Android",
+ details: nil
+ )
+ switch call.method {
+ case "genai#checkFeatureStatus", "genai#downloadFeature", "genai#runInference", "genai#runInferenceStreaming":
+ result(unimplemented)
+ case "genai#closeSummarizer":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+}
diff --git a/packages/google_mlkit_genai_summarization/ios/google_mlkit_genai_summarization.podspec b/packages/google_mlkit_genai_summarization/ios/google_mlkit_genai_summarization.podspec
index c1401a94..4af76e93 100644
--- a/packages/google_mlkit_genai_summarization/ios/google_mlkit_genai_summarization.podspec
+++ b/packages/google_mlkit_genai_summarization/ios/google_mlkit_genai_summarization.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.platform = :ios, '15.5'
s.ios.deployment_target = '15.5'
diff --git a/packages/google_mlkit_image_labeling/CHANGELOG.md b/packages/google_mlkit_image_labeling/CHANGELOG.md
index c6896532..40f9dcf1 100644
--- a/packages/google_mlkit_image_labeling/CHANGELOG.md
+++ b/packages/google_mlkit_image_labeling/CHANGELOG.md
@@ -64,7 +64,7 @@
## 0.4.0
* Update dependencies.
-* __BREAKING:__ For remote firebase-hosted models in iOS, you must now explicitly add the `GoogleMLKit/LinkFirebase` pod and preprocessor flag to your Podfile. This removes an unnecessary dependency on FirebaseCore for those who do not need to use remote models. Please see the updated README for instructions.
+* __BREAKING:__ For remote firebase-hosted models in iOS, you must now explicitly add the `GoogleMLKit/LinkFirebase` pod to your Podfile. The Swift implementation uses `canImport(MLKitLinkFirebase)` (no preprocessor macro required). This removes an unnecessary dependency on FirebaseCore for those who do not need to use remote models. Please see the updated README for instructions.
## 0.3.0
diff --git a/packages/google_mlkit_image_labeling/README.md b/packages/google_mlkit_image_labeling/README.md
index 9ab38577..4c47f4f5 100644
--- a/packages/google_mlkit_image_labeling/README.md
+++ b/packages/google_mlkit_image_labeling/README.md
@@ -201,35 +201,19 @@ Google's standalone ML Kit library does **NOT** have any direct dependency with
Additionally, for iOS you have to update your app's Podfile.
-First, include `GoogleMLKit/LinkFirebase` and `Firebase` in your Podfile:
+Include `GoogleMLKit/LinkFirebase` and `Firebase` in your Podfile:
```ruby
platform :ios, '15.5'
...
-# Enable firebase-hosted models #
+# Enable firebase-hosted models (Swift uses canImport(MLKitLinkFirebase) — no preprocessor macro needed)
pod 'GoogleMLKit/LinkFirebase'
pod 'Firebase'
```
-Next, add the preprocessor flag to enable the firebase remote models at compile time. To do that, update your existing `build_configurations` loop in the `post_install` step with the following:
-
-```ruby
-post_install do |installer|
- installer.pods_project.targets.each do |target|
- ... # Here are some configurations automatically generated by flutter
-
- target.build_configurations.each do |config|
- # Enable firebase-hosted ML models
- config.build_settings['GCC_PREPROCESSOR_DEFINITIONS'] ||= [
- '$(inherited)',
- 'MLKIT_FIREBASE_MODELS=1',
- ]
- end
- end
-end
-```
+**Note:** The iOS implementation uses Swift’s `#if canImport(MLKitLinkFirebase)` to enable Firebase-hosted model code. Adding the `GoogleMLKit/LinkFirebase` pod is sufficient; you do not need to set `MLKIT_FIREBASE_MODELS=1` or any other preprocessor definitions. If the pod is not added, the plugin compiles without Firebase support and will return a clear error when Firebase options are used.
#### Usage
diff --git a/packages/google_mlkit_image_labeling/ios/Assets/.gitkeep b/packages/google_mlkit_image_labeling/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_image_labeling/ios/Classes/GoogleMlKitImageLabelingPlugin.h b/packages/google_mlkit_image_labeling/ios/Classes/GoogleMlKitImageLabelingPlugin.h
deleted file mode 100644
index e569fd89..00000000
--- a/packages/google_mlkit_image_labeling/ios/Classes/GoogleMlKitImageLabelingPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitImageLabelingPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_image_labeling/ios/Classes/GoogleMlKitImageLabelingPlugin.m b/packages/google_mlkit_image_labeling/ios/Classes/GoogleMlKitImageLabelingPlugin.m
deleted file mode 100644
index 81e65c14..00000000
--- a/packages/google_mlkit_image_labeling/ios/Classes/GoogleMlKitImageLabelingPlugin.m
+++ /dev/null
@@ -1,166 +0,0 @@
-#import "GoogleMlKitImageLabelingPlugin.h"
-#import
-#import
-#import
-#import
-#import
-
-#if MLKIT_FIREBASE_MODELS
-#import
-#endif
-
-#define channelName @"google_mlkit_image_labeler"
-#define startImageLabelDetector @"vision#startImageLabelDetector"
-#define closeImageLabelDetector @"vision#closeImageLabelDetector"
-#define manageFirebaseModels @"vision#manageFirebaseModels"
-
-@implementation GoogleMlKitImageLabelingPlugin {
- NSMutableDictionary *instances;
- GenericModelManager *genericModelManager;
-}
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitImageLabelingPlugin* instance = [[GoogleMlKitImageLabelingPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startImageLabelDetector]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:closeImageLabelDetector]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else if ([call.method isEqualToString:manageFirebaseModels]) {
-#if MLKIT_FIREBASE_MODELS
- [self manageModel:call result:result];
-#else
- result([FlutterError errorWithCode:@"ERROR_MISSING_MLKIT_FIREBASE_MODELS" message:@"You must define MLKIT_FIREBASE_MODELS=1 in your Podfile." details:nil]);
-#endif
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- MLKVisionImage *image = [MLKVisionImage visionImageFromData:call.arguments[@"imageData"]];
-
- NSString *uid = call.arguments[@"id"];
- MLKImageLabeler *labeler = [instances objectForKey:uid];
- if (labeler == NULL) {
- NSDictionary *dictionary = call.arguments[@"options"];
- NSString *type = dictionary[@"type"];
- if ([@"base" isEqualToString:type]) {
- MLKImageLabelerOptions *options = [self getDefaultOptions:dictionary];
- labeler = [MLKImageLabeler imageLabelerWithOptions:options];
- } else if ([@"local" isEqualToString:type]) {
- MLKCustomImageLabelerOptions *options = [self getLocalOptions:dictionary];
- labeler = [MLKImageLabeler imageLabelerWithOptions:options];
- } else if ([@"remote" isEqualToString:type]) {
-#if MLKIT_FIREBASE_MODELS
- MLKCustomImageLabelerOptions *options = [self getRemoteOptions:dictionary];
- if (options == NULL) {
- FlutterError *error = [FlutterError errorWithCode:@"Error Model has not been downloaded yet"
- message:@"Model has not been downloaded yet"
- details:@"Model has not been downloaded yet"];
- result(error);
- return;
- }
- labeler = [MLKImageLabeler imageLabelerWithOptions:options];
-#else
- result([FlutterError errorWithCode:@"ERROR_MISSING_MLKIT_FIREBASE_MODELS" message:@"You must define MLKIT_FIREBASE_MODELS=1 in your Podfile." details:nil]);
-#endif
- } else {
- NSString *error = [NSString stringWithFormat:@"Invalid model type: %@", type];
- result([FlutterError errorWithCode:type
- message:error
- details:error]);
- return;
- }
- instances[uid] = labeler;
- }
-
- [labeler processImage:image
- completion:^(NSArray *_Nullable labels,
- NSError *_Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- } else if (!labels) {
- result(@[]);
- }
-
- NSMutableArray *labelData = [NSMutableArray array];
- for (MLKImageLabel *label in labels) {
- NSDictionary *data = @{
- @"confidence" : @(label.confidence),
- @"index" : @(label.index),
- @"text" : label.text,
- };
- [labelData addObject:data];
- }
-
- result(labelData);
- }];
-}
-
-- (MLKImageLabelerOptions *)getDefaultOptions:(NSDictionary *)optionsData {
- NSNumber *conf = optionsData[@"confidenceThreshold"];
- MLKImageLabelerOptions *options = [MLKImageLabelerOptions new];
- options.confidenceThreshold = conf;
- return options;
-}
-
-- (MLKCustomImageLabelerOptions *)getLocalOptions:(NSDictionary *)optionsData {
- NSNumber *conf = optionsData[@"confidenceThreshold"];
- NSNumber *maxCount = optionsData[@"maxCount"];
- NSString *path = optionsData[@"path"];
-
- MLKLocalModel *localModel = [[MLKLocalModel alloc] initWithPath:path];
- MLKCustomImageLabelerOptions *options = [[MLKCustomImageLabelerOptions alloc] initWithLocalModel:localModel];
- options.confidenceThreshold = conf;
- options.maxResultCount = maxCount.intValue;
- return options;
-}
-
-#if MLKIT_FIREBASE_MODELS
-- (MLKCustomImageLabelerOptions *)getRemoteOptions:(NSDictionary *)optionsData {
- NSNumber *conf = optionsData[@"confidenceThreshold"];
- NSNumber *maxCount = optionsData[@"maxCount"];
- NSString *modelName = optionsData[@"modelName"];
-
- MLKFirebaseModelSource *firebaseModelSource = [[MLKFirebaseModelSource alloc] initWithName:modelName];
- MLKCustomRemoteModel *remoteModel = [[MLKCustomRemoteModel alloc] initWithRemoteModelSource:firebaseModelSource];
-
- MLKModelManager *modelManager = [MLKModelManager modelManager];
- BOOL isModelDownloaded = [modelManager isModelDownloaded:remoteModel];
- if (!isModelDownloaded) {
- return NULL;
- }
-
- MLKCustomImageLabelerOptions *options = [[MLKCustomImageLabelerOptions alloc] initWithRemoteModel:remoteModel];
- options.confidenceThreshold = conf;
- options.maxResultCount = maxCount.intValue;
- return options;
-}
-
-- (void)manageModel:(FlutterMethodCall *)call result:(FlutterResult)result {
- NSString *modelTag = call.arguments[@"model"];
- MLKFirebaseModelSource *firebaseModelSource = [[MLKFirebaseModelSource alloc] initWithName:modelTag];
- MLKCustomRemoteModel *model = [[MLKCustomRemoteModel alloc] initWithRemoteModelSource:firebaseModelSource];
- genericModelManager = [[GenericModelManager alloc] init];
- [genericModelManager manageModel:model call:call result:result];
-}
-#endif
-
-@end
diff --git a/packages/google_mlkit_image_labeling/ios/Classes/GoogleMlKitImageLabelingPlugin.swift b/packages/google_mlkit_image_labeling/ios/Classes/GoogleMlKitImageLabelingPlugin.swift
new file mode 100644
index 00000000..368dce4d
--- /dev/null
+++ b/packages/google_mlkit_image_labeling/ios/Classes/GoogleMlKitImageLabelingPlugin.swift
@@ -0,0 +1,194 @@
+import Flutter
+import MLKitCommon
+import MLKitVision
+import MLKitImageLabeling
+import MLKitImageLabelingCommon
+import MLKitImageLabelingCustom
+import google_mlkit_commons
+
+#if canImport(MLKitLinkFirebase)
+import MLKitLinkFirebase
+#endif
+
+@objc
+public class GoogleMlKitImageLabelingPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: ImageLabeler] = [:]
+ #if canImport(MLKitLinkFirebase)
+ private var genericModelManager: GenericModelManager?
+ #endif
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_image_labeler",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitImageLabelingPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "vision#startImageLabelDetector":
+ handleDetection(call: call, result: result)
+ case "vision#closeImageLabelDetector":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ case "vision#manageFirebaseModels":
+ #if canImport(MLKitLinkFirebase)
+ manageModel(call: call, result: result)
+ #else
+ result(FlutterError(
+ code: "ERROR_MISSING_MLKIT_FIREBASE_MODELS",
+ message: "Add the GoogleMLKit/LinkFirebase pod to your Podfile to use Firebase-hosted models.",
+ details: nil
+ ))
+ #endif
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let imageData = args["imageData"] as? [String: Any],
+ let uid = args["id"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+ return
+ }
+ guard let image = VisionImage.visionImage(from: imageData) else {
+ result(FlutterError(code: "invalid_image", message: "Invalid or missing image data", details: nil))
+ return
+ }
+
+ let labeler: ImageLabeler
+ if let existing = instances[uid] {
+ labeler = existing
+ } else {
+ guard let dictionary = args["options"] as? [String: Any] else {
+ result(FlutterError(code: "invalid_args", message: "Missing options", details: nil))
+ return
+ }
+ let type = dictionary["type"] as? String ?? "base"
+ switch type {
+ case "base":
+ labeler = ImageLabeler.imageLabeler(options: getDefaultOptions(dictionary))
+ case "local":
+ guard let options = getLocalOptions(dictionary) else {
+ result(FlutterError(code: "invalid_args", message: "Missing path for local model", details: nil))
+ return
+ }
+ labeler = ImageLabeler.imageLabeler(options: options)
+ case "remote":
+ #if canImport(MLKitLinkFirebase)
+ guard let modelName = dictionary["modelName"] as? String, !modelName.isEmpty else {
+ result(FlutterError(
+ code: "invalid_args",
+ message: "Missing or invalid modelName for remote model",
+ details: nil
+ ))
+ return
+ }
+ if let options = getRemoteOptions(dictionary) {
+ labeler = ImageLabeler.imageLabeler(options: options)
+ } else {
+ result(FlutterError(
+ code: "Error Model has not been downloaded yet",
+ message: "Model has not been downloaded yet",
+ details: "Model has not been downloaded yet"
+ ))
+ return
+ }
+ #else
+ result(FlutterError(
+ code: "ERROR_MISSING_MLKIT_FIREBASE_MODELS",
+ message: "Add the GoogleMLKit/LinkFirebase pod to your Podfile to use Firebase-hosted models.",
+ details: nil
+ ))
+ return
+ #endif
+ default:
+ result(FlutterError(
+ code: type,
+ message: "Invalid model type: \(type)",
+ details: "Invalid model type: \(type)"
+ ))
+ return
+ }
+ instances[uid] = labeler
+ }
+
+ labeler.process(image) { labels, error in
+ if let error = error as NSError? {
+ result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+ return
+ }
+ guard let labels = labels else {
+ result([])
+ return
+ }
+ let labelData = labels.map { label in
+ [
+ "confidence": label.confidence,
+ "index": label.index,
+ "text": label.text
+ ] as [String: Any]
+ }
+ result(labelData)
+ }
+ }
+
+ private func getDefaultOptions(_ optionsData: [String: Any]) -> ImageLabelerOptions {
+ let options = ImageLabelerOptions()
+ if let conf = optionsData["confidenceThreshold"] as? NSNumber {
+ options.confidenceThreshold = conf
+ }
+ return options
+ }
+
+ private func getLocalOptions(_ optionsData: [String: Any]) -> CustomImageLabelerOptions? {
+ guard let path = optionsData["path"] as? String else { return nil }
+ let localModel = LocalModel(path: path)
+ let options = CustomImageLabelerOptions(localModel: localModel)
+ if let conf = optionsData["confidenceThreshold"] as? NSNumber {
+ options.confidenceThreshold = conf
+ }
+ if let maxCount = optionsData["maxCount"] as? NSNumber {
+ options.maxResultCount = maxCount.intValue
+ }
+ return options
+ }
+
+ #if canImport(MLKitLinkFirebase)
+ private func getRemoteOptions(_ optionsData: [String: Any]) -> CustomImageLabelerOptions? {
+ guard let modelName = optionsData["modelName"] as? String else { return nil }
+ let firebaseModelSource = FirebaseModelSource(name: modelName)
+ let remoteModel = CustomRemoteModel(remoteModelSource: firebaseModelSource)
+ guard ModelManager.modelManager().isModelDownloaded(remoteModel) else {
+ return nil
+ }
+ let options = CustomImageLabelerOptions(remoteModel: remoteModel)
+ if let conf = optionsData["confidenceThreshold"] as? NSNumber {
+ options.confidenceThreshold = conf
+ }
+ if let maxCount = optionsData["maxCount"] as? NSNumber {
+ options.maxResultCount = maxCount.intValue
+ }
+ return options
+ }
+
+ private func manageModel(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let modelTag = args["model"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing model argument", details: nil))
+ return
+ }
+ let firebaseModelSource = FirebaseModelSource(name: modelTag)
+ let model = CustomRemoteModel(remoteModelSource: firebaseModelSource)
+ let manager = GenericModelManager()
+ genericModelManager = manager
+ manager.manage(model: model, call: call, result: result)
+ }
+ #endif
+}
diff --git a/packages/google_mlkit_image_labeling/ios/Classes/MlKitEnums.h b/packages/google_mlkit_image_labeling/ios/Classes/MlKitEnums.h
deleted file mode 100644
index 771b492c..00000000
--- a/packages/google_mlkit_image_labeling/ios/Classes/MlKitEnums.h
+++ /dev/null
@@ -1,10 +0,0 @@
-//
-// MlKitEnums.h
-// Pods
-//
-// Created by Andrew Coutts on 7/13/22.
-//
-
-#ifndef MLKIT_FIREBASE_MODELS
- #define MLKIT_FIREBASE_MODELS 0
-#endif
diff --git a/packages/google_mlkit_image_labeling/ios/google_mlkit_image_labeling.podspec b/packages/google_mlkit_image_labeling/ios/google_mlkit_image_labeling.podspec
index 0a1f703c..8b3ef3bd 100644
--- a/packages/google_mlkit_image_labeling/ios/google_mlkit_image_labeling.podspec
+++ b/packages/google_mlkit_image_labeling/ios/google_mlkit_image_labeling.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/ImageLabeling', '~> 9.0.0'
s.dependency 'GoogleMLKit/ImageLabelingCustom', '~> 9.0.0'
diff --git a/packages/google_mlkit_language_id/ios/Assets/.gitkeep b/packages/google_mlkit_language_id/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_language_id/ios/Classes/GoogleMlKitLanguageIdPlugin.h b/packages/google_mlkit_language_id/ios/Classes/GoogleMlKitLanguageIdPlugin.h
deleted file mode 100644
index 1132ec3d..00000000
--- a/packages/google_mlkit_language_id/ios/Classes/GoogleMlKitLanguageIdPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitLanguageIdPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_language_id/ios/Classes/GoogleMlKitLanguageIdPlugin.m b/packages/google_mlkit_language_id/ios/Classes/GoogleMlKitLanguageIdPlugin.m
deleted file mode 100644
index 057dd3de..00000000
--- a/packages/google_mlkit_language_id/ios/Classes/GoogleMlKitLanguageIdPlugin.m
+++ /dev/null
@@ -1,104 +0,0 @@
-#import "GoogleMlKitLanguageIdPlugin.h"
-#import
-#import
-
-#define channelName @"google_mlkit_language_identifier"
-#define startLanguageIdentifier @"nlp#startLanguageIdentifier"
-#define closeLanguageIdentifier @"nlp#closeLanguageIdentifier"
-
-@implementation GoogleMlKitLanguageIdPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitLanguageIdPlugin* instance = [[GoogleMlKitLanguageIdPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startLanguageIdentifier]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:closeLanguageIdentifier]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (MLKLanguageIdentification*)initialize:(FlutterMethodCall *)call {
- NSNumber *confidence = call.arguments[@"confidence"];
- MLKLanguageIdentificationOptions *options = [[MLKLanguageIdentificationOptions alloc] initWithConfidenceThreshold:confidence.floatValue];
- return [MLKLanguageIdentification languageIdentificationWithOptions:options];
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- NSString *uid = call.arguments[@"id"];
- MLKLanguageIdentification *languageId = [instances objectForKey:uid];
- if (languageId == NULL) {
- languageId = [self initialize:call];
- instances[uid] = languageId;
- }
-
- BOOL possibleLanguages = [call.arguments[@"possibleLanguages"] boolValue];
- NSString *text = call.arguments[@"text"];
- if(possibleLanguages) {
- [self identifyPossibleLanguagesInText:text languageId:languageId result:result];
- } else {
- [self identifyLanguageInText:text languageId:languageId result:result];
- }
-}
-
-// Identifies the possible languages for a given text.
-// For each identified langauge a confidence value is returned as well.
-// Read more here: https://developers.google.com/ml-kit/language/identification/ios
-- (void)identifyPossibleLanguagesInText:(NSString *)text
- languageId:(MLKLanguageIdentification*) languageId
- result:(FlutterResult)result {
- [languageId identifyPossibleLanguagesForText:text
- completion:^(NSArray * _Nonnull identifiedLanguages,
- NSError * _Nullable error) {
- if (error != nil) {
- result(getFlutterError(error));
- return;
- }
- NSMutableArray *resultArray = [NSMutableArray array];
- for (MLKIdentifiedLanguage *language in identifiedLanguages) {
- NSDictionary *data = @{
- @"language" : language.languageTag,
- @"confidence" : [NSNumber numberWithFloat: language.confidence],
- };
- [resultArray addObject:data];
- }
- result(resultArray);
- }];
-}
-
-// Identify the language for a given text.
-// Read more here: https://developers.google.com/ml-kit/language/identification/ios
-- (void)identifyLanguageInText:(NSString *)text
- languageId:(MLKLanguageIdentification*) languageId
- result:(FlutterResult)result {
- [languageId identifyLanguageForText:text
- completion:^(NSString * _Nonnull languageTag,
- NSError * _Nullable error) {
- if (error != nil) {
- result(getFlutterError(error));
- return;
- }
- result(languageTag);
- }];
-}
-
-@end
diff --git a/packages/google_mlkit_language_id/ios/Classes/GoogleMlKitLanguageIdPlugin.swift b/packages/google_mlkit_language_id/ios/Classes/GoogleMlKitLanguageIdPlugin.swift
new file mode 100644
index 00000000..dd42779f
--- /dev/null
+++ b/packages/google_mlkit_language_id/ios/Classes/GoogleMlKitLanguageIdPlugin.swift
@@ -0,0 +1,109 @@
+import Flutter
+import MLKitLanguageID
+import google_mlkit_commons
+
+@objc
+public class GoogleMlKitLanguageIdPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: LanguageIdentification] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_language_identifier",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitLanguageIdPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "nlp#startLanguageIdentifier":
+ handleDetection(call: call, result: result)
+ case "nlp#closeLanguageIdentifier":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func initialize(call: FlutterMethodCall) -> LanguageIdentification? {
+ guard let args = call.arguments as? [String: Any],
+ let confidence = args["confidence"] as? NSNumber else { return nil }
+ let options = LanguageIdentificationOptions(confidenceThreshold: confidence.floatValue)
+ return LanguageIdentification.languageIdentification(options: options)
+ }
+
+ private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let uid = args["id"] as? String,
+ let text = args["text"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+ return
+ }
+
+ let languageId: LanguageIdentification
+ if let existing = instances[uid] {
+ languageId = existing
+ } else {
+ guard let newLanguageId = initialize(call: call) else {
+ result(FlutterError(code: "invalid_args", message: "Invalid options", details: nil))
+ return
+ }
+ languageId = newLanguageId
+ instances[uid] = languageId
+ }
+
+ let possibleLanguages = (args["possibleLanguages"] as? NSNumber)?.boolValue ?? false
+ if possibleLanguages {
+ identifyPossibleLanguages(in: text, languageId: languageId, result: result)
+ } else {
+ identifyLanguage(in: text, languageId: languageId, result: result)
+ }
+ }
+
+ private func identifyPossibleLanguages(
+ in text: String,
+ languageId: LanguageIdentification,
+ result: @escaping FlutterResult
+ ) {
+ languageId.identifyPossibleLanguages(for: text) { identifiedLanguages, error in
+ if let error = error as NSError? {
+ result(FlutterError(
+ code: "Error \(error.code)",
+ message: error.domain,
+ details: error.localizedDescription
+ ))
+ return
+ }
+ guard let list = identifiedLanguages else {
+ result([])
+ return
+ }
+ let resultArray = list.map { (lang: IdentifiedLanguage) -> [String: Any] in
+ ["language": lang.languageTag, "confidence": NSNumber(value: lang.confidence)]
+ }
+ result(resultArray)
+ }
+ }
+
+ private func identifyLanguage(
+ in text: String,
+ languageId: LanguageIdentification,
+ result: @escaping FlutterResult
+ ) {
+ languageId.identifyLanguage(for: text) { languageTag, error in
+ if let error = error as NSError? {
+ result(FlutterError(
+ code: "Error \(error.code)",
+ message: error.domain,
+ details: error.localizedDescription
+ ))
+ return
+ }
+ result(languageTag)
+ }
+ }
+}
diff --git a/packages/google_mlkit_language_id/ios/google_mlkit_language_id.podspec b/packages/google_mlkit_language_id/ios/google_mlkit_language_id.podspec
index 87a928ac..330af428 100644
--- a/packages/google_mlkit_language_id/ios/google_mlkit_language_id.podspec
+++ b/packages/google_mlkit_language_id/ios/google_mlkit_language_id.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/LanguageID', '~> 9.0.0'
s.dependency 'google_mlkit_commons'
diff --git a/packages/google_mlkit_object_detection/CHANGELOG.md b/packages/google_mlkit_object_detection/CHANGELOG.md
index d65c6250..7e0b5841 100644
--- a/packages/google_mlkit_object_detection/CHANGELOG.md
+++ b/packages/google_mlkit_object_detection/CHANGELOG.md
@@ -64,7 +64,7 @@
## 0.5.0
* Update dependencies.
-* __BREAKING:__ For remote firebase-hosted models in iOS, you must now explicitly add the `GoogleMLKit/LinkFirebase` pod and preprocessor flag to your Podfile. This removes an unnecessary dependency on FirebaseCore for those who do not need to use remote models. Please see the updated README for instructions.
+* __BREAKING:__ For remote firebase-hosted models in iOS, you must now explicitly add the `GoogleMLKit/LinkFirebase` pod to your Podfile. The Swift implementation uses `canImport(MLKitLinkFirebase)` (no preprocessor macro required). This removes an unnecessary dependency on FirebaseCore for those who do not need to use remote models. Please see the updated README for instructions.
## 0.4.0
diff --git a/packages/google_mlkit_object_detection/README.md b/packages/google_mlkit_object_detection/README.md
index 53fa3146..99124b9b 100644
--- a/packages/google_mlkit_object_detection/README.md
+++ b/packages/google_mlkit_object_detection/README.md
@@ -222,35 +222,19 @@ Google's standalone ML Kit library does **NOT** have any direct dependency with
Additionally, for iOS you have to update your app's Podfile.
-First, include `GoogleMLKit/LinkFirebase` and `Firebase` in your Podfile:
+Include `GoogleMLKit/LinkFirebase` and `Firebase` in your Podfile:
```ruby
platform :ios, '15.5'
...
-# Enable firebase-hosted models #
+# Enable firebase-hosted models (no preprocessor macro needed; Swift uses canImport(MLKitLinkFirebase))
pod 'GoogleMLKit/LinkFirebase'
pod 'Firebase'
```
-Next, add the preprocessor flag to enable the firebase remote models at compile time. To do that, update your existing `build_configurations` loop in the `post_install` step with the following:
-
-```ruby
-post_install do |installer|
- installer.pods_project.targets.each do |target|
- ... # Here are some configurations automatically generated by flutter
-
- target.build_configurations.each do |config|
- # Enable firebase-hosted ML models
- config.build_settings['GCC_PREPROCESSOR_DEFINITIONS'] ||= [
- '$(inherited)',
- 'MLKIT_FIREBASE_MODELS=1',
- ]
- end
- end
-end
-```
+**Note:** The iOS implementation uses Swift's `#if canImport(MLKitLinkFirebase)` to enable Firebase-hosted model code. Adding the `GoogleMLKit/LinkFirebase` pod is sufficient; you do not need to set `MLKIT_FIREBASE_MODELS=1` or any other preprocessor definitions. If the pod is not added, the plugin compiles without Firebase support and will return a clear error when Firebase options are used.
#### Usage
diff --git a/packages/google_mlkit_object_detection/ios/Assets/.gitkeep b/packages/google_mlkit_object_detection/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_object_detection/ios/Classes/GoogleMlKitObjectDetectionPlugin.h b/packages/google_mlkit_object_detection/ios/Classes/GoogleMlKitObjectDetectionPlugin.h
deleted file mode 100644
index dbd9b36e..00000000
--- a/packages/google_mlkit_object_detection/ios/Classes/GoogleMlKitObjectDetectionPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import
-
-@interface GoogleMlKitObjectDetectionPlugin : NSObject
-@end
diff --git a/packages/google_mlkit_object_detection/ios/Classes/GoogleMlKitObjectDetectionPlugin.m b/packages/google_mlkit_object_detection/ios/Classes/GoogleMlKitObjectDetectionPlugin.m
deleted file mode 100644
index e2e1b8d6..00000000
--- a/packages/google_mlkit_object_detection/ios/Classes/GoogleMlKitObjectDetectionPlugin.m
+++ /dev/null
@@ -1,199 +0,0 @@
-#import "GoogleMlKitObjectDetectionPlugin.h"
-#import
-#import
-#import
-#import
-#import
-
-#if MLKIT_FIREBASE_MODELS
-#import
-#endif
-
-#define channelName @"google_mlkit_object_detector"
-#define startObjectDetector @"vision#startObjectDetector"
-#define closeObjectDetector @"vision#closeObjectDetector"
-#define manageFirebaseModels @"vision#manageFirebaseModels"
-
-@implementation GoogleMlKitObjectDetectionPlugin {
- NSMutableDictionary *instances;
- GenericModelManager *genericModelManager;
-}
-
-+ (void)registerWithRegistrar:(NSObject*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitObjectDetectionPlugin* instance = [[GoogleMlKitObjectDetectionPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startObjectDetector]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:closeObjectDetector]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else if ([call.method isEqualToString:manageFirebaseModels]) {
-#if MLKIT_FIREBASE_MODELS
- [self manageModel:call result:result];
-#else
- result([FlutterError errorWithCode:@"ERROR_MISSING_MLKIT_FIREBASE_MODELS" message:@"You must define MLKIT_FIREBASE_MODELS=1 in your Podfile." details:nil]);
-#endif
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- MLKVisionImage *image = [MLKVisionImage visionImageFromData:call.arguments[@"imageData"]];
-
- NSString *uid = call.arguments[@"id"];
- MLKObjectDetector *objectDetector = [instances objectForKey:uid];
- if (objectDetector == NULL) {
- NSDictionary *dictionary = call.arguments[@"options"];
- NSString *type = dictionary[@"type"];
- if ([@"base" isEqualToString:type]) {
- MLKObjectDetectorOptions *options = [self getDefaultOptions:dictionary];
- objectDetector = [MLKObjectDetector objectDetectorWithOptions:options];
- } else if ([@"local" isEqualToString:type]) {
- MLKCustomObjectDetectorOptions *options = [self getLocalOptions:dictionary];
- objectDetector = [MLKObjectDetector objectDetectorWithOptions:options];
- } else if ([@"remote" isEqualToString:type]) {
-#if MLKIT_FIREBASE_MODELS
- MLKCustomObjectDetectorOptions *options = [self getRemoteOptions:dictionary];
- if (options == NULL) {
- FlutterError *error = [FlutterError errorWithCode:@"Error Model has not been downloaded yet"
- message:@"Model has not been downloaded yet"
- details:@"Model has not been downloaded yet"];
- result(error);
- return;
- }
- objectDetector = [MLKObjectDetector objectDetectorWithOptions:options];
-#else
- result([FlutterError errorWithCode:@"ERROR_MISSING_MLKIT_FIREBASE_MODELS" message:@"You must define MLKIT_FIREBASE_MODELS=1 in your Podfile." details:nil]);
-#endif
- } else {
- NSString *error = [NSString stringWithFormat:@"Invalid model type: %@", type];
- result([FlutterError errorWithCode:type
- message:error
- details:error]);
- return;
- }
- instances[uid] = objectDetector;
- }
-
- [objectDetector processImage:image
- completion:^(NSArray *_Nullable objects,
- NSError *_Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- } else if (!objects) {
- result(@[]);
- return;
- }
-
- NSMutableArray *objectsData = [NSMutableArray array];
- for (MLKObject *object in objects) {
- NSMutableArray *labels = [NSMutableArray array];
- for (MLKObjectLabel *label in object.labels) {
- [labels addObject:@{
- @"index" : @(label.index),
- @"text" : label.text,
- @"confidence" : @(label.confidence),
- }];
- }
- NSMutableDictionary *data = [NSMutableDictionary dictionaryWithDictionary:@{
- @"rect" : @{
- @"left" : @(object.frame.origin.x),
- @"top" : @(object.frame.origin.y),
- @"right" : @(object.frame.origin.x + object.frame.size.width),
- @"bottom" : @(object.frame.origin.y + object.frame.size.height)
- },
- @"labels" : labels
- }];
- if (object.trackingID != NULL) {
- data[@"trackingId"] = object.trackingID;
- }
- [objectsData addObject:data];
- }
-
- result(objectsData);
- }];
-}
-
-- (MLKObjectDetectorOptions *)getDefaultOptions:(NSDictionary *)dictionary {
- NSNumber *mode = dictionary[@"mode"];
- BOOL classify = [[dictionary objectForKey:@"classify"] boolValue];
- BOOL multiple = [[dictionary objectForKey:@"multiple"] boolValue];
-
- MLKObjectDetectorOptions *options = [[MLKObjectDetectorOptions alloc] init];
- options.detectorMode = mode.intValue == 0 ? MLKObjectDetectorModeStream : MLKObjectDetectorModeSingleImage;
- options.shouldEnableClassification = classify;
- options.shouldEnableMultipleObjects = multiple;
- return options;
-}
-
-- (MLKCustomObjectDetectorOptions *)getLocalOptions:(NSDictionary *)dictionary {
- NSNumber *mode = dictionary[@"mode"];
- BOOL classify = [[dictionary objectForKey:@"classify"] boolValue];
- BOOL multiple = [[dictionary objectForKey:@"multiple"] boolValue];
- NSNumber *threshold = dictionary[@"threshold"];
- NSNumber *maxLabels = dictionary[@"maxLabels"];
- NSString *path = dictionary[@"path"];
-
- MLKLocalModel *localModel = [[MLKLocalModel alloc] initWithPath:path];
- MLKCustomObjectDetectorOptions *options = [[MLKCustomObjectDetectorOptions alloc] initWithLocalModel:localModel];
- options.detectorMode = mode.intValue == 0 ? MLKObjectDetectorModeStream : MLKObjectDetectorModeSingleImage;
- options.shouldEnableClassification = classify;
- options.shouldEnableMultipleObjects = multiple;
- options.classificationConfidenceThreshold = threshold;
- options.maxPerObjectLabelCount = maxLabels.integerValue;
- return options;
-}
-
-#if MLKIT_FIREBASE_MODELS
-- (MLKCustomObjectDetectorOptions *)getRemoteOptions:(NSDictionary *)dictionary {
- NSNumber *mode = dictionary[@"mode"];
- BOOL classify = [[dictionary objectForKey:@"classify"] boolValue];
- BOOL multiple = [[dictionary objectForKey:@"multiple"] boolValue];
- NSNumber *threshold = dictionary[@"threshold"];
- NSNumber *maxLabels = dictionary[@"maxLabels"];
- NSString *modelName = dictionary[@"modelName"];
-
- MLKFirebaseModelSource *firebaseModelSource = [[MLKFirebaseModelSource alloc] initWithName:modelName];
- MLKCustomRemoteModel *remoteModel = [[MLKCustomRemoteModel alloc] initWithRemoteModelSource:firebaseModelSource];
-
- MLKModelManager *modelManager = [MLKModelManager modelManager];
- BOOL isModelDownloaded = [modelManager isModelDownloaded:remoteModel];
- if (!isModelDownloaded) {
- return NULL;
- }
-
- MLKCustomObjectDetectorOptions *options = [[MLKCustomObjectDetectorOptions alloc] initWithRemoteModel:remoteModel];
- options.detectorMode = mode.intValue == 0 ? MLKObjectDetectorModeStream : MLKObjectDetectorModeSingleImage;
- options.shouldEnableClassification = classify;
- options.shouldEnableMultipleObjects = multiple;
- options.classificationConfidenceThreshold = threshold;
- options.maxPerObjectLabelCount = maxLabels.integerValue;
- return options;
-}
-
-- (void)manageModel:(FlutterMethodCall *)call result:(FlutterResult)result {
- NSString *modelTag = call.arguments[@"model"];
- MLKFirebaseModelSource *firebaseModelSource = [[MLKFirebaseModelSource alloc] initWithName:modelTag];
- MLKCustomRemoteModel *model = [[MLKCustomRemoteModel alloc] initWithRemoteModelSource:firebaseModelSource];
- genericModelManager = [[GenericModelManager alloc] init];
- [genericModelManager manageModel:model call:call result:result];
-}
-#endif
-
-@end
diff --git a/packages/google_mlkit_object_detection/ios/Classes/GoogleMlKitObjectDetectionPlugin.swift b/packages/google_mlkit_object_detection/ios/Classes/GoogleMlKitObjectDetectionPlugin.swift
new file mode 100644
index 00000000..d92df9b3
--- /dev/null
+++ b/packages/google_mlkit_object_detection/ios/Classes/GoogleMlKitObjectDetectionPlugin.swift
@@ -0,0 +1,222 @@
+import Flutter
+import MLKitCommon
+import MLKitVision
+import MLKitObjectDetection
+import MLKitObjectDetectionCommon
+import MLKitObjectDetectionCustom
+import google_mlkit_commons
+
+#if canImport(MLKitLinkFirebase)
+import MLKitLinkFirebase
+#endif
+
+@objc
+public class GoogleMlKitObjectDetectionPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: ObjectDetector] = [:]
+ #if canImport(MLKitLinkFirebase)
+ private var genericModelManager: GenericModelManager?
+ #endif
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_object_detector",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitObjectDetectionPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "vision#startObjectDetector":
+ handleDetection(call: call, result: result)
+ case "vision#closeObjectDetector":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ case "vision#manageFirebaseModels":
+ #if canImport(MLKitLinkFirebase)
+ manageModel(call: call, result: result)
+ #else
+ result(FlutterError(
+ code: "ERROR_MISSING_MLKIT_FIREBASE_MODELS",
+ message: "Add the GoogleMLKit/LinkFirebase pod to your Podfile to use Firebase-hosted models.",
+ details: nil
+ ))
+ #endif
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let imageData = args["imageData"] as? [String: Any],
+ let uid = args["id"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+ return
+ }
+ guard let image = VisionImage.visionImage(from: imageData) else {
+ result(FlutterError(code: "invalid_image", message: "Invalid or missing image data", details: nil))
+ return
+ }
+
+ let objectDetector: ObjectDetector
+ if let existing = instances[uid] {
+ objectDetector = existing
+ } else {
+ guard let dictionary = args["options"] as? [String: Any] else {
+ result(FlutterError(code: "invalid_args", message: "Missing options", details: nil))
+ return
+ }
+ guard let detector = createDetector(from: dictionary, result: result) else { return }
+ objectDetector = detector
+ instances[uid] = objectDetector
+ }
+
+ objectDetector.process(image) { objects, error in
+ if let error = error as NSError? {
+ result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+ return
+ }
+ guard let objects = objects else {
+ result([])
+ return
+ }
+ let objectsData = objects.map { object -> [String: Any] in
+ let labels = object.labels.map { label in
+ [
+ "index": label.index,
+ "text": label.text,
+ "confidence": label.confidence
+ ] as [String: Any]
+ }
+ var data: [String: Any] = [
+ "rect": [
+ "left": Double(object.frame.origin.x),
+ "top": Double(object.frame.origin.y),
+ "right": Double(object.frame.origin.x + object.frame.size.width),
+ "bottom": Double(object.frame.origin.y + object.frame.size.height)
+ ] as [String: Double],
+ "labels": labels
+ ]
+ if let trackingID = object.trackingID {
+ data["trackingId"] = trackingID
+ }
+ return data
+ }
+ result(objectsData)
+ }
+ }
+
+ private func createDetector(from dictionary: [String: Any], result: @escaping FlutterResult) -> ObjectDetector? {
+ let type = dictionary["type"] as? String ?? "base"
+ switch type {
+ case "base":
+ return ObjectDetector.objectDetector(options: getDefaultOptions(dictionary))
+ case "local":
+ guard let options = getLocalOptions(dictionary) else {
+ result(FlutterError(code: "invalid_args", message: "Missing path for local model", details: nil))
+ return nil
+ }
+ return ObjectDetector.objectDetector(options: options)
+ case "remote":
+ #if canImport(MLKitLinkFirebase)
+ guard let modelName = dictionary["modelName"] as? String, !modelName.isEmpty else {
+ result(FlutterError(
+ code: "invalid_args",
+ message: "Missing or invalid modelName for remote model",
+ details: nil
+ ))
+ return nil
+ }
+ if let options = getRemoteOptions(dictionary) {
+ return ObjectDetector.objectDetector(options: options)
+ }
+ result(FlutterError(
+ code: "Error Model has not been downloaded yet",
+ message: "Model has not been downloaded yet",
+ details: "Model has not been downloaded yet"
+ ))
+ return nil
+ #else
+ result(FlutterError(
+ code: "ERROR_MISSING_MLKIT_FIREBASE_MODELS",
+ message: "Add the GoogleMLKit/LinkFirebase pod to your Podfile to use Firebase-hosted models.",
+ details: nil
+ ))
+ return nil
+ #endif
+ default:
+ result(FlutterError(
+ code: type,
+ message: "Invalid model type: \(type)",
+ details: "Invalid model type: \(type)"
+ ))
+ return nil
+ }
+ }
+
+ private func getDefaultOptions(_ dictionary: [String: Any]) -> ObjectDetectorOptions {
+ let options = ObjectDetectorOptions()
+ let mode = (dictionary["mode"] as? NSNumber)?.intValue ?? 0
+ options.detectorMode = mode == 0 ? ObjectDetectorMode.stream : ObjectDetectorMode.singleImage
+ options.shouldEnableClassification = (dictionary["classify"] as? NSNumber)?.boolValue ?? false
+ options.shouldEnableMultipleObjects = (dictionary["multiple"] as? NSNumber)?.boolValue ?? false
+ return options
+ }
+
+ private func getLocalOptions(_ dictionary: [String: Any]) -> CustomObjectDetectorOptions? {
+ guard let path = dictionary["path"] as? String else { return nil }
+ let localModel = LocalModel(path: path)
+ let options = CustomObjectDetectorOptions(localModel: localModel)
+ let mode = (dictionary["mode"] as? NSNumber)?.intValue ?? 0
+ options.detectorMode = mode == 0 ? ObjectDetectorMode.stream : ObjectDetectorMode.singleImage
+ options.shouldEnableClassification = (dictionary["classify"] as? NSNumber)?.boolValue ?? false
+ options.shouldEnableMultipleObjects = (dictionary["multiple"] as? NSNumber)?.boolValue ?? false
+ if let threshold = dictionary["threshold"] as? NSNumber {
+ options.classificationConfidenceThreshold = threshold
+ }
+ if let maxLabels = dictionary["maxLabels"] as? NSNumber {
+ options.maxPerObjectLabelCount = maxLabels.intValue
+ }
+ return options
+ }
+
+ #if canImport(MLKitLinkFirebase)
+ private func getRemoteOptions(_ dictionary: [String: Any]) -> CustomObjectDetectorOptions? {
+ guard let modelName = dictionary["modelName"] as? String else { return nil }
+ let firebaseModelSource = FirebaseModelSource(name: modelName)
+ let remoteModel = CustomRemoteModel(remoteModelSource: firebaseModelSource)
+ guard ModelManager.modelManager().isModelDownloaded(remoteModel) else {
+ return nil
+ }
+ let options = CustomObjectDetectorOptions(remoteModel: remoteModel)
+ let mode = (dictionary["mode"] as? NSNumber)?.intValue ?? 0
+ options.detectorMode = mode == 0 ? ObjectDetectorMode.stream : ObjectDetectorMode.singleImage
+ options.shouldEnableClassification = (dictionary["classify"] as? NSNumber)?.boolValue ?? false
+ options.shouldEnableMultipleObjects = (dictionary["multiple"] as? NSNumber)?.boolValue ?? false
+ if let threshold = dictionary["threshold"] as? NSNumber {
+ options.classificationConfidenceThreshold = threshold
+ }
+ if let maxLabels = dictionary["maxLabels"] as? NSNumber {
+ options.maxPerObjectLabelCount = maxLabels.intValue
+ }
+ return options
+ }
+
+ private func manageModel(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let modelTag = args["model"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing model argument", details: nil))
+ return
+ }
+ let firebaseModelSource = FirebaseModelSource(name: modelTag)
+ let model = CustomRemoteModel(remoteModelSource: firebaseModelSource)
+ let manager = GenericModelManager()
+ genericModelManager = manager
+ manager.manage(model: model, call: call, result: result)
+ }
+ #endif
+}
diff --git a/packages/google_mlkit_object_detection/ios/Classes/MlKitEnums.h b/packages/google_mlkit_object_detection/ios/Classes/MlKitEnums.h
deleted file mode 100644
index 771b492c..00000000
--- a/packages/google_mlkit_object_detection/ios/Classes/MlKitEnums.h
+++ /dev/null
@@ -1,10 +0,0 @@
-//
-// MlKitEnums.h
-// Pods
-//
-// Created by Andrew Coutts on 7/13/22.
-//
-
-#ifndef MLKIT_FIREBASE_MODELS
- #define MLKIT_FIREBASE_MODELS 0
-#endif
diff --git a/packages/google_mlkit_object_detection/ios/google_mlkit_object_detection.podspec b/packages/google_mlkit_object_detection/ios/google_mlkit_object_detection.podspec
index 948fbf88..6190c5f4 100644
--- a/packages/google_mlkit_object_detection/ios/google_mlkit_object_detection.podspec
+++ b/packages/google_mlkit_object_detection/ios/google_mlkit_object_detection.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/ObjectDetection', '~> 9.0.0'
s.dependency 'GoogleMLKit/ObjectDetectionCustom', '~> 9.0.0'
diff --git a/packages/google_mlkit_pose_detection/ios/Assets/.gitkeep b/packages/google_mlkit_pose_detection/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_pose_detection/ios/Classes/GoogleMlKitPoseDetectionPlugin.h b/packages/google_mlkit_pose_detection/ios/Classes/GoogleMlKitPoseDetectionPlugin.h
deleted file mode 100644
index ff17fcfe..00000000
--- a/packages/google_mlkit_pose_detection/ios/Classes/GoogleMlKitPoseDetectionPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import <Flutter/Flutter.h>
-
-@interface GoogleMlKitPoseDetectionPlugin : NSObject<FlutterPlugin>
-@end
diff --git a/packages/google_mlkit_pose_detection/ios/Classes/GoogleMlKitPoseDetectionPlugin.m b/packages/google_mlkit_pose_detection/ios/Classes/GoogleMlKitPoseDetectionPlugin.m
deleted file mode 100644
index 2cfc6afa..00000000
--- a/packages/google_mlkit_pose_detection/ios/Classes/GoogleMlKitPoseDetectionPlugin.m
+++ /dev/null
@@ -1,140 +0,0 @@
-#import "GoogleMlKitPoseDetectionPlugin.h"
-#import <MLKitVision/MLKitVision.h>
-#import <MLKitPoseDetection/MLKitPoseDetection.h>
-#import <MLKitPoseDetectionAccurate/MLKitPoseDetectionAccurate.h>
-#import <google_mlkit_commons/GoogleMlKitCommonsPlugin.h>
-
-#define channelName @"google_mlkit_pose_detector"
-#define startPoseDetector @"vision#startPoseDetector"
-#define closePoseDetector @"vision#closePoseDetector"
-
-@implementation GoogleMlKitPoseDetectionPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitPoseDetectionPlugin* instance = [[GoogleMlKitPoseDetectionPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startPoseDetector]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:closePoseDetector]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (MLKPoseDetector*)initialize:(FlutterMethodCall *)call {
- NSDictionary *options = call.arguments[@"options"];
- NSString *mode = options[@"mode"];
- MLKPoseDetectorMode detectorMode = MLKPoseDetectorModeStream;
- if ([mode isEqualToString:@"single"]) {
- detectorMode = MLKPoseDetectorModeSingleImage;
- }
-
- NSString *model = options[@"model"];
- if ([model isEqualToString:@"base"]) {
- MLKPoseDetectorOptions *options = [[MLKPoseDetectorOptions alloc] init];
- options.detectorMode = detectorMode;
- return [MLKPoseDetector poseDetectorWithOptions:options];
- } else {
- MLKAccuratePoseDetectorOptions *options =
- [[MLKAccuratePoseDetectorOptions alloc] init];
- options.detectorMode = detectorMode;
- return [MLKPoseDetector poseDetectorWithOptions:options];
- }
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- MLKVisionImage *image = [MLKVisionImage visionImageFromData:call.arguments[@"imageData"]];
-
- NSString *uid = call.arguments[@"id"];
- MLKPoseDetector *detector = [instances objectForKey:uid];
- if (detector == NULL) {
- detector = [self initialize:call];
- instances[uid] = detector;
- }
-
- [detector processImage:image
- completion:^(NSArray<MLKPose *> * _Nullable poses,
- NSError * _Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- } else if (!poses || poses.count == 0) {
- result(@[]);
- return;
- }
-
- NSMutableArray *array = [NSMutableArray array];
- for (MLKPose *pose in poses) {
- NSMutableArray *landmarks = [NSMutableArray array];
- for (MLKPoseLandmark *landmark in pose.landmarks) {
- NSMutableDictionary *dictionary = [NSMutableDictionary new];
- dictionary[@"type"] = [self poseLandmarkTypeToNumber:landmark.type];
- dictionary[@"x"] = @(landmark.position.x);
- dictionary[@"y"] = @(landmark.position.y);
- dictionary[@"z"] = @(landmark.position.z);
- dictionary[@"likelihood"] = @(landmark.inFrameLikelihood);
- [landmarks addObject:dictionary];
- }
- [array addObject:landmarks];
- }
- result(array);
- }];
-}
-
-- (NSNumber*)poseLandmarkTypeToNumber:(MLKPoseLandmarkType)landmarkType {
- NSMutableDictionary *types = [NSMutableDictionary new];
- types[MLKPoseLandmarkTypeNose] = @0;
- types[MLKPoseLandmarkTypeLeftEyeInner] = @1;
- types[MLKPoseLandmarkTypeLeftEye] = @2;
- types[MLKPoseLandmarkTypeLeftEyeOuter] = @3;
- types[MLKPoseLandmarkTypeRightEyeInner] = @4;
- types[MLKPoseLandmarkTypeRightEye] = @5;
- types[MLKPoseLandmarkTypeRightEyeOuter] = @6;
- types[MLKPoseLandmarkTypeLeftEar] = @7;
- types[MLKPoseLandmarkTypeRightEar] = @8;
- types[MLKPoseLandmarkTypeMouthLeft] = @9;
- types[MLKPoseLandmarkTypeMouthRight] = @10;
- types[MLKPoseLandmarkTypeLeftShoulder] = @11;
- types[MLKPoseLandmarkTypeRightShoulder] = @12;
- types[MLKPoseLandmarkTypeLeftElbow] = @13;
- types[MLKPoseLandmarkTypeRightElbow] = @14;
- types[MLKPoseLandmarkTypeLeftWrist] = @15;
- types[MLKPoseLandmarkTypeRightWrist] = @16;
- types[MLKPoseLandmarkTypeLeftPinkyFinger] = @17;
- types[MLKPoseLandmarkTypeRightPinkyFinger] = @18;
- types[MLKPoseLandmarkTypeLeftIndexFinger] = @19;
- types[MLKPoseLandmarkTypeRightIndexFinger] = @20;
- types[MLKPoseLandmarkTypeLeftThumb] = @21;
- types[MLKPoseLandmarkTypeRightThumb] = @22;
- types[MLKPoseLandmarkTypeLeftHip] = @23;
- types[MLKPoseLandmarkTypeRightHip] = @24;
- types[MLKPoseLandmarkTypeLeftKnee] = @25;
- types[MLKPoseLandmarkTypeRightKnee] = @26;
- types[MLKPoseLandmarkTypeLeftAnkle] = @27;
- types[MLKPoseLandmarkTypeRightAnkle] = @28;
- types[MLKPoseLandmarkTypeLeftHeel] = @29;
- types[MLKPoseLandmarkTypeRightHeel] = @30;
- types[MLKPoseLandmarkTypeLeftToe] = @31;
- types[MLKPoseLandmarkTypeRightToe] = @32;
- return types[landmarkType];
-}
-
-@end
diff --git a/packages/google_mlkit_pose_detection/ios/Classes/GoogleMlKitPoseDetectionPlugin.swift b/packages/google_mlkit_pose_detection/ios/Classes/GoogleMlKitPoseDetectionPlugin.swift
new file mode 100644
index 00000000..955b9366
--- /dev/null
+++ b/packages/google_mlkit_pose_detection/ios/Classes/GoogleMlKitPoseDetectionPlugin.swift
@@ -0,0 +1,136 @@
+import Flutter
+import MLKitVision
+import MLKitPoseDetection
+import MLKitPoseDetectionCommon
+import MLKitPoseDetectionAccurate
+import google_mlkit_commons
+
+@objc
+public class GoogleMlKitPoseDetectionPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: PoseDetector] = [:]
+
+ private static let landmarkTypeToNumber: [PoseLandmarkType: Int] = [
+ .nose: 0,
+ .leftEyeInner: 1,
+ .leftEye: 2,
+ .leftEyeOuter: 3,
+ .rightEyeInner: 4,
+ .rightEye: 5,
+ .rightEyeOuter: 6,
+ .leftEar: 7,
+ .rightEar: 8,
+ .mouthLeft: 9,
+ .mouthRight: 10,
+ .leftShoulder: 11,
+ .rightShoulder: 12,
+ .leftElbow: 13,
+ .rightElbow: 14,
+ .leftWrist: 15,
+ .rightWrist: 16,
+ .leftPinkyFinger: 17,
+ .rightPinkyFinger: 18,
+ .leftIndexFinger: 19,
+ .rightIndexFinger: 20,
+ .leftThumb: 21,
+ .rightThumb: 22,
+ .leftHip: 23,
+ .rightHip: 24,
+ .leftKnee: 25,
+ .rightKnee: 26,
+ .leftAnkle: 27,
+ .rightAnkle: 28,
+ .leftHeel: 29,
+ .rightHeel: 30,
+ .leftToe: 31,
+ .rightToe: 32
+ ]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_pose_detector",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitPoseDetectionPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "vision#startPoseDetector":
+ handleDetection(call: call, result: result)
+ case "vision#closePoseDetector":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func initialize(call: FlutterMethodCall) -> PoseDetector? {
+ guard let args = call.arguments as? [String: Any],
+ let optionsDict = args["options"] as? [String: Any] else { return nil }
+ let mode = optionsDict["mode"] as? String ?? "stream"
+ let detectorMode: PoseDetectorMode = mode == "single" ? .singleImage : .stream
+ let model = optionsDict["model"] as? String ?? "base"
+
+ if model == "base" {
+ let options = PoseDetectorOptions()
+ options.detectorMode = detectorMode
+ return PoseDetector.poseDetector(options: options)
+ } else {
+ let options = AccuratePoseDetectorOptions()
+ options.detectorMode = detectorMode
+ return PoseDetector.poseDetector(options: options)
+ }
+ }
+
+ private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let imageData = args["imageData"] as? [String: Any],
+ let uid = args["id"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+ return
+ }
+ guard let image = VisionImage.visionImage(from: imageData) else {
+ result(FlutterError(code: "invalid_image", message: "Invalid or missing image data", details: nil))
+ return
+ }
+
+ let detector: PoseDetector
+ if let existing = instances[uid] {
+ detector = existing
+ } else {
+ guard let newDetector = initialize(call: call) else {
+ result(FlutterError(code: "invalid_args", message: "Invalid options", details: nil))
+ return
+ }
+ detector = newDetector
+ instances[uid] = detector
+ }
+
+ detector.process(image) { poses, error in
+ if let error = error as NSError? {
+ result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+ return
+ }
+ guard let poses = poses, !poses.isEmpty else {
+ result([])
+ return
+ }
+ let array = poses.map { pose -> [[String: Any]] in
+ pose.landmarks.map { landmark in
+ [
+ "type": Self.landmarkTypeToNumber[landmark.type] ?? -1,
+ "x": landmark.position.x,
+ "y": landmark.position.y,
+ "z": landmark.position.z,
+ "likelihood": landmark.inFrameLikelihood
+ ] as [String: Any]
+ }
+ }
+ result(array)
+ }
+ }
+}
diff --git a/packages/google_mlkit_pose_detection/ios/google_mlkit_pose_detection.podspec b/packages/google_mlkit_pose_detection/ios/google_mlkit_pose_detection.podspec
index c0eda885..e71db861 100644
--- a/packages/google_mlkit_pose_detection/ios/google_mlkit_pose_detection.podspec
+++ b/packages/google_mlkit_pose_detection/ios/google_mlkit_pose_detection.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/PoseDetection', '~> 9.0.0'
s.dependency 'GoogleMLKit/PoseDetectionAccurate', '~> 9.0.0'
diff --git a/packages/google_mlkit_selfie_segmentation/ios/Assets/.gitkeep b/packages/google_mlkit_selfie_segmentation/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_selfie_segmentation/ios/Classes/GoogleMlKitSelfieSegmentationPlugin.h b/packages/google_mlkit_selfie_segmentation/ios/Classes/GoogleMlKitSelfieSegmentationPlugin.h
deleted file mode 100644
index 4182bff2..00000000
--- a/packages/google_mlkit_selfie_segmentation/ios/Classes/GoogleMlKitSelfieSegmentationPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import <Flutter/Flutter.h>
-
-@interface GoogleMlKitSelfieSegmentationPlugin : NSObject<FlutterPlugin>
-@end
diff --git a/packages/google_mlkit_selfie_segmentation/ios/Classes/GoogleMlKitSelfieSegmentationPlugin.m b/packages/google_mlkit_selfie_segmentation/ios/Classes/GoogleMlKitSelfieSegmentationPlugin.m
deleted file mode 100644
index 7cf51212..00000000
--- a/packages/google_mlkit_selfie_segmentation/ios/Classes/GoogleMlKitSelfieSegmentationPlugin.m
+++ /dev/null
@@ -1,99 +0,0 @@
-#import "GoogleMlKitSelfieSegmentationPlugin.h"
-#import <MLKitVision/MLKitVision.h>
-#import <MLKitSegmentationSelfie/MLKitSegmentationSelfie.h>
-#import <google_mlkit_commons/GoogleMlKitCommonsPlugin.h>
-
-#define channelName @"google_mlkit_selfie_segmenter"
-#define startSelfieSegmenter @"vision#startSelfieSegmenter"
-#define closeSelfieSegmenter @"vision#closeSelfieSegmenter"
-
-@implementation GoogleMlKitSelfieSegmentationPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitSelfieSegmentationPlugin* instance = [[GoogleMlKitSelfieSegmentationPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startSelfieSegmenter]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:closeSelfieSegmenter]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (MLKSegmenter*)initialize:(FlutterMethodCall *)call {
- BOOL isStream = [[call.arguments objectForKey:@"isStream"] boolValue];
- BOOL enableRawSizeMask = [[call.arguments objectForKey:@"enableRawSizeMask"] boolValue];
-
- MLKSelfieSegmenterOptions *options = [[MLKSelfieSegmenterOptions alloc] init];
- options.segmenterMode = isStream ? MLKSegmenterModeStream : MLKSegmenterModeSingleImage;
- options.shouldEnableRawSizeMask = enableRawSizeMask;
-
- return [MLKSegmenter segmenterWithOptions:options];
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- MLKVisionImage *image = [MLKVisionImage visionImageFromData:call.arguments[@"imageData"]];
-
- NSString *uid = call.arguments[@"id"];
- MLKSegmenter *segmenter = [instances objectForKey:uid];
- if (segmenter == NULL) {
- segmenter = [self initialize:call];
- instances[uid] = segmenter;
- }
-
- [segmenter processImage:image
- completion:^(MLKSegmentationMask * _Nullable mask,
- NSError * _Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- } else if (mask == NULL) {
- result(NULL);
- return;
- }
-
- size_t width = CVPixelBufferGetWidth(mask.buffer);
- size_t height = CVPixelBufferGetHeight(mask.buffer);
-
- CVPixelBufferLockBaseAddress(mask.buffer, kCVPixelBufferLock_ReadOnly);
- size_t maskBytesPerRow = CVPixelBufferGetBytesPerRow(mask.buffer);
- float *maskAddress = (float *)CVPixelBufferGetBaseAddress(mask.buffer);
-
- NSMutableArray *confidences = [NSMutableArray array];
- for (int row = 0; row < height; ++row) {
- for (int col = 0; col < width; ++col) {
- // Gets the confidence of the pixel in the mask being in the foreground.
- float confidence = maskAddress[col];
- [confidences addObject:@(confidence)];
- }
- maskAddress += maskBytesPerRow / sizeof(float);
- }
-
- NSMutableDictionary *dictionary = [NSMutableDictionary new];
- dictionary[@"width"] = @(width);
- dictionary[@"height"] = @(height);
- dictionary[@"confidences"] = confidences;
-
- result(dictionary);
- }];
-}
-
-@end
diff --git a/packages/google_mlkit_selfie_segmentation/ios/Classes/GoogleMlKitSelfieSegmentationPlugin.swift b/packages/google_mlkit_selfie_segmentation/ios/Classes/GoogleMlKitSelfieSegmentationPlugin.swift
new file mode 100644
index 00000000..5b2ee4c5
--- /dev/null
+++ b/packages/google_mlkit_selfie_segmentation/ios/Classes/GoogleMlKitSelfieSegmentationPlugin.swift
@@ -0,0 +1,103 @@
+import Flutter
+import MLKitVision
+import MLKitSegmentationSelfie
+import MLKitSegmentationCommon
+import google_mlkit_commons
+
+@objc
+public class GoogleMlKitSelfieSegmentationPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: Segmenter] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_selfie_segmenter",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitSelfieSegmentationPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "vision#startSelfieSegmenter":
+ handleDetection(call: call, result: result)
+ case "vision#closeSelfieSegmenter":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func initialize(call: FlutterMethodCall) -> Segmenter? {
+ guard let args = call.arguments as? [String: Any] else { return nil }
+ let isStream = (args["isStream"] as? NSNumber)?.boolValue ?? false
+ let enableRawSizeMask = (args["enableRawSizeMask"] as? NSNumber)?.boolValue ?? false
+ let options = SelfieSegmenterOptions()
+ options.segmenterMode = isStream ? .stream : .singleImage
+ options.shouldEnableRawSizeMask = enableRawSizeMask
+ return Segmenter.segmenter(options: options)
+ }
+
+ private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let imageData = args["imageData"] as? [String: Any],
+ let uid = args["id"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+ return
+ }
+ guard let image = VisionImage.visionImage(from: imageData) else {
+ result(FlutterError(code: "invalid_image", message: "Invalid or missing image data", details: nil))
+ return
+ }
+
+ let segmenter: Segmenter
+ if let existing = instances[uid] {
+ segmenter = existing
+ } else {
+ guard let newSegmenter = initialize(call: call) else {
+ result(FlutterError(code: "invalid_args", message: "Invalid options", details: nil))
+ return
+ }
+ segmenter = newSegmenter
+ instances[uid] = segmenter
+ }
+
+ segmenter.process(image) { mask, error in
+ if let error = error as NSError? {
+ result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+ return
+ }
+ guard let mask = mask else {
+ result(nil)
+ return
+ }
+ let width = CVPixelBufferGetWidth(mask.buffer)
+ let height = CVPixelBufferGetHeight(mask.buffer)
+ CVPixelBufferLockBaseAddress(mask.buffer, .readOnly)
+ let maskBytesPerRow = CVPixelBufferGetBytesPerRow(mask.buffer)
+ guard let baseAddress = CVPixelBufferGetBaseAddress(mask.buffer) else {
+ CVPixelBufferUnlockBaseAddress(mask.buffer, .readOnly)
+ result(FlutterError(code: "error", message: "Failed to get pixel buffer base address", details: nil))
+ return
+ }
+ let maskAddress = baseAddress.assumingMemoryBound(to: Float.self)
+ var confidences: [NSNumber] = []
+ var rowAddress = maskAddress
+ for _ in 0..<height {
+ for col in 0..<width {
+ confidences.append(NSNumber(value: rowAddress[col]))
+ }
+ rowAddress = rowAddress.advanced(by: maskBytesPerRow / MemoryLayout<Float>.size)
+ }
+ CVPixelBufferUnlockBaseAddress(mask.buffer, .readOnly)
+ result([
+ "width": width,
+ "height": height,
+ "confidences": confidences
+ ])
+ }
+ }
+}
diff --git a/packages/google_mlkit_selfie_segmentation/ios/google_mlkit_selfie_segmentation.podspec b/packages/google_mlkit_selfie_segmentation/ios/google_mlkit_selfie_segmentation.podspec
index 47e89760..17c323d0 100644
--- a/packages/google_mlkit_selfie_segmentation/ios/google_mlkit_selfie_segmentation.podspec
+++ b/packages/google_mlkit_selfie_segmentation/ios/google_mlkit_selfie_segmentation.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/SegmentationSelfie', '~> 9.0.0'
s.dependency 'google_mlkit_commons'
diff --git a/packages/google_mlkit_smart_reply/ios/Assets/.gitkeep b/packages/google_mlkit_smart_reply/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_smart_reply/ios/Classes/GoogleMlKitSmartReplyPlugin.h b/packages/google_mlkit_smart_reply/ios/Classes/GoogleMlKitSmartReplyPlugin.h
deleted file mode 100644
index 68560921..00000000
--- a/packages/google_mlkit_smart_reply/ios/Classes/GoogleMlKitSmartReplyPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import <Flutter/Flutter.h>
-
-@interface GoogleMlKitSmartReplyPlugin : NSObject<FlutterPlugin>
-@end
diff --git a/packages/google_mlkit_smart_reply/ios/Classes/GoogleMlKitSmartReplyPlugin.m b/packages/google_mlkit_smart_reply/ios/Classes/GoogleMlKitSmartReplyPlugin.m
deleted file mode 100644
index 459ffcfe..00000000
--- a/packages/google_mlkit_smart_reply/ios/Classes/GoogleMlKitSmartReplyPlugin.m
+++ /dev/null
@@ -1,88 +0,0 @@
-#import "GoogleMlKitSmartReplyPlugin.h"
-#import <MLKitSmartReply/MLKitSmartReply.h>
-#import <google_mlkit_commons/GoogleMlKitCommonsPlugin.h>
-
-#define channelName @"google_mlkit_smart_reply"
-#define startSmartReply @"nlp#startSmartReply"
-#define closeSmartReply @"nlp#closeSmartReply"
-
-@implementation GoogleMlKitSmartReplyPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitSmartReplyPlugin* instance = [[GoogleMlKitSmartReplyPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startSmartReply]) {
- [self handleStartSmartReply:call result:result];
- } else if ([call.method isEqualToString:closeSmartReply]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (void)handleStartSmartReply:(FlutterMethodCall *)call result:(FlutterResult)result {
- NSMutableArray *conversation = [NSMutableArray array];
- NSArray *json = call.arguments[@"conversation"];
- for (NSDictionary *object in json) {
- NSString *text = object[@"message"];
- NSNumber *timestamp = object[@"timestamp"];
- NSString *userId = object[@"userId"];
- BOOL isLocalUser = [userId isEqualToString: @"local"];
-
- MLKTextMessage *message = [[MLKTextMessage alloc]
- initWithText: text
- timestamp:timestamp.doubleValue
- userID:userId
- isLocalUser:isLocalUser];
- [conversation addObject:message];
- }
-
- NSString *uid = call.arguments[@"id"];
- MLKSmartReply *smartReply = [instances objectForKey:uid];
- if (smartReply == NULL) {
- smartReply = [MLKSmartReply smartReply];
- instances[uid] = smartReply;
- }
-
- [smartReply suggestRepliesForMessages:conversation
- completion:^(MLKSmartReplySuggestionResult * _Nullable smartReplySuggestionResult,
- NSError * _Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- } else if (!smartReplySuggestionResult) {
- result(NULL);
- return;
- }
-
- NSMutableDictionary *suggestionResult = [NSMutableDictionary dictionary];
- suggestionResult[@"status"] = @(smartReplySuggestionResult.status);
- if (smartReplySuggestionResult.status == MLKSmartReplyResultStatusSuccess) {
- NSMutableArray *suggestions = [NSMutableArray array];
- for (MLKSmartReplySuggestion *suggestion in smartReplySuggestionResult.suggestions) {
- [suggestions addObject:suggestion.text];
- }
- suggestionResult[@"suggestions"] = suggestions;
- }
- result(suggestionResult);
- }];
-}
-
-@end
diff --git a/packages/google_mlkit_smart_reply/ios/Classes/GoogleMlKitSmartReplyPlugin.swift b/packages/google_mlkit_smart_reply/ios/Classes/GoogleMlKitSmartReplyPlugin.swift
new file mode 100644
index 00000000..90c9d46c
--- /dev/null
+++ b/packages/google_mlkit_smart_reply/ios/Classes/GoogleMlKitSmartReplyPlugin.swift
@@ -0,0 +1,83 @@
+import Flutter
+import MLKitSmartReply
+import google_mlkit_commons
+
+@objc
+public class GoogleMlKitSmartReplyPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: SmartReply] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_smart_reply",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitSmartReplyPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "nlp#startSmartReply":
+ handleStartSmartReply(call: call, result: result)
+ case "nlp#closeSmartReply":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func handleStartSmartReply(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let json = args["conversation"] as? [[String: Any]],
+ let uid = args["id"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+ return
+ }
+ var conversation: [TextMessage] = []
+ for object in json {
+ guard let text = object["message"] as? String,
+ let timestampNum = object["timestamp"] as? NSNumber,
+ let userId = object["userId"] as? String else { continue }
+ let timestamp = timestampNum.doubleValue
+ let isLocalUser = userId == "local"
+ let message = TextMessage(
+ text: text,
+ timestamp: timestamp,
+ userID: userId,
+ isLocalUser: isLocalUser
+ )
+ conversation.append(message)
+ }
+
+ let smartReply: SmartReply
+ if let existing = instances[uid] {
+ smartReply = existing
+ } else {
+ smartReply = SmartReply.smartReply()
+ instances[uid] = smartReply
+ }
+
+ smartReply.suggestReplies(for: conversation) { suggestionResult, error in
+ if let error = error as NSError? {
+ result(FlutterError(
+ code: "Error \(error.code)",
+ message: error.domain,
+ details: error.localizedDescription
+ ))
+ return
+ }
+ guard let suggestionResult = suggestionResult else {
+ result(nil)
+ return
+ }
+ var dict: [String: Any] = ["status": suggestionResult.status.rawValue]
+ if suggestionResult.status.rawValue == 0 { // MLKSmartReplyResultStatusSuccess
+ dict["suggestions"] = suggestionResult.suggestions.map { $0.text }
+ }
+ result(dict)
+ }
+ }
+}
diff --git a/packages/google_mlkit_smart_reply/ios/google_mlkit_smart_reply.podspec b/packages/google_mlkit_smart_reply/ios/google_mlkit_smart_reply.podspec
index cb03f48d..39e96a34 100644
--- a/packages/google_mlkit_smart_reply/ios/google_mlkit_smart_reply.podspec
+++ b/packages/google_mlkit_smart_reply/ios/google_mlkit_smart_reply.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/SmartReply', '~> 9.0.0'
s.dependency 'google_mlkit_commons'
diff --git a/packages/google_mlkit_subject_segmentation/ios/Assets/.gitkeep b/packages/google_mlkit_subject_segmentation/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_subject_segmentation/ios/Classes/GoogleMlKitSubjectSegmentationPlugin.h b/packages/google_mlkit_subject_segmentation/ios/Classes/GoogleMlKitSubjectSegmentationPlugin.h
deleted file mode 100644
index d4c491c5..00000000
--- a/packages/google_mlkit_subject_segmentation/ios/Classes/GoogleMlKitSubjectSegmentationPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import <Flutter/Flutter.h>
-
-@interface GoogleMlKitSubjectSegmentationPlugin : NSObject<FlutterPlugin>
-@end
diff --git a/packages/google_mlkit_subject_segmentation/ios/Classes/GoogleMlKitSubjectSegmentationPlugin.m b/packages/google_mlkit_subject_segmentation/ios/Classes/GoogleMlKitSubjectSegmentationPlugin.m
deleted file mode 100644
index 8c703c58..00000000
--- a/packages/google_mlkit_subject_segmentation/ios/Classes/GoogleMlKitSubjectSegmentationPlugin.m
+++ /dev/null
@@ -1,16 +0,0 @@
-#import "GoogleMlKitSubjectSegmentationPlugin.h"
-
-@implementation GoogleMlKitSubjectSegmentationPlugin
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:@"google_mlkit_subject_segmentation"
- binaryMessenger:[registrar messenger]];
- GoogleMlKitSubjectSegmentationPlugin* instance = [[GoogleMlKitSubjectSegmentationPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
- result(FlutterMethodNotImplemented);
-}
-
-@end
diff --git a/packages/google_mlkit_subject_segmentation/ios/Classes/GoogleMlKitSubjectSegmentationPlugin.swift b/packages/google_mlkit_subject_segmentation/ios/Classes/GoogleMlKitSubjectSegmentationPlugin.swift
new file mode 100644
index 00000000..db17c8d2
--- /dev/null
+++ b/packages/google_mlkit_subject_segmentation/ios/Classes/GoogleMlKitSubjectSegmentationPlugin.swift
@@ -0,0 +1,17 @@
+import Flutter
+
+@objc
+public class GoogleMlKitSubjectSegmentationPlugin: NSObject, FlutterPlugin {
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_subject_segmentation",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitSubjectSegmentationPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ result(FlutterMethodNotImplemented)
+ }
+}
diff --git a/packages/google_mlkit_subject_segmentation/ios/google_mlkit_subject_segmentation.podspec b/packages/google_mlkit_subject_segmentation/ios/google_mlkit_subject_segmentation.podspec
index 1abf19c2..04c00a39 100644
--- a/packages/google_mlkit_subject_segmentation/ios/google_mlkit_subject_segmentation.podspec
+++ b/packages/google_mlkit_subject_segmentation/ios/google_mlkit_subject_segmentation.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.author = 'Multiple Authors'
+ s.author = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
# s.dependency 'GoogleMLKit/SubjectSegmentation', '~> 6.0.0'
s.platform = :ios, '15.5'
diff --git a/packages/google_mlkit_text_recognition/ios/Assets/.gitkeep b/packages/google_mlkit_text_recognition/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_text_recognition/ios/Classes/GoogleMlKitTextRecognitionPlugin.h b/packages/google_mlkit_text_recognition/ios/Classes/GoogleMlKitTextRecognitionPlugin.h
deleted file mode 100644
index 36d3a2e9..00000000
--- a/packages/google_mlkit_text_recognition/ios/Classes/GoogleMlKitTextRecognitionPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import <Flutter/Flutter.h>
-
-@interface GoogleMlKitTextRecognitionPlugin : NSObject<FlutterPlugin>
-@end
diff --git a/packages/google_mlkit_text_recognition/ios/Classes/GoogleMlKitTextRecognitionPlugin.m b/packages/google_mlkit_text_recognition/ios/Classes/GoogleMlKitTextRecognitionPlugin.m
deleted file mode 100644
index b388ff3a..00000000
--- a/packages/google_mlkit_text_recognition/ios/Classes/GoogleMlKitTextRecognitionPlugin.m
+++ /dev/null
@@ -1,205 +0,0 @@
-#import "GoogleMlKitTextRecognitionPlugin.h"
-#import <MLKitTextRecognition/MLKitTextRecognition.h>
-#import <MLKitVision/MLKitVision.h>
-#if __has_include(<MLKitTextRecognitionChinese/MLKitTextRecognitionChinese.h>)
-#import <MLKitTextRecognitionChinese/MLKitTextRecognitionChinese.h>
-#define _has_chinese 1
-#endif
-#if __has_include(<MLKitTextRecognitionDevanagari/MLKitTextRecognitionDevanagari.h>)
-#import <MLKitTextRecognitionDevanagari/MLKitTextRecognitionDevanagari.h>
-#define _has_devanagari 1
-#endif
-#if __has_include(<MLKitTextRecognitionJapanese/MLKitTextRecognitionJapanese.h>)
-#import <MLKitTextRecognitionJapanese/MLKitTextRecognitionJapanese.h>
-#define _has_japanese 1
-#endif
-#if __has_include(<MLKitTextRecognitionKorean/MLKitTextRecognitionKorean.h>)
-#import <MLKitTextRecognitionKorean/MLKitTextRecognitionKorean.h>
-#define _has_korean 1
-#endif
-#import <google_mlkit_commons/GoogleMlKitCommonsPlugin.h>
-
-#define channelName @"google_mlkit_text_recognizer"
-#define startTextRecognizer @"vision#startTextRecognizer"
-#define closeTextRecognizer @"vision#closeTextRecognizer"
-
-@implementation GoogleMlKitTextRecognitionPlugin {
- NSMutableDictionary *instances;
-}
-
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitTextRecognitionPlugin* instance = [[GoogleMlKitTextRecognitionPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startTextRecognizer]) {
- [self handleDetection:call result:result];
- } else if ([call.method isEqualToString:closeTextRecognizer]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (MLKTextRecognizer*)initialize:(FlutterMethodCall *)call {
- NSNumber *scriptValue = call.arguments[@"script"];
- switch(scriptValue.intValue) {
- default : {
- MLKTextRecognizerOptions *latinOptions = [[MLKTextRecognizerOptions alloc] init];
- return [MLKTextRecognizer textRecognizerWithOptions:latinOptions];
- }
-#ifdef _has_chinese
- case 1 : {
- MLKChineseTextRecognizerOptions *chineseOptions = [[MLKChineseTextRecognizerOptions alloc] init];
- return [MLKTextRecognizer textRecognizerWithOptions:chineseOptions];
- }
-#endif
-#ifdef _has_devanagari
- case 2 : {
- MLKDevanagariTextRecognizerOptions *devanagariOptions = [[MLKDevanagariTextRecognizerOptions alloc] init];
- return [MLKTextRecognizer textRecognizerWithOptions:devanagariOptions];
- }
-#endif
-#ifdef _has_japanese
- case 3 : {
- MLKJapaneseTextRecognizerOptions *japaneseOptions = [[MLKJapaneseTextRecognizerOptions alloc] init];
- return [MLKTextRecognizer textRecognizerWithOptions:japaneseOptions];
- }
-#endif
-#ifdef _has_korean
- case 4 : {
- MLKKoreanTextRecognizerOptions *koreanOptions = [[MLKKoreanTextRecognizerOptions alloc] init];
- return [MLKTextRecognizer textRecognizerWithOptions:koreanOptions];
- }
-#endif
- }
-}
-
-- (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
- MLKVisionImage *image = [MLKVisionImage visionImageFromData:call.arguments[@"imageData"]];
-
- NSString *uid = call.arguments[@"id"];
- MLKTextRecognizer *textRecognizer = [instances objectForKey:uid];
- if (textRecognizer == NULL) {
- textRecognizer = [self initialize:call];
- instances[uid] = textRecognizer;
- }
-
- [textRecognizer processImage:image
- completion:^(MLKText *_Nullable visionText,
- NSError *_Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- } else if (!visionText) {
- result(@{@"text" : @"", @"blocks" : @[]});
- return;
- }
-
- NSMutableDictionary *textResult = [NSMutableDictionary dictionary];
- textResult[@"text"] = visionText.text;
-
- NSMutableArray *textBlocks = [NSMutableArray array];
- for (MLKTextBlock *block in visionText.blocks) {
- NSMutableDictionary *blockData = [NSMutableDictionary dictionary];
-
- [self addData:blockData
- cornerPoints:block.cornerPoints
- frame:block.frame
- languages:block.recognizedLanguages
- text:block.text
- confidence:[NSNull null]
- angle:[NSNull null]];
-
- NSMutableArray *textLines = [NSMutableArray array];
- for (MLKTextLine *line in block.lines) {
- NSMutableDictionary *lineData = [NSMutableDictionary dictionary];
-
- [self addData:lineData
- cornerPoints:line.cornerPoints
- frame:line.frame
- languages:line.recognizedLanguages
- text:line.text
- confidence:[NSNull null] // TODO: replace with actual value once it is supported by Google's native API
- angle:[NSNull null]]; // TODO: replace with actual value once it is supported by Google's native API
- // API: https://developers.google.com/ml-kit/reference/ios/mlkittextrecognitioncommon/api/reference/Classes/MLKTextLine
-
- NSMutableArray *elementsData = [NSMutableArray array];
- for (MLKTextElement *element in line.elements) {
- NSMutableDictionary *elementData = [NSMutableDictionary dictionary];
-
- [self addData:elementData
- cornerPoints:element.cornerPoints
- frame:element.frame
- languages:element.recognizedLanguages
- text:element.text
- confidence:[NSNull null] // TODO: replace with actual value once it is supported by Google's native API
- angle:[NSNull null]]; // TODO: replace with actual value once it is supported by Google's native API
- // API: https://developers.google.com/ml-kit/reference/ios/mlkittextrecognitioncommon/api/reference/Classes/MLKTextElement
-
- // TODO: add when Google's native API supports it
- elementData[@"symbols"] = @[];
- [elementsData addObject:elementData];
- }
-
- lineData[@"elements"] = elementsData;
- [textLines addObject:lineData];
- }
-
- blockData[@"lines"] = textLines;
- [textBlocks addObject:blockData];
- }
-
- textResult[@"blocks"] = textBlocks;
- result(textResult);
- }];
-}
-
-- (void)addData:(NSMutableDictionary *)addTo
- cornerPoints:(NSArray *)cornerPoints
- frame:(CGRect)frame
- languages:(NSArray *)languages
- text:(NSString *)text
- confidence:(NSNumber *)confidence
- angle:(NSNumber *)angle {
- NSMutableArray *points = [NSMutableArray array];
- for (NSValue *point in cornerPoints) {
- [points addObject:@{ @"x" : @(point.CGPointValue.x),
- @"y" : @(point.CGPointValue.y)}];
- }
-
- NSMutableArray *allLanguageData = [NSMutableArray array];
- for (MLKTextRecognizedLanguage *language in languages) {
- if (language.languageCode != NULL)
- [allLanguageData addObject: language.languageCode];
- }
-
- [addTo addEntriesFromDictionary:@{
- @"points" : points,
- @"rect" : @{
- @"left" : @(frame.origin.x),
- @"top" : @(frame.origin.y),
- @"right" : @(frame.origin.x + frame.size.width),
- @"bottom" : @(frame.origin.y + frame.size.height)
- },
- @"recognizedLanguages" : allLanguageData,
- @"text" : text,
- @"confidence" : confidence,
- @"angle" : angle,
- }];
-}
-
-@end
diff --git a/packages/google_mlkit_text_recognition/ios/Classes/GoogleMlKitTextRecognitionPlugin.swift b/packages/google_mlkit_text_recognition/ios/Classes/GoogleMlKitTextRecognitionPlugin.swift
new file mode 100644
index 00000000..fb7e74f6
--- /dev/null
+++ b/packages/google_mlkit_text_recognition/ios/Classes/GoogleMlKitTextRecognitionPlugin.swift
@@ -0,0 +1,179 @@
+import Flutter
+import MLKitVision
+import MLKitTextRecognition
+import MLKitTextRecognitionCommon
+import google_mlkit_commons
+#if canImport(MLKitTextRecognitionChinese)
+import MLKitTextRecognitionChinese
+#endif
+#if canImport(MLKitTextRecognitionDevanagari)
+import MLKitTextRecognitionDevanagari
+#endif
+#if canImport(MLKitTextRecognitionJapanese)
+import MLKitTextRecognitionJapanese
+#endif
+#if canImport(MLKitTextRecognitionKorean)
+import MLKitTextRecognitionKorean
+#endif
+
+@objc
+public class GoogleMlKitTextRecognitionPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: TextRecognizer] = [:]
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_text_recognizer",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitTextRecognitionPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "vision#startTextRecognizer":
+ handleDetection(call: call, result: result)
+ case "vision#closeTextRecognizer":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func initialize(call: FlutterMethodCall) -> TextRecognizer {
+ let scriptIndex = (call.arguments as? [String: Any])?["script"] as? Int ?? 0
+ switch scriptIndex {
+ case 0:
+ return TextRecognizer.textRecognizer(options: TextRecognizerOptions())
+ case 1:
+ #if canImport(MLKitTextRecognitionChinese)
+ return TextRecognizer.textRecognizer(options: ChineseTextRecognizerOptions())
+ #else
+ return TextRecognizer.textRecognizer(options: TextRecognizerOptions())
+ #endif
+ case 2:
+ #if canImport(MLKitTextRecognitionDevanagari)
+ return TextRecognizer.textRecognizer(options: DevanagariTextRecognizerOptions())
+ #else
+ return TextRecognizer.textRecognizer(options: TextRecognizerOptions())
+ #endif
+ case 3:
+ #if canImport(MLKitTextRecognitionJapanese)
+ return TextRecognizer.textRecognizer(options: JapaneseTextRecognizerOptions())
+ #else
+ return TextRecognizer.textRecognizer(options: TextRecognizerOptions())
+ #endif
+ case 4:
+ #if canImport(MLKitTextRecognitionKorean)
+ return TextRecognizer.textRecognizer(options: KoreanTextRecognizerOptions())
+ #else
+ return TextRecognizer.textRecognizer(options: TextRecognizerOptions())
+ #endif
+ default:
+ return TextRecognizer.textRecognizer(options: TextRecognizerOptions())
+ }
+ }
+
+ private func handleDetection(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let imageData = args["imageData"] as? [String: Any],
+ let uid = args["id"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+ return
+ }
+ guard let image = VisionImage.visionImage(from: imageData) else {
+ result(FlutterError(code: "invalid_image", message: "Invalid or missing image data", details: nil))
+ return
+ }
+
+ let recognizer: TextRecognizer
+ if let existing = instances[uid] {
+ recognizer = existing
+ } else {
+ recognizer = initialize(call: call)
+ instances[uid] = recognizer
+ }
+
+ recognizer.process(image) { visionText, error in
+ if let error = error as NSError? {
+ result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+ return
+ }
+ guard let visionText = visionText else {
+ result(["text": "", "blocks": [] as [[String: Any]]])
+ return
+ }
+ var textBlocks: [[String: Any]] = []
+ for block in visionText.blocks {
+ var blockData = self.addData(
+ cornerPoints: block.cornerPoints,
+ frame: block.frame,
+ languages: block.recognizedLanguages,
+ text: block.text,
+ confidence: nil,
+ angle: nil
+ )
+ var textLines: [[String: Any]] = []
+ for line in block.lines {
+ var lineData = self.addData(
+ cornerPoints: line.cornerPoints,
+ frame: line.frame,
+ languages: line.recognizedLanguages,
+ text: line.text,
+ confidence: nil,
+ angle: nil
+ )
+ var elementsData: [[String: Any]] = []
+ for element in line.elements {
+ var elementData = self.addData(
+ cornerPoints: element.cornerPoints,
+ frame: element.frame,
+ languages: element.recognizedLanguages,
+ text: element.text,
+ confidence: nil,
+ angle: nil
+ )
+ elementData["symbols"] = [] as [[String: Any]]
+ elementsData.append(elementData)
+ }
+ lineData["elements"] = elementsData
+ textLines.append(lineData)
+ }
+ blockData["lines"] = textLines
+ textBlocks.append(blockData)
+ }
+ result(["text": visionText.text, "blocks": textBlocks])
+ }
+ }
+
+ private func addData(
+ cornerPoints: [NSValue],
+ frame: CGRect,
+ languages: [TextRecognizedLanguage],
+ text: String,
+ confidence: NSNumber?,
+ angle: NSNumber?
+ ) -> [String: Any] {
+ let points = cornerPoints.map { (point: NSValue) -> [String: Double] in
+ let cgPoint = point.cgPointValue
+ return ["x": Double(cgPoint.x), "y": Double(cgPoint.y)]
+ }
+ let allLanguageData = languages.compactMap { $0.languageCode }
+ return [
+ "points": points,
+ "rect": [
+ "left": Double(frame.origin.x),
+ "top": Double(frame.origin.y),
+ "right": Double(frame.origin.x + frame.size.width),
+ "bottom": Double(frame.origin.y + frame.size.height)
+ ] as [String: Double],
+ "recognizedLanguages": allLanguageData,
+ "text": text,
+ "confidence": confidence ?? NSNull(),
+ "angle": angle ?? NSNull()
+ ]
+ }
+}
diff --git a/packages/google_mlkit_text_recognition/ios/google_mlkit_text_recognition.podspec b/packages/google_mlkit_text_recognition/ios/google_mlkit_text_recognition.podspec
index 0bbfe386..c98523b0 100644
--- a/packages/google_mlkit_text_recognition/ios/google_mlkit_text_recognition.podspec
+++ b/packages/google_mlkit_text_recognition/ios/google_mlkit_text_recognition.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/TextRecognition', '~> 9.0.0'
s.dependency 'google_mlkit_commons'
diff --git a/packages/google_mlkit_translation/ios/Assets/.gitkeep b/packages/google_mlkit_translation/ios/Assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/google_mlkit_translation/ios/Classes/GoogleMlKitTranslationPlugin.h b/packages/google_mlkit_translation/ios/Classes/GoogleMlKitTranslationPlugin.h
deleted file mode 100644
index 47976ede..00000000
--- a/packages/google_mlkit_translation/ios/Classes/GoogleMlKitTranslationPlugin.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#import <Flutter/Flutter.h>
-
-@interface GoogleMlKitTranslationPlugin : NSObject<FlutterPlugin>
-@end
diff --git a/packages/google_mlkit_translation/ios/Classes/GoogleMlKitTranslationPlugin.m b/packages/google_mlkit_translation/ios/Classes/GoogleMlKitTranslationPlugin.m
deleted file mode 100644
index 3136b306..00000000
--- a/packages/google_mlkit_translation/ios/Classes/GoogleMlKitTranslationPlugin.m
+++ /dev/null
@@ -1,88 +0,0 @@
-#import "GoogleMlKitTranslationPlugin.h"
-#import <MLKitTranslate/MLKitTranslate.h>
-#import <google_mlkit_commons/GoogleMlKitCommonsPlugin.h>
-
-#define channelName @"google_mlkit_on_device_translator"
-#define startLanguageTranslator @"nlp#startLanguageTranslator"
-#define closeLanguageTranslator @"nlp#closeLanguageTranslator"
-#define manageLanguageModelModels @"nlp#manageLanguageModelModels"
-
-@implementation GoogleMlKitTranslationPlugin {
- NSMutableDictionary *instances;
- GenericModelManager *genericModelManager;
-}
-
-+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
- FlutterMethodChannel* channel = [FlutterMethodChannel
- methodChannelWithName:channelName
- binaryMessenger:[registrar messenger]];
- GoogleMlKitTranslationPlugin* instance = [[GoogleMlKitTranslationPlugin alloc] init];
- [registrar addMethodCallDelegate:instance channel:channel];
-}
-
-- (id)init {
- self = [super init];
- if (self)
- instances = [NSMutableDictionary dictionary];
- return self;
-}
-
-- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
- if ([call.method isEqualToString:startLanguageTranslator]) {
- [self handleTranslation:call result:result];
- } else if ([call.method isEqualToString:manageLanguageModelModels]) {
- [self manageModel:call result:result];
- } else if ([call.method isEqualToString:closeLanguageTranslator]) {
- NSString *uid = call.arguments[@"id"];
- [instances removeObjectForKey:uid];
- result(NULL);
- } else {
- result(FlutterMethodNotImplemented);
- }
-}
-
-- (MLKTranslator*)initialize:(FlutterMethodCall *)call {
- NSString *source = call.arguments[@"source"];
- NSString *target = call.arguments[@"target"];
- MLKTranslatorOptions *options = [[MLKTranslatorOptions alloc] initWithSourceLanguage:source
- targetLanguage:target];
- return [MLKTranslator translatorWithOptions:options];
-}
-
-- (void)handleTranslation:(FlutterMethodCall *)call result:(FlutterResult)result {
- NSString *text = call.arguments[@"text"];
-
- NSString *uid = call.arguments[@"id"];
- MLKTranslator *translator = [instances objectForKey:uid];
- if (translator == NULL) {
- translator = [self initialize:call];
- instances[uid] = translator;
- }
-
- [translator downloadModelIfNeededWithCompletion:^(NSError *_Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- }
- // Model downloaded successfully. Okay to start translating.
-
- [translator translateText:text
- completion:^(NSString *_Nullable translatedText,
- NSError *_Nullable error) {
- if (error) {
- result(getFlutterError(error));
- return;
- }
- result(translatedText);
- }];
- }];
-}
-
-- (void)manageModel:(FlutterMethodCall *)call result:(FlutterResult)result {
- NSString *modelTag = call.arguments[@"model"];
- MLKTranslateRemoteModel *model = [MLKTranslateRemoteModel translateRemoteModelWithLanguage:modelTag];
- genericModelManager = [[GenericModelManager alloc] init];
- [genericModelManager manageModel:model call:call result:result];
-}
-
-@end
diff --git a/packages/google_mlkit_translation/ios/Classes/GoogleMlKitTranslationPlugin.swift b/packages/google_mlkit_translation/ios/Classes/GoogleMlKitTranslationPlugin.swift
new file mode 100644
index 00000000..5a827f3c
--- /dev/null
+++ b/packages/google_mlkit_translation/ios/Classes/GoogleMlKitTranslationPlugin.swift
@@ -0,0 +1,100 @@
+import Flutter
+import MLKitTranslate
+import google_mlkit_commons
+
+@objc
+public class GoogleMlKitTranslationPlugin: NSObject, FlutterPlugin {
+ private var instances: [String: Translator] = [:]
+ private var genericModelManager: GenericModelManager?
+
+ public static func register(with registrar: FlutterPluginRegistrar) {
+ let channel = FlutterMethodChannel(
+ name: "google_mlkit_on_device_translator",
+ binaryMessenger: registrar.messenger()
+ )
+ let instance = GoogleMlKitTranslationPlugin()
+ registrar.addMethodCallDelegate(instance, channel: channel)
+ }
+
+ public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
+ switch call.method {
+ case "nlp#startLanguageTranslator":
+ handleTranslation(call: call, result: result)
+ case "nlp#manageLanguageModelModels":
+ manageModel(call: call, result: result)
+ case "nlp#closeLanguageTranslator":
+ if let args = call.arguments as? [String: Any], let uid = args["id"] as? String {
+ instances.removeValue(forKey: uid)
+ }
+ result(nil)
+ default:
+ result(FlutterMethodNotImplemented)
+ }
+ }
+
+ private func initialize(call: FlutterMethodCall) -> Translator? {
+ guard let args = call.arguments as? [String: Any],
+ let sourceTag = args["source"] as? String,
+ let targetTag = args["target"] as? String else {
+ return nil
+ }
+ // TranslateLanguage(rawValue:) is non-failable; invalid tags may fail at runtime when the translator is used.
+ let sourceLang = TranslateLanguage(rawValue: sourceTag)
+ let targetLang = TranslateLanguage(rawValue: targetTag)
+ let options = TranslatorOptions(sourceLanguage: sourceLang, targetLanguage: targetLang)
+ return Translator.translator(options: options)
+ }
+
+ private func handleTranslation(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let text = args["text"] as? String,
+ let uid = args["id"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing arguments", details: nil))
+ return
+ }
+
+ let translator: Translator
+ if let existing = instances[uid] {
+ translator = existing
+ } else {
+ guard let newTranslator = initialize(call: call) else {
+ result(FlutterError(
+ code: "invalid_args",
+ message: "Missing or invalid source/target language",
+ details: nil
+ ))
+ return
+ }
+ translator = newTranslator
+ instances[uid] = translator
+ }
+
+ translator.downloadModelIfNeeded { error in
+ if let error = error as NSError? {
+ result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+ return
+ }
+ translator.translate(text) { translatedText, error in
+ if let error = error as NSError? {
+ result(FlutterError(code: "Error \(error.code)", message: error.domain, details: error.localizedDescription))
+ return
+ }
+ result(translatedText ?? "")
+ }
+ }
+ }
+
+ private func manageModel(call: FlutterMethodCall, result: @escaping FlutterResult) {
+ guard let args = call.arguments as? [String: Any],
+ let modelTag = args["model"] as? String else {
+ result(FlutterError(code: "invalid_args", message: "Missing model argument", details: nil))
+ return
+ }
+ // TranslateLanguage(rawValue:) is non-failable; invalid tags may fail when the model is used.
+ let lang = TranslateLanguage(rawValue: modelTag)
+ let model = TranslateRemoteModel.translateRemoteModel(language: lang)
+ let manager = GenericModelManager()
+ genericModelManager = manager
+ manager.manage(model: model, call: call, result: result)
+ }
+}
diff --git a/packages/google_mlkit_translation/ios/google_mlkit_translation.podspec b/packages/google_mlkit_translation/ios/google_mlkit_translation.podspec
index 9783c557..98ed5b49 100644
--- a/packages/google_mlkit_translation/ios/google_mlkit_translation.podspec
+++ b/packages/google_mlkit_translation/ios/google_mlkit_translation.podspec
@@ -10,10 +10,9 @@ Pod::Spec.new do |s|
s.description = pubspec['description']
s.homepage = pubspec['homepage']
s.license = { :file => '../LICENSE' }
- s.authors = 'Multiple Authors'
+ s.authors = 'flutter-ml.dev'
s.source = { :path => '.' }
- s.source_files = 'Classes/**/*'
- s.public_header_files = 'Classes/**/*.h'
+ s.source_files = 'Classes/**/*.swift'
s.dependency 'Flutter'
s.dependency 'GoogleMLKit/Translate', '~> 9.0.0'
s.dependency 'google_mlkit_commons'
diff --git a/scripts/analyze.sh b/scripts/analyze.sh
index b93bfb7d..8ad10a64 100755
--- a/scripts/analyze.sh
+++ b/scripts/analyze.sh
@@ -15,4 +15,21 @@ else
ktlint --format
fi
+# Lint Swift code (iOS plugins) with SwiftLint (https://github.com/realm/SwiftLint)
+# Apple Silicon Homebrew installs to /opt/homebrew/bin
+if [[ "$(uname -m)" == arm64 ]]; then
+ export PATH="/opt/homebrew/bin:$PATH"
+fi
+if ! command -v swiftlint &>/dev/null; then
+ echo "swiftlint is not installed. Install it with:"
+ echo " brew install swiftlint"
+ exit 1
+fi
+swiftlint lint --fix
+SWIFTLINT_OUTPUT=$(swiftlint lint 2>&1) || true
+echo "$SWIFTLINT_OUTPUT"
+if echo "$SWIFTLINT_OUTPUT" | grep -qE "Found [1-9][0-9]* violations?"; then
+ exit 1
+fi
+
printf '\033[34m%s\033[0m\n' "All checks passed: formatting and linting successful."