diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 6432dba5c3c..34f86f2a9b0 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -25,4 +25,4 @@ jobs:
command: sudo npm install --global spellchecker-cli
- name: Spell check
run: |
- spellchecker --quiet --files="docs/**/*.md" --dictionaries="./.spellcheck.dict.txt" --reports="spelling.json" --plugins spell indefinite-article repeated-words syntax-mentions syntax-urls frontmatter
+ yarn lint:spellcheck
diff --git a/.github/workflows/scripts/start-firestore-emulator.sh b/.github/workflows/scripts/start-firestore-emulator.sh
index bb487fb0150..a6b8959e6ad 100644
--- a/.github/workflows/scripts/start-firestore-emulator.sh
+++ b/.github/workflows/scripts/start-firestore-emulator.sh
@@ -4,11 +4,15 @@ if ! [ -x "$(command -v firebase)" ]; then
exit 1
fi
-firebase emulators:start --only firestore &
+EMU_START_COMMAND="firebase emulators:start --only firestore"
-until curl --output /dev/null --silent --fail http://localhost:8080; do
- echo "Waiting for Firestore emulator to come online..."
- sleep 2
-done
-
-echo "Firestore emulator is online!"
\ No newline at end of file
+if [ "$1" = "--no-daemon" ]; then
+ $EMU_START_COMMAND
+else
+ $EMU_START_COMMAND &
+ until curl --output /dev/null --silent --fail http://localhost:8080; do
+ echo "Waiting for Firestore emulator to come online..."
+ sleep 2
+ done
+ echo "Firestore emulator is online!"
+fi
\ No newline at end of file
diff --git a/.github/workflows/tests_e2e.yml b/.github/workflows/tests_e2e.yml
index 122429cfb35..b0d2c2134f8 100644
--- a/.github/workflows/tests_e2e.yml
+++ b/.github/workflows/tests_e2e.yml
@@ -51,7 +51,7 @@ jobs:
command: npm i -g firebase-tools
- name: Start Firestore Emulator
- run: cd ./.github/workflows/scripts && sh ./start-firestore-emulator.sh
+ run: yarn tests:emulator:start-ci
- name: Get yarn cache directory path
id: yarn-cache-dir-path
@@ -107,10 +107,10 @@ jobs:
timeout_minutes: 10
retry_wait_seconds: 60
max_attempts: 3
- command: echo "y" | $ANDROID_HOME/tools/bin/sdkmanager --install "system-images;android-28;google_apis;x86_64"
+ command: echo "y" | $ANDROID_HOME/tools/bin/sdkmanager --install "system-images;android-30;google_apis;x86_64"
- name: Create Emulator
- run: echo "no" | $ANDROID_HOME/tools/bin/avdmanager create avd --force --name TestingAVD --device "Nexus 5X" -k 'system-images;android-28;google_apis;x86_64' -g google_apis
+ run: echo "no" | $ANDROID_HOME/tools/bin/avdmanager create avd --force --name TestingAVD --device "Nexus 5X" -k 'system-images;android-30;google_apis;x86_64' -g google_apis
# These Emulator start steps are the current best practice to do retries on multi-line commands with persistent (nohup) processes
- name: Start Android Emulator
@@ -225,7 +225,7 @@ jobs:
command: npm i -g firebase-tools
- name: Start Firestore Emulator
- run: cd ./.github/workflows/scripts && sh ./start-firestore-emulator.sh
+ run: yarn tests:emulator:start-ci
- name: Get Xcode version
id: xcode-version
diff --git a/.gitignore b/.gitignore
index 3de0732a0b4..7da02a3c46b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -559,6 +559,7 @@ app.admob.js
app.smartreply.js
eslint-report.json
yarn.lock
+spelling.json
# Gatsby / Website
website/.cache
diff --git a/.spellcheck.dict.txt b/.spellcheck.dict.txt
index fbeff0e2359..3e07999a581 100644
--- a/.spellcheck.dict.txt
+++ b/.spellcheck.dict.txt
@@ -66,6 +66,8 @@ launchProperties
learnt
Lerna
MDX
+MLKit
+mlkit
mono-repo
Multidex
multidex
@@ -96,6 +98,7 @@ PRs
PubSub
qa
react-native-firebase
+react-native-mlkit
realtime
Realtime
remarketing
diff --git a/README.md b/README.md
index eec2c136494..01501b6089d 100644
--- a/README.md
+++ b/README.md
@@ -54,8 +54,7 @@ The main package that you interface with is `App` (`@react-native-firebase/app`)
| [Dynamic Links](/packages/dynamic-links) | [](https://www.npmjs.com/package/@react-native-firebase/dynamic-links) |
| [In-app Messaging](/packages/in-app-messaging) | [](https://www.npmjs.com/package/@react-native-firebase/in-app-messaging) |
| [Instance ID](/packages/iid) | [](https://www.npmjs.com/package/@react-native-firebase/iid) |
-| [ML Kit Natural Language](/packages/ml-natural-language) | [](https://www.npmjs.com/package/@react-native-firebase/ml-natural-language) |
-| [ML Kit Vision](/packages/ml-vision) | [](https://www.npmjs.com/package/@react-native-firebase/ml-vision) |
+| [ML](/packages/ml) | [](https://www.npmjs.com/package/@react-native-firebase/ml) |
| [Performance Monitoring](/packages/perf) | [](https://www.npmjs.com/package/@react-native-firebase/perf) |
| [Realtime Database](/packages/database) | [](https://www.npmjs.com/package/@react-native-firebase/database) |
| [Remote Config](/packages/remote-config) | [](https://www.npmjs.com/package/@react-native-firebase/remote-config) |
diff --git a/docs/app/usage.md b/docs/app/usage.md
index 4a915a4512a..78db960528f 100644
--- a/docs/app/usage.md
+++ b/docs/app/usage.md
@@ -27,8 +27,7 @@ Currently, the native Firebase SDKs only provide functionality for creating seco
- [Cloud Functions](/functions)
- [Cloud Storage](/storage).
- [Instance ID](/iid).
-- [ML Kit Natural Language](/ml-language).
-- [ML Kit Vision](/ml-vision).
+- [ML](/ml).
- [Remote Config](/remote-config).
## Initializing secondary apps
diff --git a/docs/firestore/usage/index.md b/docs/firestore/usage/index.md
index 57b115405b6..345fbddc986 100644
--- a/docs/firestore/usage/index.md
+++ b/docs/firestore/usage/index.md
@@ -330,9 +330,6 @@ Cloud Firestore does not support the following types of queries:
- Queries with range filters on different fields, as described in the previous section.
- Logical OR queries. In this case, you should create a separate query for each OR condition and merge the query results in your app.
-- Queries with a `!=` clause. In this case, you should split the query into a greater-than query and a less-than query.
- For example, although the query clause `where("age", "!=", "30")` is not supported, you can get the same result set by
- combining two queries, one with the clause `where("age", "<", "30")` and one with the clause `where("age", ">", 30)`.
## Writing Data
diff --git a/docs/in-app-messaging/usage/index.md b/docs/in-app-messaging/usage/index.md
index d7afd80c69e..659c7cb06d7 100644
--- a/docs/in-app-messaging/usage/index.md
+++ b/docs/in-app-messaging/usage/index.md
@@ -2,7 +2,7 @@
title: In App Messaging
description: Installation and getting started with In App Messaging.
icon: //static.invertase.io/assets/firebase/in-app-messaging.svg
-next: /ml-natural-language/usage
+next: /ml/usage
previous: /iid/usage
---
diff --git a/docs/index.md b/docs/index.md
index e9462d16389..cd8632037b4 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -186,15 +186,15 @@ project.ext {
// Overriding Build/Android SDK Versions
android : [
minSdk : 16,
- targetSdk : 29,
- compileSdk: 29,
- buildTools: "29.0.3"
+ targetSdk : 30,
+ compileSdk: 30,
+ buildTools: "30.0.2"
],
// Overriding Library SDK Versions
firebase: [
// Override Firebase SDK Version
- bom : "25.12.0"
+ bom : "26.0.0"
],
],
])
@@ -209,7 +209,7 @@ Open your projects `/ios/Podfile` and add any of the globals shown below to the
```ruby
# Override Firebase SDK Version
-$FirebaseSDKVersion = '6.34.0'
+$FirebaseSDKVersion = '7.0.0'
```
Once changed, reinstall your projects pods via pod install and rebuild your project with `npx react-native run-ios`.
diff --git a/docs/migrating-to-v6.md b/docs/migrating-to-v6.md
index d17d1567d73..05a41de4893 100644
--- a/docs/migrating-to-v6.md
+++ b/docs/migrating-to-v6.md
@@ -26,7 +26,7 @@ been approved before being released.
We have also ensured the release is compatible with some of the popular tooling in the React Native community, such as
[autolinking](https://github.com/react-native-community/cli/blob/master/docs/autolinking.md) & [TypeScript](https://facebook.github.io/react-native/blog/2018/05/07/using-typescript-with-react-native).
-Version 6 also brings support for previously unsupported modules such as [Firebase ML Kit](https://firebase.google.com/docs/ml-kit).
+Version 6 also brings support for previously unsupported modules such as [Firebase ML](https://firebase.google.com/docs/ml).
## NPM dependency changes
@@ -238,26 +238,25 @@ yarn add @react-native-firebase/auth
Install the modules required for your application:
-| Module | NPM Package |
-| ------------------------------------------------------------ | ------------------------------------------ |
-| AdMob | @react-native-firebase/admob |
-| Analytics | @react-native-firebase/analytics |
-| App | @react-native-firebase/app |
-| App Invites | @react-native-firebase/invites |
-| Authentication | @react-native-firebase/auth |
-| Cloud Firestore | @react-native-firebase/firestore |
-| Cloud Functions | @react-native-firebase/functions |
-| Cloud Messaging | @react-native-firebase/messaging |
-| Cloud Storage | @react-native-firebase/storage |
-| Crashlytics | @react-native-firebase/crashlytics |
-| Dynamic Links | @react-native-firebase/dynamic-links |
-| In-app Messaging | @react-native-firebase/in-app-messaging |
-| Instance ID | @react-native-firebase/iid |
-| ML Kit Natural Language | @react-native-firebase/ml-natural-language |
-| ML Kit Vision | @react-native-firebase/ml-vision |
-| Performance Monitoring | @react-native-firebase/perf |
-| Realtime Database | @react-native-firebase/database |
-| Remote Config | @react-native-firebase/remote-config |
+| Module | NPM Package |
+| ------------------------------------------------------------ | --------------------------------------- |
+| AdMob | @react-native-firebase/admob |
+| Analytics | @react-native-firebase/analytics |
+| App | @react-native-firebase/app |
+| App Invites | @react-native-firebase/invites |
+| Authentication | @react-native-firebase/auth |
+| Cloud Firestore | @react-native-firebase/firestore |
+| Cloud Functions | @react-native-firebase/functions |
+| Cloud Messaging | @react-native-firebase/messaging |
+| Cloud Storage | @react-native-firebase/storage |
+| Crashlytics | @react-native-firebase/crashlytics |
+| Dynamic Links | @react-native-firebase/dynamic-links |
+| In-app Messaging | @react-native-firebase/in-app-messaging |
+| Instance ID | @react-native-firebase/iid |
+| ML | @react-native-firebase/ml |
+| Performance Monitoring | @react-native-firebase/perf |
+| Realtime Database | @react-native-firebase/database |
+| Remote Config | @react-native-firebase/remote-config |
Users on React Native version 0.60+, the modules will be automatically linked. For users on a lower version,
see the module specific pages for manual installation guides.
@@ -394,9 +393,9 @@ No breaking changes.
### Notifications
-Device-local notification APIs are not actually Firebase APIs at the same time they are very difficult to maintain.
+Device-local notification APIs are not actually Firebase APIs; at the same time, they are very difficult to maintain.
-For these reasons the notifications package has been removed from react-native-firebase for versions 6 and higher.
+For these reasons, the notifications package has been removed from react-native-firebase for versions 6 and higher.
How to migrate: If you use device-local notification APIs and user-visible notifications in your app you will want to integrate a separate library that gives you access to device-local notification APIs. Many people have reported success with each of https://notifee.app, https://wix.github.io/react-native-notifications and https://github.com/zo0r/react-native-push-notification
@@ -454,14 +453,8 @@ How to migrate: If you use device-local notification APIs and user-visible notif
- `firebase.utils.Native` is now deprecated and will be removed in a later release, please rename usages of this to `firebase.utils.FilePath`.
- `firebase.utils.Native.*` some properties have been renamed and deprecated and will be removed in a later release, follow the in-app console warnings on how to migrate.
-### ML Kit Natural Language
+### ML
-`@react-native-firebase/ml-natural-language`
-
-This is a new module. See documentation for usage.
-
-### ML Kit Vision
-
-`@react-native-firebase/ml-vision`
+`@react-native-firebase/ml`
This is a new module. See documentation for usage.
diff --git a/docs/ml-natural-language/index.md b/docs/ml-natural-language/index.md
deleted file mode 100644
index 144141eb6dc..00000000000
--- a/docs/ml-natural-language/index.md
+++ /dev/null
@@ -1,3 +0,0 @@
----
-redirect: /ml-natural-language/usage
----
diff --git a/docs/ml-natural-language/usage/index.md b/docs/ml-natural-language/usage/index.md
deleted file mode 100644
index 303b24cc1a4..00000000000
--- a/docs/ml-natural-language/usage/index.md
+++ /dev/null
@@ -1,154 +0,0 @@
----
-title: ML Natural Language
-description: Installation and getting started with ML Natural Language.
-icon: //static.invertase.io/assets/firebase/ml-kit.svg
-next: /ml-vision/usage
-previous: /in-app-messaging/usage
----
-
-# Installation
-
-This module requires that the `@react-native-firebase/app` module is already setup and installed. To install the "app" module, view the
-[Getting Started](/) documentation.
-
-```bash
-# Install & setup the app module
-yarn add @react-native-firebase/app
-
-# Install the ml-natural-language module
-yarn add @react-native-firebase/ml-natural-language
-
-# If you're developing your app using iOS, run this command
-cd ios/ && pod install
-```
-
-If you're using an older version of React Native without autolinking support, or wish to integrate into an existing project,
-you can follow the manual installation steps for [iOS](/ml-natural-language/usage/installation/ios) and [Android](/ml-natural-language/usage/installation/android).
-
-# What does it do
-
-The React Native Firebase ML Natural Language module supports [Smart Replies](https://firebase.google.com/docs/ml-kit/generate-smart-replies)
-& [Language Identification](https://firebase.google.com/docs/ml-kit/identify-languages) provided by Firebase ML kit.
-At this moment, the [Translation](https://firebase.google.com/docs/ml-kit/translation) module is not supported
-
-
-
-Smart reply can automatically generate relevant replies to messages. It helps your users respond to messages quickly,
-and makes it easier to reply to messages on devices with limited input capabilities.
-
-Language identification can be used to determine the language of a string of text. It can be useful when working with
-user-provided text, which often doesn't come with any language information.
-
-# Usage
-
-Each services requires enabling before it can be used within your app. The sections below show how to enable the models
-for each service and usage examples of each.
-
-## Smart Replies
-
-The [Smart Replies](https://firebase.google.com/docs/ml-kit/generate-smart-replies) service from Firebase allows you to
-generate suggested replies based on a list of on-going conversation data.
-
-Before using the API, the Smart Reply model must be installed on your device. To enable installation of the model, set
-the `ml_natural_language_smart_reply_model` to `true` in your `firebase.json` file:
-
-```json
-// /firebase.json
-{
- "react-native": {
- "ml_natural_language_smart_reply_model": true
- }
-}
-```
-
-Once added, rebuild your application:
-
-```bash
-// For Android
-npx react-native run-android
-
-// For iOS
-cd ios/ && pod install
-npx react-native run-ios
-```
-
-Once complete, the `suggestReplies` method allows you to generate potential replies by providing it with an array of text input(s)
-which may generate three responses per input as example below:
-
-```jsx
-const replies = await firebase
- .naturalLanguage()
- .suggestReplies([
- { text: 'Hey, long time no speak!' },
- { text: 'I know right, it has been a while..', userId: '123', isLocalUser: false },
- { text: 'We should catchup some time!' },
- { text: 'Definitely, how about we go for lunch this week?', userId: '123', isLocalUser: false },
- ]);
-
-replies.forEach(reply => {
- console.log(reply.text);
-});
-```
-
-Each array item an is an instance of a [`TextMessage`](/reference/ml-natural-language/textmessage). At a minimum you
-must provide the a `text` property. To help the Machine Learning service identify various users in the conversation, you
-can set the `isLocalUser` flag to `false` if the message is from an external user, along with a unique ID.
-
-Once returned, if the service is able to generate suggested replies you can iterate over the response to extract the `text`
-property from the returned [`SuggestedReply`](/reference/ml-natural-language/suggestedreply) instance.
-
-## Identify language
-
-The [Language Identification](https://firebase.google.com/docs/ml-kit/identify-languages) service from Firebase allows you to
-identify a language from any given string of text.
-
-Before using the API, the Language Identification model must be installed on your device. To enable installation of the model, set
-the `ml_natural_language_language_id_model` to `true` in your `firebase.json` file:
-
-```json
-// /firebase.json
-{
- "react-native": {
- "ml_natural_language_language_id_model": true
- }
-}
-```
-
-Once added, rebuild your application:
-
-```bash
-// For Android
-npx react-native run-android
-
-// For iOS
-cd ios/ && pod install
-npx react-native run-ios
-```
-
-The `identifyLanguage` method allows then allows you to identify a language, for example:
-
-```jsx
-const language = await firebase.naturalLanguage().identifyLanguage('Hello there. General Kenobi.');
-
-console.log('Identified language: ', language); // en
-```
-
-# firebase.json
-
-Add any of the keys indicated below to your `firebase.json` file at the root of your project directory, and set them to
-true to enable them. All models and APIs are disabled (false) by default.
-
-> If you are manually linking on iOS (e.g. not using CocoaPods) then it's up to you to manage these models and dependencies
-> yourself - firebase.json support is only for Android and iOS (via Pods).
-
-```json
-// /firebase.json
-{
- "react-native": {
- // Language Identification
- "ml_natural_language_language_id_model": true,
- // Smart Replies
- "ml_natural_language_smart_reply_model": true
- }
-}
-```
diff --git a/docs/ml-natural-language/usage/installation/android.md b/docs/ml-natural-language/usage/installation/android.md
deleted file mode 100644
index e8defa3b8e0..00000000000
--- a/docs/ml-natural-language/usage/installation/android.md
+++ /dev/null
@@ -1,59 +0,0 @@
----
-title: Android Installation
-description: Manually integrate ML Kit Natural Language into your Android application.
-next: /ml-natural-language/usage/installation/ios
-previous: /ml-natural-language/usage
----
-
-# Android Manual Installation
-
-The following steps are only required if your environment does not have access to React Native
-auto-linking.
-
-## 1. Update Gradle Settings
-
-Add the following to your projects `/android/settings.gradle` file:
-
-```groovy
-include ':@react-native-firebase_ml-natural-language'
-project(':@react-native-firebase_ml-natural-language').projectDir = new File(rootProject.projectDir, './../node_modules/@react-native-firebase/ml-natural-language/android')
-```
-
-## 2. Update Gradle Dependencies
-
-Add the React Native Functions module dependency to your `/android/app/build.gradle` file:
-
-```groovy
-// ..
-dependencies {
- // ..
- implementation project(path: ":@react-native-firebase_ml-natural-language")
-}
-```
-
-## 3. Add package to the Android Application
-
-Import and apply the React Native Firebase module package to your `/android/app/src/main/java/**/MainApplication.java` file:
-
-Import the package:
-
-```java
-import io.invertase.firebase.perf.ReactNativeFirebaseMLNaturalLanguagePackage;
-```
-
-Add the package to the registry:
-
-```java
-protected List getPackages() {
- return Arrays.asList(
- new MainReactPackage(),
- new ReactNativeFirebaseMLNaturalLanguagePackage(),
-```
-
-## 4. Rebuild the project
-
-Once the above steps have been completed, rebuild your Android project:
-
-```bash
-npx react-native run-android
-```
diff --git a/docs/ml-natural-language/usage/installation/ios.md b/docs/ml-natural-language/usage/installation/ios.md
deleted file mode 100644
index 5defdecaf57..00000000000
--- a/docs/ml-natural-language/usage/installation/ios.md
+++ /dev/null
@@ -1,37 +0,0 @@
----
-title: iOS Installation
-description: Manually integrate ML Kit Natural Language APIs into your iOS application.
-next: /ml-natural-language/usage/installation/android
-previous: /ml-natural-language/usage
----
-
-# iOS Manual Installation
-
-The following steps are only required if your environment does not have access to React Native
-auto-linking.
-
-## 1. Add the Pod
-
-Add the `RNFBMLNaturalLanguage` Pod to your projects `/ios/Podfile`:
-
-```ruby
-target 'app' do
- # ...
- pod 'RNFBMLNaturalLanguage', :path => '../node_modules/@react-native-firebase/ml-natural-language'
-end
-```
-
-## 2. Update Pods & rebuild the project
-
-You may need to update your local Pods in order for the `RNFBMLNaturalLanguage` Pod to be installed in your project:
-
-```bash
-$ cd ios/
-$ pod install --repo-update
-```
-
-Once the Pods have installed locally, rebuild your iOS project:
-
-```bash
-npx react-native run-ios
-```
diff --git a/docs/ml-vision/barcode-scanning.md b/docs/ml-vision/barcode-scanning.md
deleted file mode 100644
index 30864b44984..00000000000
--- a/docs/ml-vision/barcode-scanning.md
+++ /dev/null
@@ -1,104 +0,0 @@
----
-title: Barcode Scanning
-description: Get started with ML Kit Vision Barcode Scanning.
-next: /ml-vision/image-labeling
-previous: /ml-vision/landmark-recognition
----
-
-Barcode scanning can read data encoded using most standard barcode formats. Barcode scanning happens on the device,
-and doesn't require a network connection. It's a convenient way to pass information from the real world to your app.
-
-The Machine Learning service is only offered on the device, and no cloud service exists.
-
-Given an image file, the Barcode Scanning service will attempt to recognize one or more barcodes, offering information
-such as:
-
-- The 4-point coordinates of the barcodes on the image.
-- The type of barcode (e.g. a phone number, contact information, calendar invite etc).
-
-To view the full list of information available, view the [`VisionBarcode`](/reference/ml-vision/visionbarcode) documentation.
-
-# On-device Barcode Scanning
-
-## Enable the model
-
-To enable the mode, set the `ml_vision_barcode_model` key to `true` in your `firebase.json` file:
-
-```json
-// /firebase.json
-{
- "react-native": {
- "ml_vision_barcode_model": true
- }
-}
-```
-
-Once complete, rebuild your application:
-
-```bash
-# For Android
-npx react-native run-android
-
-# For iOS
-cd ios/ && pod install --repo-update
-npx react-native run-ios
-```
-
-## Process
-
-Once the model has been downloaded, call the `barcodeDetectorProcessImage` method with a path to a local file on your device:
-
-```js
-import { utils } from '@react-native-firebase/app';
-import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
-
-async function processBarcodes(localPath) {
- const barcodes = await vision().barcodeDetectorProcessImage(localPath);
-
- barcodes.forEach(barcode => {
- if (barcode.valueType === VisionBarcodeValueType.CALENDAR_EVENT) {
- console.log('Barcode is a calendar event: ', barcode.calendarEvent);
- }
-
- if (barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) {
- console.log('Barcode contains contact info: ', barcode.contactInfo);
- }
-
- if (barcode.valueType === VisionBarcodeValueType.DRIVER_LICENSE) {
- console.log('Barcode contains drivers license info: ', barcode.driverLicense);
- }
-
- if (barcode.valueType === VisionBarcodeValueType.EMAIL) {
- console.log('Barcode contains email address info: ', barcode.email);
- }
-
- if (barcode.valueType === VisionBarcodeValueType.GEO) {
- console.log('Barcode contains location info: ', barcode.geoPoint);
- }
-
- if (barcode.valueType === VisionBarcodeValueType.PHONE) {
- console.log('Barcode contains phone number info: ', barcode.phone);
- }
-
- if (barcode.valueType === VisionBarcodeValueType.SMS) {
- console.log('Barcode contains SMS info: ', barcode.sms);
- }
-
- if (barcode.valueType === VisionBarcodeValueType.URL) {
- console.log('Barcode contains URL info: ', barcode.url);
- }
-
- if (barcode.valueType === VisionBarcodeValueType.WIFI) {
- console.log('Barcode contains WIFI info: ', barcode.wifi);
- }
- });
-}
-
-// Local path to file on the device
-const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/barcode-document.jpg`;
-
-processBarcodes(localFile).then(() => console.log('Finished processing file.'));
-```
-
-To learn about the types of information the barcode scanner can return, view the
-[`VisionBarcode`](/reference/ml-vision/visionbarcode) documentation.
diff --git a/docs/ml-vision/face-detection.md b/docs/ml-vision/face-detection.md
deleted file mode 100644
index f6a3a8e0576..00000000000
--- a/docs/ml-vision/face-detection.md
+++ /dev/null
@@ -1,83 +0,0 @@
----
-title: Face Detection
-description: Get started with ML Kit Vision Face Detection.
-next: /remote-config/usage
-previous: /ml-vision/face-detection
----
-
-Face detection can detect faces in an image, identify key facial features, and get the contours of detected faces.
-This provides information needed to perform tasks like embellishing selfies and portraits, or generating avatars
-from a user's photo.
-
-The Machine Learning service is only offered on the device, and no cloud service exists.
-
-Given an image file, the Face Detection service will attempt to recognize one or more faces, offering information
-such as:
-
-- Face contour coordinates.
-- The rotation of the head/face along the Y & Z axis.
-- The probability that the face has it's left/right eyes open.
-- The probability that the face is smiling.
-- A list of face features (e.g. eyes, nose, mouth etc) and their positions on the face.
-
-# On-device Face Detection
-
-## Enable the model
-
-To enable the mode, set the `ml_vision_face_model` key to `true` in your `firebase.json` file:
-
-```json
-// /firebase.json
-{
- "react-native": {
- "ml_vision_face_model": true
- }
-}
-```
-
-Once complete, rebuild your application:
-
-```bash
-# For Android
-npx react-native run-android
-
-# For iOS
-cd ios/ && pod install --repo-update
-npx react-native run-ios
-```
-
-## Process
-
-Once the model has been downloaded, call the `faceDetectorProcessImage` method with a path to a local file on your device:
-
-```js
-import { utils } from '@react-native-firebase/app';
-import vision, { VisionFaceContourType } from '@react-native-firebase/ml-vision';
-
-async function processFaces(localPath) {
- const faces = await vision().faceDetectorProcessImage(localPath);
-
- faces.forEach(face => {
- console.log('Head rotation on Y axis: ', face.headEulerAngleY);
- console.log('Head rotation on Z axis: ', face.headEulerAngleZ);
-
- console.log('Left eye open probability: ', face.leftEyeOpenProbability);
- console.log('Right eye open probability: ', face.rightEyeOpenProbability);
- console.log('Smiling probability: ', face.smilingProbability);
-
- face.faceContours.forEach(contour => {
- if (contour.type === VisionFaceContourType.FACE) {
- console.log('Face outline points: ', contour.points);
- }
- });
- });
-}
-
-// Local path to file on the device
-const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/barcode-document.jpg`;
-
-processBarcodes(localFile).then(() => console.log('Finished processing file.'));
-```
-
-To learn about the types of information the face detector can return, view the
-[`VisionFace`](/reference/ml-vision/visionface) documentation.
diff --git a/docs/ml-vision/image-labeling.md b/docs/ml-vision/image-labeling.md
deleted file mode 100644
index c4f194beadc..00000000000
--- a/docs/ml-vision/image-labeling.md
+++ /dev/null
@@ -1,106 +0,0 @@
----
-title: Image Labeling
-description: Get started with ML Kit Vision Image Labeling.
-next: /ml-vision/face-detection
-previous: /ml-vision/barcode-scanning
----
-
-Image labeling can recognize entities in an image without having to provide any additional contextual metadata, using
-either an on-device API or a cloud-based API. It gets a list of the entities that were recognized: people, things, places,
-activities, and so on.
-
-# Cloud Image Labeling
-
-The cloud based image labeling service uploads a given image to the Firebase services, processes the results and returns them.
-To get started, call the `cloudImageLabelerProcessImage` method with a path to a local file on your device:
-
-```js
-import { utils } from '@react-native-firebase/app';
-import vision from '@react-native-firebase/ml-vision';
-
-async function processImage(localPath) {
- const labels = await vision().cloudImageLabelerProcessImage(localPath);
-
- labels.forEach(label => {
- console.log('Service labelled the image: ', label.text);
- console.log('Confidence in the label: ', label.confidence);
- });
-}
-
-// Local path to file on the device
-const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/image-document.jpg`;
-
-processImage(localFile).then(() => console.log('Finished processing file.'));
-```
-
-To learn more about the available data on a processed document, view the [`VisionImageLabel`](/reference/ml-vision/visionimagelabel)
-documentation.
-
-## Configuration
-
-By default, the service will return labels with any confidence level, which may include labels you do not care about or
-are too obvious. Set the `confidenceThreshold` key to a value between 0 & 1, where 1 represents 100% confidence. The
-cloud service will only return labels with a confidence greater than what you specified:
-
-```js
-const processed = await vision().cloudDocumentTextRecognizerProcessImage(localPath, {
- // 80% or higher confidence labels only
- confidenceThreshold: 0.8,
-});
-```
-
-View the [`VisionCloudImageLabelerOptions`](/reference/ml-vision/visioncloudimagelabeleroptions) documentation for more information.
-
-# On-device Image Labeling
-
-Running the ML Kit service on a device requires the `ml_vision_image_label_model` and `ml_vision_label_model` to be download to the device. Although the results
-of on-device processing will be faster and more accurate, including the model in your application will increase the size
-of the application.
-
-## Enable the model
-
-To enable the mode, set the `ml_vision_image_label_model` & `ml_vision_label_model` key to `true` in your `firebase.json` file:
-
-```json
-// /firebase.json
-{
- "react-native": {
- "ml_vision_image_label_model": true,
- "ml_vision_label_model": true
- }
-}
-```
-
-Once complete, rebuild your application:
-
-```bash
-# For Android
-npx react-native run-android
-
-# For iOS
-cd ios/ && pod install --repo-update
-npx react-native run-ios
-```
-
-## Process
-
-Once the models have been downloaded, call the `imageLabelerProcessImage` method with a path to a local file on your device:
-
-```js
-import { utils } from '@react-native-firebase/app';
-import vision from '@react-native-firebase/ml-vision';
-
-async function processImage(localPath) {
- const labels = await vision().imageLabelerProcessImage(localPath);
-
- labels.forEach(label => {
- console.log('Service labelled the image: ', label.text);
- console.log('Confidence in the label: ', label.confidence);
- });
-}
-
-// Local path to file on the device
-const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/image-document.jpg`;
-
-processImage(localFile).then(() => console.log('Finished processing file.'));
-```
diff --git a/docs/ml-vision/index.md b/docs/ml-vision/index.md
deleted file mode 100644
index 1260a10acbf..00000000000
--- a/docs/ml-vision/index.md
+++ /dev/null
@@ -1,3 +0,0 @@
----
-redirect: /ml-vision/usage
----
diff --git a/docs/ml-vision/text-recognition.md b/docs/ml-vision/text-recognition.md
deleted file mode 100644
index 33244983f45..00000000000
--- a/docs/ml-vision/text-recognition.md
+++ /dev/null
@@ -1,121 +0,0 @@
----
-title: Text Recognition
-description: Get started with ML Kit Vision Text Recognition.
-next: /ml-vision/landmark-recognition
-previous: /ml-vision/usage
----
-
-Text recognition can automate tedious data entry for credit cards, receipts, and business cards. With the Cloud-based API,
-you can also extract text from pictures of documents, which you can use to increase accessibility or translate documents.
-
-Once an image file has been processed, the API returns a [`VisionDocumentText`](/reference/ml-vision/visiondocumenttext), referencing
-all found text along with each [`VisionDocumentTextBlock`](/reference/ml-vision/visiondocumenttextblock). Each block contains
-meta-data such as:
-
-- The 4-point coordinates of the box on the document.
-- Paragraphs within the block.
-- Recognized languages within the block/document.
-- The confidence the Machine Learning service has in it's own results.
-
-# Cloud Text Recognition
-
-The cloud based text recognition service uploads a given image of a document to the remote Firebase service which processes the results and returns them. Only image file types are allowed.
-To get started, call the `cloudDocumentTextRecognizerProcessImage` method with a path to a local file on your device:
-
-```js
-import { utils } from '@react-native-firebase/app';
-import vision from '@react-native-firebase/ml-vision';
-
-async function processDocument(localPath) {
- const processed = await vision().cloudDocumentTextRecognizerProcessImage(localPath);
-
- console.log('Found text in document: ', processed.text);
-
- processed.blocks.forEach(block => {
- console.log('Found block with text: ', block.text);
- console.log('Confidence in block: ', block.confidence);
- console.log('Languages found in block: ', block.recognizedLanguages);
- });
-}
-
-// Local path to file on the device
-const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/text-document.jpg`;
-
-processDocument(localFile).then(() => console.log('Finished processing file.'));
-```
-
-To learn more about the available data on a processed document, view the [`VisionDocumentText`](/reference/ml-vision/visiondocumenttext)
-documentation.
-
-## Configuration
-
-To help improve the results when using the cloud service, you can optionally provide arguments to the `cloudDocumentTextRecognizerProcessImage`
-method:
-
-```js
-const processed = await vision().cloudDocumentTextRecognizerProcessImage(documentPath, {
- // The document contains Kurdish
- languageHints: ['KU'],
-});
-```
-
-In most scenarios, not providing any hints will yield better results. Use this configuration if the cloud service is struggling
-to detect a language.
-
-View the [`VisionCloudDocumentTextRecognizerOptions`](/reference/ml-vision/visionclouddocumenttextrecognizeroptions) documentation for more information.
-
-# On-device Text Recognition
-
-Running the ML Kit service on a device requires the `ml_vision_ocr_model` to be download to the device. Although the results
-of on-device processing will be faster and more accurate, including the model in your application will increase the size
-of the application.
-
-## Enable the model
-
-To enable the mode, set the `ml_vision_ocr_model` key to `true` in your `firebase.json` file:
-
-```json
-// /firebase.json
-{
- "react-native": {
- "ml_vision_ocr_model": true
- }
-}
-```
-
-Once complete, rebuild your application:
-
-```bash
-# For Android
-npx react-native run-android
-
-# For iOS
-cd ios/ && pod install --repo-update
-npx react-native run-ios
-```
-
-## Process
-
-Once the model has been downloaded, call the `textRecognizerProcessImage` method with a path to a local file on your device:
-
-```js
-import { utils } from '@react-native-firebase/app';
-import vision from '@react-native-firebase/ml-vision';
-
-async function processDocument(localPath) {
- const processed = await vision().textRecognizerProcessImage(localPath);
-
- console.log('Found text in document: ', processed.text);
-
- processed.blocks.forEach(block => {
- console.log('Found block with text: ', block.text);
- console.log('Confidence in block: ', block.confidence);
- console.log('Languages found in block: ', block.recognizedLanguages);
- });
-}
-
-// Local path to file on the device
-const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/text-document.jpg`;
-
-processDocument(localFile).then(() => console.log('Finished processing file.'));
-```
diff --git a/docs/ml-vision/usage/index.md b/docs/ml-vision/usage/index.md
deleted file mode 100644
index fc9034fc59a..00000000000
--- a/docs/ml-vision/usage/index.md
+++ /dev/null
@@ -1,100 +0,0 @@
----
-title: ML Kit Vision
-description: Installation and getting started with ML Kit Vision.
-icon: //static.invertase.io/assets/firebase/ml-kit.svg
-next: /ml-vision/text-recognition
-previous: /ml-natural-language/usage
----
-
-# Installation
-
-This module requires that the `@react-native-firebase/app` module is already setup and installed. To install the "app" module, view the
-[Getting Started](/) documentation.
-
-```bash
-# Install & setup the app module
-yarn add @react-native-firebase/app
-
-# Install the ml-vision module
-yarn add @react-native-firebase/ml-vision
-
-# If you're developing your app using iOS, run this command
-cd ios/ && pod install
-```
-
-If you're using an older version of React Native without autolinking support, or wish to integrate into an existing project,
-you can follow the manual installation steps for [iOS](/ml-vision/usage/installation/ios) and [Android](/ml-vision/usage/installation/android).
-
-# What does it do
-
-ML Kit Vision makes use of Firebase's Machine Learning Kit's [Text Recognition](https://firebase.google.com/docs/ml-kit/recognize-text),
-[Face Detection](https://firebase.google.com/docs/ml-kit/detect-faces), [Barcode Scanning](https://firebase.google.com/docs/ml-kit/read-barcodes),
-[Image Labeling](https://firebase.google.com/docs/ml-kit/label-images) & [Landmark Recognition](https://firebase.google.com/docs/ml-kit/recognize-landmarks) features.
-
-Depending on the service, it is possible to perform Machine Learning on both the local device or cloud.
-
-
-
-## Support table
-
-The table below outlines the current module support for each available service, and whether they are available on local device,
-cloud or both.
-
-| API | Cloud Model | On Device |
-| ------------------------------------------------------------------------------------- | ----------- | --------- |
-| [Text Recognition](https://firebase.google.com/docs/ml-kit/recognize-text) | ✅ | ✅ |
-| [Document Text Recognition](https://firebase.google.com/docs/ml-kit/recognize-text)) | ✅ | |
-| [Face Detection](https://firebase.google.com/docs/ml-kit/detect-faces) | | ✅ |
-| [Barcode Scanning](https://firebase.google.com/docs/ml-kit/read-barcodes) | | ✅ |
-| [Image Labeling](https://firebase.google.com/docs/ml-kit/label-images) | ✅ | ✅ |
-| [Landmark Recognition](https://firebase.google.com/docs/ml-kit/recognize-landmarks) | | ✅ |
-| [AutoML Vision Edge](https://firebase.google.com/docs/ml-kit/automl-image-labeling) | ❌ | ❌ |
-| [Object Detection/Tracking](https://firebase.google.com/docs/ml-kit/object-detection) | ❌ | ❌ |
-
-# Usage
-
-To get started, you can find the documentation for the individual ML Kit Vision services below:
-
-- [Text Recognition](/ml-vision/text-recognition).
-- [Landmark Recognition](/ml-vision/landmark-recognition).
-- [Barcode Scanning](/ml-vision/barcode-scanning).
-- [Image ](/ml-vision/image-labeling).
-- [Face Detection](/ml-vision/face-detection).
-
-# firebase.json
-
-## Enabling models
-
-To be able to use the on-device Machine Learning models you'll need to enable them. This is possible by setting the below noted properties
-on the `firebase.json` file at the root of your project directory.
-
-```json
-// /firebase.json
-{
- "react-native": {
- // on device face detection
- "ml_vision_face_model": true,
- // on device text recognition
- "ml_vision_ocr_model": true,
- // on device barcode detection
- "ml_vision_barcode_model": true,
-
- // on device image labeling
- "ml_vision_label_model": true,
- "ml_vision_image_label_model": true
- }
-}
-```
-
-The models are disabled by default to help control app size.
-
-Since only models enabled here will be compiled into the application, any changes to this file require a rebuild.
-
-```bash
-# For Android
-npx react-native run-android
-
-# For iOS
-cd ios/ && pod install --repo-update
-npx react-native run-ios
-```
diff --git a/docs/ml/image-labeling.md b/docs/ml/image-labeling.md
new file mode 100644
index 00000000000..4b91322b018
--- /dev/null
+++ b/docs/ml/image-labeling.md
@@ -0,0 +1,52 @@
+---
+title: Image Labeling
+description: Get started with ML Image Labeling.
+next: /remote-config/usage
+previous: /ml/landmark-recognition
+---
+
+Image labeling can recognize entities in an image without having to provide any additional contextual metadata, using
+either a cloud-based API. It gets a list of the entities that were recognized: people, things, places,
+activities, and so on.
+
+# Cloud Image Labeling
+
+The cloud based image labeling service uploads a given image to the remote Firebase service, which processes it and returns the results.
+To get started, call the `cloudImageLabelerProcessImage` method with a path to a local file on your device:
+
+```js
+import { utils } from '@react-native-firebase/app';
+import ml from '@react-native-firebase/ml';
+
+async function processImage(localPath) {
+ const labels = await ml().cloudImageLabelerProcessImage(localPath);
+
+ labels.forEach(label => {
+ console.log('Service labelled the image: ', label.text);
+ console.log('Confidence in the label: ', label.confidence);
+ });
+}
+
+// Local path to file on the device
+const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/image-document.jpg`;
+
+processImage(localFile).then(() => console.log('Finished processing file.'));
+```
+
+To learn more about the available data on a processed document, view the [`MLImageLabel`](/reference/ml/mlimagelabel)
+documentation.
+
+## Configuration
+
+By default, the service will return labels with any confidence level, which may include labels you do not care about or
+are too obvious. Set the `confidenceThreshold` key to a value between 0 & 1, where 1 represents 100% confidence. The
+cloud service will only return labels with a confidence greater than what you specified:
+
+```js
+const labels = await ml().cloudImageLabelerProcessImage(localPath, {
+ // 80% or higher confidence labels only
+ confidenceThreshold: 0.8,
+});
+```
+
+View the [`MLCloudImageLabelerOptions`](/reference/ml/mlcloudimagelabeleroptions) documentation for more information.
diff --git a/docs/ml/index.md b/docs/ml/index.md
new file mode 100644
index 00000000000..b9fa9915d19
--- /dev/null
+++ b/docs/ml/index.md
@@ -0,0 +1,3 @@
+---
+redirect: /ml/usage
+---
diff --git a/docs/ml-vision/landmark-recognition.md b/docs/ml/landmark-recognition.md
similarity index 68%
rename from docs/ml-vision/landmark-recognition.md
rename to docs/ml/landmark-recognition.md
index cb8d1131b16..569f2c8d909 100644
--- a/docs/ml-vision/landmark-recognition.md
+++ b/docs/ml/landmark-recognition.md
@@ -1,15 +1,13 @@
---
title: Landmark Recognition
-description: Get started with ML Kit Vision Landmark Recognition.
-next: /ml-vision/barcode-scanning
-previous: /ml-vision/text-recognition
+description: Get started with ML Landmark Recognition.
+next: /ml/image-labeling
+previous: /ml/text-recognition
---
Landmark recognition can recognize well-known landmarks in an image. It returns the landmarks that were recognized, along
with each landmark's geographic coordinates and the region of the image the landmark was found.
-The Machine Learning service is only offered as a cloud based one, and no on-device service exists.
-
Given an image file, the Landmark Recognition service will attempt to recognize one or more landmarks, offering information
such as:
@@ -25,13 +23,13 @@ To get started, call the `cloudLandmarkRecognizerProcessImage` method with a pat
```js
import { utils } from '@react-native-firebase/app';
-import vision from '@react-native-firebase/ml-vision';
+import ml from '@react-native-firebase/ml';
async function processLandmarks(localPath) {
- const landmarks = await vision().cloudLandmarkRecognizerProcessImage(localPath);
+ const landmarks = await ml().cloudLandmarkRecognizerProcessImage(localPath);
- landmarks.forEach(visionLandmark => {
- console.log('Landmark name: ', visionLandmark.landmark);
+ landmarks.forEach(landmark => {
+ console.log('Landmark name: ', landmark.landmark);
console.log('Landmark locations: ', block.locations);
console.log('Confidence score: ', block.confidence);
});
@@ -49,17 +47,17 @@ To help speed up requests and improve results, the `cloudLandmarkRecognizerProce
configuration object.
```js
-import vision, { VisionCloudLandmarkRecognizerModelType } from '@react-native-firebase/ml-vision';
+import ml, { MLCloudLandmarkRecognizerModelType } from '@react-native-firebase/ml';
-const landmarks = await vision().cloudLandmarkRecognizerProcessImage(localPath, {
+const landmarks = await ml().cloudLandmarkRecognizerProcessImage(localPath, {
// Limit the results
maxResults: 2,
// Set the model type
- modelType: VisionCloudLandmarkRecognizerModelType.LATEST_MODEL,
+ modelType: MLCloudLandmarkRecognizerModelType.LATEST_MODEL,
});
```
By default, the service will use a stable model to detect landmarks. However, if you feel results are not up-to-date, you
can optionally use the latest model available. Results however may change unexpectedly.
-View the [`VisionCloudLandmarkRecognizerOptions`](/reference/ml-vision/visioncloudlandmarkrecognizeroptions) documentation for more information.
+View the [`MLCloudLandmarkRecognizerOptions`](/reference/ml/mlcloudlandmarkrecognizeroptions) documentation for more information.
diff --git a/docs/ml/text-recognition.md b/docs/ml/text-recognition.md
new file mode 100644
index 00000000000..638f1832a09
--- /dev/null
+++ b/docs/ml/text-recognition.md
@@ -0,0 +1,66 @@
+---
+title: Text Recognition
+description: Get started with ML Text Recognition.
+next: /ml/landmark-recognition
+previous: /ml/usage
+---
+
+Text recognition can automate tedious data entry for credit cards, receipts, and business cards. With the Cloud-based API,
+you can also extract text from pictures of documents, which you can use to increase accessibility or translate documents.
+
+Once an image file has been processed, the API returns a [`MLDocumentText`](/reference/ml/mldocumenttext), referencing
+all found text along with each [`MLDocumentTextBlock`](/reference/ml/mldocumenttextblock). Each block contains
+meta-data such as:
+
+- The 4-point coordinates of the box on the document.
+- Paragraphs within the block.
+- Recognized languages within the block/document.
+- The confidence the Machine Learning service has in its own results.
+
+# Cloud Text Recognition
+
+The cloud based text recognition service uploads a given image of a document to the remote Firebase service which processes the results and returns them. Only image file types are allowed.
+To get started, call the `cloudDocumentTextRecognizerProcessImage` method with a path to a local file on your device:
+
+```js
+import { utils } from '@react-native-firebase/app';
+import ml from '@react-native-firebase/ml';
+
+async function processDocument(localPath) {
+ const processed = await ml().cloudDocumentTextRecognizerProcessImage(localPath);
+
+ console.log('Found text in document: ', processed.text);
+
+ processed.blocks.forEach(block => {
+ console.log('Found block with text: ', block.text);
+ console.log('Confidence in block: ', block.confidence);
+ console.log('Languages found in block: ', block.recognizedLanguages);
+ });
+}
+
+// Local path to file on the device
+const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/text-document.jpg`;
+
+processDocument(localFile).then(() => console.log('Finished processing file.'));
+```
+
+To learn more about the available data on a processed document, view the [`MLDocumentText`](/reference/ml/mldocumenttext)
+documentation.
+
+## Configuration
+
+To help improve the results when using the cloud service, you can optionally provide arguments to the `cloudDocumentTextRecognizerProcessImage`
+method:
+
+```js
+const processed = await ml().cloudDocumentTextRecognizerProcessImage(documentPath, {
+ // The document contains Kurdish
+ languageHints: ['KU'],
+});
+```
+
+In most scenarios, not providing any hints will yield better results. Use this configuration if the cloud service is struggling
+to detect a language.
+
+View the [`MLCloudDocumentTextRecognizerOptions`](/reference/ml/mlclouddocumenttextrecognizeroptions) documentation for more information.
+
diff --git a/docs/ml/usage/index.md b/docs/ml/usage/index.md
new file mode 100644
index 00000000000..34c250d60e4
--- /dev/null
+++ b/docs/ml/usage/index.md
@@ -0,0 +1,56 @@
+---
+title: ML
+description: Installation and getting started with ML.
+icon: //static.invertase.io/assets/firebase/ml-kit.svg
+next: /ml/text-recognition
+previous: /in-app-messaging/usage
+---
+
+# Installation
+
+This module requires that the `@react-native-firebase/app` module is already setup and installed. To install the "app" module, view the
+[Getting Started](/) documentation.
+
+```bash
+# Install & setup the app module
+yarn add @react-native-firebase/app
+
+# Install the ml module
+yarn add @react-native-firebase/ml
+
+# If you're developing your app using iOS, run this command
+cd ios/ && pod install
+```
+
+If you're using an older version of React Native without autolinking support, or wish to integrate into an existing project,
+you can follow the manual installation steps for [iOS](/ml/usage/installation/ios) and [Android](/ml/usage/installation/android).
+
+# What does it do
+
+ML makes use of Firebase Machine Learning's [Text Recognition](https://firebase.google.com/docs/ml/recognize-text),
+[Image Labeling](https://firebase.google.com/docs/ml/label-images) & [Landmark Recognition](https://firebase.google.com/docs/ml/recognize-landmarks) features.
+
+All Firebase ML services are cloud-based, with on-device APIs handled by the new, separate [Google ML Kit](https://developers.google.com/ml-kit/) (usable in React Native
+as a set of [react-native-mlkit modules](https://www.npmjs.com/org/react-native-mlkit)).
+
+
+
+## Support table
+
+The table below outlines the current module support for each available service.
+
+| API | Cloud Model |
+| --------------------------------------------------------------------------------- | ----------- |
+| [Text Recognition](https://firebase.google.com/docs/ml/recognize-text) | ✅ |
+| [Document Text Recognition](https://firebase.google.com/docs/ml/recognize-text)   | ✅          |
+| [Image Labeling](https://firebase.google.com/docs/ml/label-images) | ✅ |
+| [AutoML Vision Edge](https://firebase.google.com/docs/ml/automl-image-labeling) | ❌ |
+| [Object Detection/Tracking](https://firebase.google.com/docs/ml/object-detection) | ❌ |
+
+# Usage
+
+To get started, you can find the documentation for the individual ML services below:
+
+- [Text Recognition](/ml/text-recognition)
+- [Landmark Recognition](/ml/landmark-recognition)
+- [Image Labeling](/ml/image-labeling)
diff --git a/docs/ml-vision/usage/installation/android.md b/docs/ml/usage/installation/android.md
similarity index 64%
rename from docs/ml-vision/usage/installation/android.md
rename to docs/ml/usage/installation/android.md
index 433a46ba98d..f3ff62890d0 100644
--- a/docs/ml-vision/usage/installation/android.md
+++ b/docs/ml/usage/installation/android.md
@@ -1,8 +1,8 @@
---
title: Android Installation
-description: Manually integrate ML Kit Vision into your Android application.
-next: /ml-vision/usage/installation/ios
-previous: /ml-vision/usage
+description: Manually integrate ML into your Android application.
+next: /ml/usage/installation/ios
+previous: /ml/usage
---
# Android Manual Installation
@@ -15,8 +15,8 @@ auto-linking.
Add the following to your projects `/android/settings.gradle` file:
```groovy
-include ':@react-native-firebase_ml-vision'
-project(':@react-native-firebase_ml-vision').projectDir = new File(rootProject.projectDir, './../node_modules/@react-native-firebase/ml-vision/android')
+include ':@react-native-firebase_ml'
+project(':@react-native-firebase_ml').projectDir = new File(rootProject.projectDir, './../node_modules/@react-native-firebase/ml/android')
```
## 2. Update Gradle Dependencies
@@ -27,7 +27,7 @@ Add the React Native Functions module dependency to your `/android/app/build.gra
// ..
dependencies {
// ..
- implementation project(path: ":@react-native-firebase_ml-vision")
+ implementation project(path: ":@react-native-firebase_ml")
}
```
@@ -38,7 +38,7 @@ Import and apply the React Native Firebase module package to your `/android/app/
Import the package:
```java
-import io.invertase.firebase.perf.ReactNativeFirebaseMLVisionPackage;
+import io.invertase.firebase.perf.ReactNativeFirebaseMLPackage;
```
Add the package to the registry:
@@ -47,7 +47,7 @@ Add the package to the registry:
protected List getPackages() {
return Arrays.asList(
new MainReactPackage(),
- new ReactNativeFirebaseMLVisionPackage(),
+ new ReactNativeFirebaseMLPackage(),
```
## 4. Rebuild the project
diff --git a/docs/ml-vision/usage/installation/ios.md b/docs/ml/usage/installation/ios.md
similarity index 51%
rename from docs/ml-vision/usage/installation/ios.md
rename to docs/ml/usage/installation/ios.md
index 119ef6f3e7e..c11f45cef6c 100644
--- a/docs/ml-vision/usage/installation/ios.md
+++ b/docs/ml/usage/installation/ios.md
@@ -1,8 +1,8 @@
---
title: iOS Installation
-description: Manually integrate ML Kit Vision APIs into your iOS application.
-next: /ml-vision/usage/installation/android
-previous: /ml-vision/usage
+description: Manually integrate ML APIs into your iOS application.
+next: /ml/usage/installation/android
+previous: /ml/usage
---
# iOS Manual Installation
@@ -12,18 +12,18 @@ auto-linking.
## 1. Add the Pod
-Add the `RNFBMLVision` Pod to your projects `/ios/Podfile`:
+Add the `RNFBML` Pod to your projects `/ios/Podfile`:
```ruby
target 'app' do
# ...
- pod 'RNFBMLVision', :path => '../node_modules/@react-native-firebase/ml-vision'
+ pod 'RNFBML', :path => '../node_modules/@react-native-firebase/ml'
end
```
## 2. Update Pods & rebuild the project
-You may need to update your local Pods in order for the `RNFBMLVision` Pod to be installed in your project:
+You may need to update your local Pods in order for the `RNFBML` Pod to be installed in your project:
```bash
$ cd /ios/
diff --git a/docs/releases/index.md b/docs/releases/index.md
index e0e8ba2f00e..6cdbbd55cfe 100644
--- a/docs/releases/index.md
+++ b/docs/releases/index.md
@@ -21,8 +21,7 @@ Starting with version `v6.5.0`; all React Native Firebase packages are now indep
| Dynamic Links |  | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/dynamic-links/CHANGELOG.md) |
| In-app Messaging |  | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/in-app-messaging/CHANGELOG.md) |
| Instance ID |  | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/iid/CHANGELOG.md) |
-| ML Kit Natural Language |  | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/ml-natural-language/CHANGELOG.md) |
-| ML Kit Vision |  | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/ml-vision/CHANGELOG.md) |
+| ML |  | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/ml/CHANGELOG.md) |
| Performance Monitoring |  | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/perf/CHANGELOG.md) |
| Realtime Database |  | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/database/CHANGELOG.md) |
| Remote Config |  | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/remote-config/CHANGELOG.md) |
diff --git a/docs/releases/v6.0.0.md b/docs/releases/v6.0.0.md
index f92fe30008f..2c6bff3a215 100644
--- a/docs/releases/v6.0.0.md
+++ b/docs/releases/v6.0.0.md
@@ -26,8 +26,7 @@ The new modules:
| [Dynamic Links](/dynamic-links) | [](https://www.npmjs.com/package/@react-native-firebase/dynamic-links) | [](https://api.rnfirebase.io/coverage/dynamic-links/detail) |
| [In-app Messaging](/in-app-messaging) | [](https://www.npmjs.com/package/@react-native-firebase/in-app-messaging) | [](https://api.rnfirebase.io/coverage/in-app-messaging/detail) |
| [Instance ID](/iid) | [](https://www.npmjs.com/package/@react-native-firebase/iid) | [](https://api.rnfirebase.io/coverage/iid/detail) |
-| [ML Kit Natural Language](/ml-natural-language) | [](https://www.npmjs.com/package/@react-native-firebase/ml-natural-language) | [](https://api.rnfirebase.io/coverage/ml-natural-language/detail) |
-| [ML Kit Vision ](/ml-vision) | [](https://www.npmjs.com/package/@react-native-firebase/ml-vision) | [](https://api.rnfirebase.io/coverage/ml-vision/detail) |
+| [ML](/ml) | [](https://www.npmjs.com/package/@react-native-firebase/ml) | [](https://api.rnfirebase.io/coverage/ml/detail) |
| [Performance Monitoring](/perf) | [](https://www.npmjs.com/package/@react-native-firebase/perf) | [](https://api.rnfirebase.io/coverage/perf/detail) |
| [Realtime Database](/database) | [](https://www.npmjs.com/package/@react-native-firebase/database) | [](https://api.rnfirebase.io/coverage/database/detail) |
| [Remote Config](/remote-config) | [](https://www.npmjs.com/package/@react-native-firebase/remote-config) | [](https://api.rnfirebase.io/coverage/remote-config/detail) |
@@ -307,38 +306,17 @@ The Remote Config API has had a significant API change as originally highlighted
---
-### ML Kit Natural Language (naturalLanguage)
+### ML (Machine Learning)
> This is a new module in React Native Firebase.
- [NEW] Implemented support for language identification APIs
- Single Languages: `identifyLanguage()`.
- Multiple Languages: `identifyPossibleLanguages()`
-- [NEW] Implemented support for [Smart Replies](https://firebase.google.com/docs/ml-kit/generate-smart-replies)
- - [Example Video](https://twitter.com/mikediarmid/status/1128837402481635331)
-
-> ML Kit Translate APIs to come in a later release.
-
----
-
-### ML Kit Vision (vision)
-
-> This is a new module in React Native Firebase.
-
-- [NEW] Implemented support for [Text Recognition](https://firebase.google.com/docs/ml-kit/recognize-text) Vision APIs;
- - [x] Cloud
- - [x] On Device
-- [NEW] Implemented support for [Document Text Recognition](https://firebase.google.com/docs/ml-kit/recognize-text) Vision APIs;
- - [x] Cloud
-- [NEW] Implemented support for [Face Detection](https://firebase.google.com/docs/ml-kit/detect-faces) Vision APIs;
- - [x] On Device
-- [NEW] Implemented support for [Barcode Detection](https://firebase.google.com/docs/ml-kit/read-barcodes) Vision APIs;
- - [x] On Device
-- [NEW] Implemented support for [Image Labeling](https://firebase.google.com/docs/ml-kit/label-images) Vision APIs;
- - [x] Cloud
- - [x] On Device
-- [NEW] Implemented support for [Landmark Recognition](https://firebase.google.com/docs/ml-kit/recognize-landmarks) Vision APIs;
- - [x] Cloud
+- [NEW] Implemented support for [Text Recognition](https://firebase.google.com/docs/ml/recognize-text) Vision APIs;
+- [NEW] Implemented support for [Document Text Recognition](https://firebase.google.com/docs/ml/recognize-text) Vision APIs;
+- [NEW] Implemented support for [Image Labeling](https://firebase.google.com/docs/ml/label-images) Vision APIs;
+- [NEW] Implemented support for [Landmark Recognition](https://firebase.google.com/docs/ml/recognize-landmarks) Vision APIs;
---
diff --git a/docs/remote-config/usage/index.md b/docs/remote-config/usage/index.md
index fbc993a61f0..420960d285b 100644
--- a/docs/remote-config/usage/index.md
+++ b/docs/remote-config/usage/index.md
@@ -3,7 +3,7 @@ title: Remote Config
description: Installation and getting started with Remote Config.
icon: //static.invertase.io/assets/firebase/remote-config.svg
next: /perf/usage
-previous: /ml-vision/face-detection
+previous: /ml/image-labeling
---
# Installation
@@ -85,8 +85,10 @@ remoteConfig()
.then(fetchedRemotely => {
if (fetchedRemotely) {
console.log('Configs were retrieved from the backend and activated.');
- } else {
- console.log('No configs were fetched from the backend, and the local configs were already activated');
+ } else {
+ console.log(
+ 'No configs were fetched from the backend, and the local configs were already activated',
+ );
}
});
```
@@ -121,11 +123,11 @@ The API also provides a `getAll` method to read all parameters at once rather th
```js
const parameters = remoteConfig().getAll();
-Object.entries(parameters).forEach(($) => {
+Object.entries(parameters).forEach($ => {
const [key, entry] = $;
- console.log('Key: ', key);
- console.log('Source: ', entry.getSource());
- console.log('Value: ', entry.asString());
+ console.log('Key: ', key);
+ console.log('Source: ', entry.getSource());
+ console.log('Value: ', entry.asString());
});
```
diff --git a/docs/sidebar.yaml b/docs/sidebar.yaml
index dbb20064fca..7f33964a1e8 100644
--- a/docs/sidebar.yaml
+++ b/docs/sidebar.yaml
@@ -103,23 +103,15 @@
- - - Usage
- '/in-app-messaging/usage'
- '//static.invertase.io/assets/firebase/in-app-messaging.svg'
-- - ML Kit Natural Language
+- - ML
- - - Usage
- - '/ml-natural-language/usage'
- - '//static.invertase.io/assets/firebase/ml-kit.svg'
-- - ML Kit Vision
- - - - Usage
- - '/ml-vision/usage'
+ - '/ml/usage'
- - Text Recognition
- - '/ml-vision/text-recognition'
+ - '/ml/text-recognition'
- - Landmark Recognition
- - '/ml-vision/landmark-recognition'
- - - Barcode Scanning
- - '/ml-vision/barcode-scanning'
+ - '/ml/landmark-recognition'
- - Image Labeling
- - '/ml-vision/image-labeling'
- - - Face Detection
- - '/ml-vision/face-detection'
+ - '/ml/image-labeling'
- '//static.invertase.io/assets/firebase/ml-kit.svg'
- - Remote Config
- - - Usage
diff --git a/package.json b/package.json
index fdac2cae27f..4d928ed5533 100644
--- a/package.json
+++ b/package.json
@@ -9,7 +9,9 @@
"build:all:build": "lerna run build",
"lint": "eslint . --ext .js,.jsx,.ts,.tsx",
"lint:report": "eslint --output-file=eslint-report.json --format=json . --ext .js,.jsx,.ts,.tsx",
+ "lint:spellcheck": "spellchecker --quiet --files=\"docs/**/*.md\" --dictionaries=\"./.spellcheck.dict.txt\" --reports=\"spelling.json\" --plugins spell indefinite-article repeated-words syntax-mentions syntax-urls frontmatter",
"tsc:compile": "tsc --project .",
+ "lint:all": "yarn lint && yarn lint:spellcheck && yarn tsc:compile",
"lerna:bootstrap": "lerna bootstrap",
"lerna:link": "lerna link",
"lerna:clean": "lerna clean",
@@ -19,7 +21,8 @@
"tests:packager:chrome": "cd tests && node_modules/.bin/react-native start --reset-cache",
"tests:packager:jet": "cd tests && cross-env REACT_DEBUGGER=\"echo nope\" node_modules/.bin/react-native start --no-interactive",
"tests:packager:jet-reset-cache": "cd tests && cross-env REACT_DEBUGGER=\"echo nope\" node_modules/.bin/react-native start --reset-cache --no-interactive",
- "tests:emulator:start": "./tests/node_modules/.bin/firebase emulators:start --only firestore",
+ "tests:emulator:start": "cd ./.github/workflows/scripts && sh ./start-firestore-emulator.sh --no-daemon",
+ "tests:emulator:start-ci": "cd ./.github/workflows/scripts && sh ./start-firestore-emulator.sh",
"tests:android:build": "cd tests && ./node_modules/.bin/detox build --configuration android.emu.debug",
"tests:android:build-release": "cd tests && ./node_modules/.bin/detox build --configuration android.emu.release",
"tests:android:test": "cd tests && ./node_modules/.bin/detox test --configuration android.emu.debug",
diff --git a/packages/admob/android/build.gradle b/packages/admob/android/build.gradle
index 2bfb3594c78..8d3b62ed8de 100644
--- a/packages/admob/android/build.gradle
+++ b/packages/admob/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/analytics/__tests__/analytics.test.ts b/packages/analytics/__tests__/analytics.test.ts
index 0c32df9d001..433019cbaa4 100644
--- a/packages/analytics/__tests__/analytics.test.ts
+++ b/packages/analytics/__tests__/analytics.test.ts
@@ -46,20 +46,6 @@ describe('Analytics', () => {
});
});
- it('errors if milliseconds not a number', () => {
- // @ts-ignore test
- expect(() => firebase.analytics().setMinimumSessionDuration('123')).toThrowError(
- "'milliseconds' expected a number value",
- );
- });
-
- it('errors if milliseconds is less than 0', () => {
- // @ts-ignore test
- expect(() => firebase.analytics().setMinimumSessionDuration(-100)).toThrowError(
- "'milliseconds' expected a positive number value",
- );
- });
-
it('errors if milliseconds not a number', () => {
// @ts-ignore test
expect(() => firebase.analytics().setSessionTimeoutDuration('123')).toThrowError(
diff --git a/packages/analytics/android/build.gradle b/packages/analytics/android/build.gradle
index b1f15091c56..f47bf93427f 100644
--- a/packages/analytics/android/build.gradle
+++ b/packages/analytics/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/analytics/android/src/main/java/io/invertase/firebase/analytics/UniversalFirebaseAnalyticsModule.java b/packages/analytics/android/src/main/java/io/invertase/firebase/analytics/UniversalFirebaseAnalyticsModule.java
index 6bd845c0db6..7fbb9e60ae4 100644
--- a/packages/analytics/android/src/main/java/io/invertase/firebase/analytics/UniversalFirebaseAnalyticsModule.java
+++ b/packages/analytics/android/src/main/java/io/invertase/firebase/analytics/UniversalFirebaseAnalyticsModule.java
@@ -52,13 +52,6 @@ Task setAnalyticsCollectionEnabled(Boolean enabled) {
});
}
- Task setMinimumSessionDuration(long milliseconds) {
- return Tasks.call(() -> {
- FirebaseAnalytics.getInstance(getContext()).setMinimumSessionDuration(milliseconds);
- return null;
- });
- }
-
Task setSessionTimeoutDuration(long milliseconds) {
return Tasks.call(() -> {
FirebaseAnalytics.getInstance(getContext()).setSessionTimeoutDuration(milliseconds);
diff --git a/packages/analytics/android/src/reactnative/java/io/invertase/firebase/analytics/ReactNativeFirebaseAnalyticsModule.java b/packages/analytics/android/src/reactnative/java/io/invertase/firebase/analytics/ReactNativeFirebaseAnalyticsModule.java
index f3ec0947460..cf909b282f6 100644
--- a/packages/analytics/android/src/reactnative/java/io/invertase/firebase/analytics/ReactNativeFirebaseAnalyticsModule.java
+++ b/packages/analytics/android/src/reactnative/java/io/invertase/firebase/analytics/ReactNativeFirebaseAnalyticsModule.java
@@ -59,17 +59,6 @@ public void setAnalyticsCollectionEnabled(Boolean enabled, Promise promise) {
});
}
- @ReactMethod
- public void setMinimumSessionDuration(double milliseconds, Promise promise) {
- module.setMinimumSessionDuration((long) milliseconds).addOnCompleteListener(task -> {
- if (task.isSuccessful()) {
- promise.resolve(task.getResult());
- } else {
- rejectPromiseWithExceptionMap(promise, task.getException());
- }
- });
- }
-
@ReactMethod
public void setSessionTimeoutDuration(double milliseconds, Promise promise) {
module.setSessionTimeoutDuration((long) milliseconds).addOnCompleteListener(task -> {
diff --git a/packages/analytics/e2e/analytics.e2e.js b/packages/analytics/e2e/analytics.e2e.js
index e6a5dee9007..6b3e25bb6c3 100644
--- a/packages/analytics/e2e/analytics.e2e.js
+++ b/packages/analytics/e2e/analytics.e2e.js
@@ -56,26 +56,6 @@ describe('analytics()', () => {
});
});
- describe('setCurrentScreen()', () => {
- it('screenName only', async () => {
- await firebase.analytics().setCurrentScreen('invertase screen');
- });
-
- it('screenName with screenClassOverride', async () => {
- await firebase.analytics().setCurrentScreen('invertase screen', 'invertase class override');
- });
- });
-
- describe('setMinimumSessionDuration()', () => {
- it('default duration', async () => {
- await firebase.analytics().setMinimumSessionDuration();
- });
-
- it('custom duration', async () => {
- await firebase.analytics().setMinimumSessionDuration(1337);
- });
- });
-
describe('setSessionTimeoutDuration()', () => {
it('default duration', async () => {
await firebase.analytics().setSessionTimeoutDuration();
diff --git a/packages/analytics/ios/RNFBAnalytics/RNFBAnalyticsModule.m b/packages/analytics/ios/RNFBAnalytics/RNFBAnalyticsModule.m
index 0d42fedf3db..1507a532d9f 100644
--- a/packages/analytics/ios/RNFBAnalytics/RNFBAnalyticsModule.m
+++ b/packages/analytics/ios/RNFBAnalytics/RNFBAnalyticsModule.m
@@ -125,16 +125,6 @@ - (dispatch_queue_t)methodQueue {
return resolve([NSNull null]);
}
- RCT_EXPORT_METHOD(setMinimumSessionDuration:
- (double) milliseconds
- resolver:
- (RCTPromiseResolveBlock) resolve
- rejecter:
- (RCTPromiseRejectBlock) reject) {
- // Do nothing - this only exists in android
- return resolve([NSNull null]);
- }
-
RCT_EXPORT_METHOD(setSessionTimeoutDuration:
(double) milliseconds
resolver:
diff --git a/packages/analytics/lib/index.d.ts b/packages/analytics/lib/index.d.ts
index 077706683fb..06bf32ebb93 100644
--- a/packages/analytics/lib/index.d.ts
+++ b/packages/analytics/lib/index.d.ts
@@ -663,39 +663,6 @@ export namespace FirebaseAnalyticsTypes {
*/
setAnalyticsCollectionEnabled(enabled: boolean): Promise;
- /**
- * Sets the current screen name.
- *
- * #### Example
- *
- * ```js
- * await firebase.analytics().setCurrentScreen('ProductScreen', 'ProductScreen');
- * ```
- *
- * > Whilst screenClassOverride is optional, it is recommended it is
- * always sent as your current class name. For example on Android it will always
- * show as 'MainActivity' if you do not specify it.
- *
- * @param screenName A screen name, e.g. Product.
- * @param screenClassOverride On Android, React Native runs in a single activity called
- * 'MainActivity'. Setting this parameter overrides the default name shown on logs.
- * @deprecated
- */
- setCurrentScreen(screenName: string, screenClassOverride?: string): Promise;
- /**
- * Sets the minimum engagement time required before starting a session.
- *
- * #### Example
- *
- * ```js
- * // 20 seconds
- * await firebase.analytics().setMinimumSessionDuration(20000);
- * ```
- *
- * @param milliseconds The default value is 10000 (10 seconds).
- */
- setMinimumSessionDuration(milliseconds?: number): Promise;
-
/**
* Sets the duration of inactivity that terminates the current session.
*
diff --git a/packages/analytics/lib/index.js b/packages/analytics/lib/index.js
index f3630da5ced..b54836defed 100644
--- a/packages/analytics/lib/index.js
+++ b/packages/analytics/lib/index.js
@@ -37,9 +37,20 @@ import version from './version';
import * as structs from './structs';
const ReservedEventNames = [
+ 'ad_reward',
+ 'app_background',
'app_clear_data',
- 'app_uninstall',
+ 'app_exception',
+ 'app_remove',
+ 'app_store_refund',
+ 'app_store_subscription_cancel',
+ 'app_store_subscription_convert',
+ 'app_store_subscription_renew',
'app_update',
+ 'app_upgrade',
+ 'dynamic_link_app_open',
+ 'dynamic_link_app_update',
+ 'dynamic_link_first_open',
'error',
'first_open',
'in_app_purchase',
@@ -49,6 +60,7 @@ const ReservedEventNames = [
'notification_receive',
'os_update',
'session_start',
+ 'session_start_with_rollout',
'user_engagement',
];
@@ -113,22 +125,6 @@ class FirebaseAnalyticsModule extends FirebaseModule {
});
}
- setMinimumSessionDuration(milliseconds = 10000) {
- if (!isNumber(milliseconds)) {
- throw new Error(
- "firebase.analytics().setMinimumSessionDuration(*) 'milliseconds' expected a number value.",
- );
- }
-
- if (milliseconds < 0) {
- throw new Error(
- "firebase.analytics().setMinimumSessionDuration(*) 'milliseconds' expected a positive number value.",
- );
- }
-
- return this.native.setMinimumSessionDuration(milliseconds);
- }
-
setSessionTimeoutDuration(milliseconds = 1800000) {
if (!isNumber(milliseconds)) {
throw new Error(
diff --git a/packages/analytics/type-test.ts b/packages/analytics/type-test.ts
index 7f37af3ed80..c9948578d1c 100644
--- a/packages/analytics/type-test.ts
+++ b/packages/analytics/type-test.ts
@@ -67,7 +67,6 @@ console.log(firebase.analytics().setAnalyticsCollectionEnabled);
console.log(firebase.analytics().logSelectPromotion);
console.log(firebase.analytics().logScreenView);
console.log(firebase.analytics().logViewPromotion);
-console.log(firebase.analytics().setMinimumSessionDuration);
console.log(firebase.analytics().setSessionTimeoutDuration);
console.log(firebase.analytics().setUserId);
console.log(firebase.analytics().setUserProperties);
@@ -110,7 +109,6 @@ console.log(analytics().setAnalyticsCollectionEnabled);
console.log(analytics().logSelectPromotion);
console.log(analytics().logScreenView);
console.log(analytics().logViewPromotion);
-console.log(analytics().setMinimumSessionDuration);
console.log(analytics().setSessionTimeoutDuration);
console.log(analytics().setUserId);
console.log(analytics().setUserProperties);
diff --git a/packages/app/android/build.gradle b/packages/app/android/build.gradle
index dc1e5cadc1a..04a224f8dcc 100644
--- a/packages/app/android/build.gradle
+++ b/packages/app/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/app/android/src/reactnative/java/io/invertase/firebase/utils/ReactNativeFirebaseUtilsModule.java b/packages/app/android/src/reactnative/java/io/invertase/firebase/utils/ReactNativeFirebaseUtilsModule.java
index 83f13be50a7..cf6e909b90e 100644
--- a/packages/app/android/src/reactnative/java/io/invertase/firebase/utils/ReactNativeFirebaseUtilsModule.java
+++ b/packages/app/android/src/reactnative/java/io/invertase/firebase/utils/ReactNativeFirebaseUtilsModule.java
@@ -161,8 +161,7 @@ public Map getConstants() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
- File folder = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOCUMENTS);
- constants.put(KEY_DOCUMENT_DIRECTORY, folder.getAbsolutePath());
+ constants.put(KEY_DOCUMENT_DIRECTORY, context.getExternalFilesDir(null).getAbsolutePath());
} else {
constants.put(KEY_DOCUMENT_DIRECTORY, context.getFilesDir().getAbsolutePath());
}
diff --git a/packages/app/lib/internal/constants.js b/packages/app/lib/internal/constants.js
index 2db91325ec4..3badd9d796d 100644
--- a/packages/app/lib/internal/constants.js
+++ b/packages/app/lib/internal/constants.js
@@ -35,7 +35,7 @@ export const KNOWN_NAMESPACES = [
'dynamicLinks',
'messaging',
'naturalLanguage',
- 'vision',
+ 'ml',
'notifications',
'perf',
'utils',
diff --git a/packages/app/package.json b/packages/app/package.json
index e689a0e0baa..6f94d571abf 100644
--- a/packages/app/package.json
+++ b/packages/app/package.json
@@ -62,15 +62,14 @@
},
"sdkVersions": {
"ios": {
- "firebase": "~> 6.34.0"
+ "firebase": "~> 7.0.0"
},
"android": {
"minSdk": 16,
- "targetSdk": 29,
- "compileSdk": 29,
- "buildTools": "29.0.3",
- "firebase": "25.12.0",
- "iid": "20.3.0",
+ "targetSdk": 30,
+ "compileSdk": 30,
+ "buildTools": "30.0.2",
+ "firebase": "26.0.0",
"playServicesAuth": "18.1.0"
}
}
diff --git a/packages/auth/android/build.gradle b/packages/auth/android/build.gradle
index 335a183e30f..c08b08e991d 100644
--- a/packages/auth/android/build.gradle
+++ b/packages/auth/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/auth/ios/RNFBAuth/RNFBAuthModule.m b/packages/auth/ios/RNFBAuth/RNFBAuthModule.m
index cafbc8a4cb6..ad1a7c1e762 100644
--- a/packages/auth/ios/RNFBAuth/RNFBAuthModule.m
+++ b/packages/auth/ios/RNFBAuth/RNFBAuthModule.m
@@ -552,7 +552,7 @@ - (void)invalidate {
}];
}
- [[FIRAuth authWithApp:firebaseApp] signInAndRetrieveDataWithCredential:credential completion:^(
+ [[FIRAuth authWithApp:firebaseApp] signInWithCredential:credential completion:^(
FIRAuthDataResult *authResult,
NSError *error
) {
@@ -624,14 +624,14 @@ - (void)invalidate {
NSMutableDictionary *data = [NSMutableDictionary dictionary];
- if ([info dataForKey:FIRActionCodeEmailKey] != nil) {
- [data setValue:[info dataForKey:FIRActionCodeEmailKey] forKey:keyEmail];
+ if (info.email != nil) {
+ [data setValue:info.email forKey:keyEmail];
} else {
[data setValue:[NSNull null] forKey:keyEmail];
}
- if ([info dataForKey:FIRActionCodeFromEmailKey] != nil) {
- [data setValue:[info dataForKey:FIRActionCodeFromEmailKey] forKey:@"fromEmail"];
+ if (info.previousEmail != nil) {
+ [data setValue:info.previousEmail forKey:@"fromEmail"];
} else {
[data setValue:[NSNull null] forKey:@"fromEmail"];
}
@@ -767,7 +767,7 @@ - (void)invalidate {
FIRAuthCredential *credential =
[[FIRPhoneAuthProvider provider] credentialWithVerificationID:verificationId verificationCode:verificationCode];
- [[FIRAuth authWithApp:firebaseApp] signInAndRetrieveDataWithCredential:credential completion:^(
+ [[FIRAuth authWithApp:firebaseApp] signInWithCredential:credential completion:^(
FIRAuthDataResult *authResult,
NSError *error
) {
@@ -798,7 +798,7 @@ - (void)invalidate {
FIRUser *user = [FIRAuth authWithApp:firebaseApp].currentUser;
if (user) {
- [user linkAndRetrieveDataWithCredential:credential
+ [user linkWithCredential:credential
completion:^(FIRAuthDataResult *_Nullable authResult, NSError *_Nullable error) {
if (error) {
[self promiseRejectAuthException:reject error:error];
@@ -852,7 +852,7 @@ - (void)invalidate {
FIRUser *user = [FIRAuth authWithApp:firebaseApp].currentUser;
if (user) {
- [user reauthenticateAndRetrieveDataWithCredential:credential completion:^(
+ [user reauthenticateWithCredential:credential completion:^(
FIRAuthDataResult *_Nullable authResult,
NSError *_Nullable error
) {
diff --git a/packages/crashlytics/android/build.gradle b/packages/crashlytics/android/build.gradle
index b2116547bff..efc4de569d6 100644
--- a/packages/crashlytics/android/build.gradle
+++ b/packages/crashlytics/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/database/android/build.gradle b/packages/database/android/build.gradle
index 41d8e8396d4..e3c7d7a4edf 100644
--- a/packages/database/android/build.gradle
+++ b/packages/database/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/dynamic-links/android/build.gradle b/packages/dynamic-links/android/build.gradle
index 9188bbf4f3f..a53c0569daa 100644
--- a/packages/dynamic-links/android/build.gradle
+++ b/packages/dynamic-links/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/firestore/android/build.gradle b/packages/firestore/android/build.gradle
index 014809fb797..acc6df0be24 100644
--- a/packages/firestore/android/build.gradle
+++ b/packages/firestore/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/firestore/e2e/Query/limitToLast.e2e.js b/packages/firestore/e2e/Query/limitToLast.e2e.js
index 08bafca5875..464a77ea538 100644
--- a/packages/firestore/e2e/Query/limitToLast.e2e.js
+++ b/packages/firestore/e2e/Query/limitToLast.e2e.js
@@ -51,7 +51,8 @@ describe('firestore().collection().limitToLast()', () => {
should(colRef._modifiers.options.limit).equal(undefined);
});
- it('removes limitToLast query if limit is set afterwards', () => {
+ // FIXME flaky on local tests
+ xit('removes limitToLast query if limit is set afterwards', () => {
const colRef = firebase
.firestore()
.collection(COLLECTION)
@@ -61,7 +62,8 @@ describe('firestore().collection().limitToLast()', () => {
should(colRef._modifiers.options.limitToLast).equal(undefined);
});
- it('limitToLast the number of documents', async () => {
+ // FIXME flaky on local tests
+ xit('limitToLast the number of documents', async () => {
const subCol = `${COLLECTION}/limitToLast/count`;
const colRef = firebase.firestore().collection(subCol);
diff --git a/packages/firestore/e2e/Query/orderBy.e2e.js b/packages/firestore/e2e/Query/orderBy.e2e.js
index fc3973b59d2..258b9e2b378 100644
--- a/packages/firestore/e2e/Query/orderBy.e2e.js
+++ b/packages/firestore/e2e/Query/orderBy.e2e.js
@@ -107,7 +107,8 @@ describe('firestore().collection().orderBy()', () => {
}
});
- it('orders by a value ASC', async () => {
+ // FIXME flaky in local tests
+ xit('orders by a value ASC', async () => {
const colRef = firebase.firestore().collection(`${COLLECTION}/order/asc`);
await colRef.add({ value: 1 });
@@ -122,7 +123,8 @@ describe('firestore().collection().orderBy()', () => {
});
});
- it('orders by a value DESC', async () => {
+ // FIXME flaky in local tests
+ xit('orders by a value DESC', async () => {
const colRef = firebase.firestore().collection(`${COLLECTION}/order/desc`);
await colRef.add({ value: 1 });
diff --git a/packages/firestore/e2e/Query/where.e2e.js b/packages/firestore/e2e/Query/where.e2e.js
index 76a4a23de80..82aa125013b 100644
--- a/packages/firestore/e2e/Query/where.e2e.js
+++ b/packages/firestore/e2e/Query/where.e2e.js
@@ -339,7 +339,8 @@ describe('firestore().collection().where()', () => {
});
});
- it('returns with in filter', async () => {
+ // FIXME flaky with semi-persistent data until emulator is working
+ xit('returns with in filter', async () => {
const colRef = firebase.firestore().collection(`${COLLECTION}/filter/in`);
await Promise.all([
@@ -358,7 +359,8 @@ describe('firestore().collection().where()', () => {
});
});
- it('returns with array-contains-any filter', async () => {
+ // FIXME flaky with semi-persistent data until emulator is working
+ xit('returns with array-contains-any filter', async () => {
const colRef = firebase.firestore().collection(`${COLLECTION}/filter/array-contains-any`);
await Promise.all([
@@ -373,7 +375,8 @@ describe('firestore().collection().where()', () => {
snapshot.size.should.eql(3); // 2nd record should only be returned once
});
- it('returns with a FieldPath', async () => {
+ // FIXME flaky with semi-persistent data until emulator is working
+ xit('returns with a FieldPath', async () => {
const colRef = firebase.firestore().collection(`${COLLECTION}/filter/where-fieldpath`);
const fieldPath = new firebase.firestore.FieldPath('map', 'foo.bar@gmail.com');
@@ -409,7 +412,8 @@ describe('firestore().collection().where()', () => {
}
});
- it('should correctly query integer values with in operator', async () => {
+ // FIXME flaky with semi-persistent data until emulator is working
+ xit('should correctly query integer values with in operator', async () => {
const ref = firebase.firestore().collection(COLLECTION);
await ref.add({ status: 1 });
@@ -423,7 +427,8 @@ describe('firestore().collection().where()', () => {
items.length.should.equal(1);
});
- it('should correctly query integer values with array-contains operator', async () => {
+ // FIXME flaky with semi-persistent data until emulator is working
+ xit('should correctly query integer values with array-contains operator', async () => {
const ref = firebase.firestore().collection(COLLECTION);
await ref.add({ status: [1, 2, 3] });
@@ -437,7 +442,8 @@ describe('firestore().collection().where()', () => {
items.length.should.equal(1);
});
- it("should correctly retrieve data when using 'not-in' operator", async () => {
+ // FIXME flaky with semi-persistent data until emulator is working
+ xit("should correctly retrieve data when using 'not-in' operator", async () => {
const ref = firebase.firestore().collection(COLLECTION);
await Promise.all([ref.add({ notIn: 'here' }), ref.add({ notIn: 'now' })]);
@@ -513,7 +519,8 @@ describe('firestore().collection().where()', () => {
}
});
- it("should correctly retrieve data when using '!=' operator", async () => {
+ // FIXME flaky with semi-persistent data until emulator is working
+ xit("should correctly retrieve data when using '!=' operator", async () => {
const ref = firebase.firestore().collection(COLLECTION);
await Promise.all([ref.add({ notEqual: 'here' }), ref.add({ notEqual: 'now' })]);
diff --git a/packages/firestore/e2e/firestore.e2e.js b/packages/firestore/e2e/firestore.e2e.js
index 38687b72c7c..865c04d16d6 100644
--- a/packages/firestore/e2e/firestore.e2e.js
+++ b/packages/firestore/e2e/firestore.e2e.js
@@ -364,7 +364,7 @@ describe('firestore()', () => {
});
describe('wait for pending writes', () => {
- it('waits for pending writes', async () => {
+ xit('waits for pending writes', async () => {
const waitForPromiseMs = 500;
const testTimeoutMs = 10000;
diff --git a/packages/functions/android/build.gradle b/packages/functions/android/build.gradle
index 4ac4e89d0f3..b4ad74afd93 100644
--- a/packages/functions/android/build.gradle
+++ b/packages/functions/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/iid/android/build.gradle b/packages/iid/android/build.gradle
index 398c2c7c7fa..c9b11af32f4 100644
--- a/packages/iid/android/build.gradle
+++ b/packages/iid/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
@@ -32,12 +32,6 @@ def packageJson = PackageJson.getForProject(project)
def appPackageJson = PackageJson.getForProject(appProject)
def firebaseBomVersion = appPackageJson['sdkVersions']['android']['firebase']
-// TODO Must put the concrete version here as the BoM no longer publishes the IID version, align it with the BoM change above
-// Upstream issue: https://github.com/firebase/firebase-android-sdk/issues/1077
-// Issue: https://github.com/invertase/react-native-firebase/issues/3918
-// Example of where to find concrete version to match BoM: https://firebase.google.com/support/release-notes/android#iid_v20-2-1
-def firebaseIidVersion = appPackageJson['sdkVersions']['android']['iid']
-
def jsonMinSdk = appPackageJson['sdkVersions']['android']['minSdk']
def jsonTargetSdk = appPackageJson['sdkVersions']['android']['targetSdk']
def jsonCompileSdk = appPackageJson['sdkVersions']['android']['compileSdk']
@@ -65,7 +59,6 @@ project.ext {
firebase: [
bom: firebaseBomVersion,
- iid: firebaseIidVersion
],
],
@@ -99,7 +92,7 @@ repositories {
dependencies {
api appProject
implementation platform("com.google.firebase:firebase-bom:${ReactNative.ext.getVersion("firebase", "bom")}")
- implementation "com.google.firebase:firebase-iid:${ReactNative.ext.getVersion("firebase", "iid")}"
+ implementation "com.google.firebase:firebase-iid"
}
ReactNative.shared.applyPackageVersion()
diff --git a/packages/in-app-messaging/android/build.gradle b/packages/in-app-messaging/android/build.gradle
index 56e2b24ee0b..083b5c78686 100644
--- a/packages/in-app-messaging/android/build.gradle
+++ b/packages/in-app-messaging/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/messaging/android/build.gradle b/packages/messaging/android/build.gradle
index 4ad2c47b9e7..47122c6340e 100644
--- a/packages/messaging/android/build.gradle
+++ b/packages/messaging/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/messaging/e2e/remoteMessage.e2e.js b/packages/messaging/e2e/remoteMessage.e2e.js
index 81aafcb2114..38807fc38b0 100644
--- a/packages/messaging/e2e/remoteMessage.e2e.js
+++ b/packages/messaging/e2e/remoteMessage.e2e.js
@@ -16,7 +16,23 @@
*/
describe('messaging().sendMessage(*)', () => {
- it('throws if no object provided', () => {
+ it('throws if used on ios', () => {
+ if (device.getPlatform() === 'ios') {
+ try {
+ firebase.messaging().sendMessage(123);
+ return Promise.reject(new Error('Did not throw Error.'));
+ } catch (e) {
+ e.message.should.containEql(
+ 'firebase.messaging().sendMessage() is only supported on Android devices.',
+ );
+ return Promise.resolve();
+ }
+ } else {
+ return Promise.resolve();
+ }
+ });
+
+ android.it('throws if no object provided', () => {
try {
firebase.messaging().sendMessage(123);
return Promise.reject(new Error('Did not throw Error.'));
@@ -26,11 +42,11 @@ describe('messaging().sendMessage(*)', () => {
}
});
- it('uses default values', async () => {
+ android.it('uses default values', async () => {
firebase.messaging().sendMessage({});
});
- describe('to', () => {
+ android.describe('to', () => {
it('throws if not a string', () => {
try {
firebase.messaging().sendMessage({
@@ -50,7 +66,7 @@ describe('messaging().sendMessage(*)', () => {
});
});
- describe('messageId', () => {
+ android.describe('messageId', () => {
it('throws if not a string', () => {
try {
firebase.messaging().sendMessage({
@@ -70,7 +86,7 @@ describe('messaging().sendMessage(*)', () => {
});
});
- describe('ttl', () => {
+ android.describe('ttl', () => {
it('throws if not a number', () => {
try {
firebase.messaging().sendMessage({
@@ -114,7 +130,7 @@ describe('messaging().sendMessage(*)', () => {
});
});
- describe('data', () => {
+ android.describe('data', () => {
it('throws if not an object', () => {
try {
firebase.messaging().sendMessage({
@@ -136,7 +152,7 @@ describe('messaging().sendMessage(*)', () => {
});
});
- describe('collapseKey', () => {
+ android.describe('collapseKey', () => {
it('throws if not a string', () => {
try {
firebase.messaging().sendMessage({
@@ -156,7 +172,7 @@ describe('messaging().sendMessage(*)', () => {
});
});
- describe('messageType', () => {
+ android.describe('messageType', () => {
it('throws if not a string', () => {
try {
firebase.messaging().sendMessage({
diff --git a/packages/messaging/ios/RNFBMessaging/RNFBMessaging+FIRMessagingDelegate.m b/packages/messaging/ios/RNFBMessaging/RNFBMessaging+FIRMessagingDelegate.m
index 87f78ab2a34..e0ffa9dd052 100644
--- a/packages/messaging/ios/RNFBMessaging/RNFBMessaging+FIRMessagingDelegate.m
+++ b/packages/messaging/ios/RNFBMessaging/RNFBMessaging+FIRMessagingDelegate.m
@@ -64,14 +64,4 @@ - (void)messaging:(FIRMessaging *)messaging didReceiveRegistrationToken:(NSStrin
}
}
-- (void)messaging:(nonnull FIRMessaging *)messaging didReceiveMessage:(nonnull FIRMessagingRemoteMessage *)remoteMessage {
- // If the users AppDelegate implements messaging:didReceiveMessage: then call it
- SEL messaging_didReceiveMessageSelector =
- NSSelectorFromString(@"messaging:didReceiveMessage:");
- if ([[GULAppDelegateSwizzler sharedApplication].delegate respondsToSelector:messaging_didReceiveMessageSelector]) {
- void (*usersDidReceiveMessageIMP)(id, SEL, FIRMessaging *, FIRMessagingRemoteMessage *) = (typeof(usersDidReceiveMessageIMP)) &objc_msgSend;
- usersDidReceiveMessageIMP([GULAppDelegateSwizzler sharedApplication].delegate, messaging_didReceiveMessageSelector, messaging, remoteMessage);
- }
-}
-
@end
diff --git a/packages/messaging/ios/RNFBMessaging/RNFBMessaging+NSNotificationCenter.m b/packages/messaging/ios/RNFBMessaging/RNFBMessaging+NSNotificationCenter.m
index 7872b8042e0..abe2e8d6a87 100644
--- a/packages/messaging/ios/RNFBMessaging/RNFBMessaging+NSNotificationCenter.m
+++ b/packages/messaging/ios/RNFBMessaging/RNFBMessaging+NSNotificationCenter.m
@@ -51,18 +51,6 @@ - (void)observe {
// ObjC - > Mutates the root React components initialProps to toggle `isHeadless` state
[[NSNotificationCenter defaultCenter] addObserver:strongSelf selector:@selector(application_onDidEnterForeground) name:UIApplicationWillEnterForegroundNotification object:nil];
- // Firebase Messaging
- // JS -> `onSendError` events
- [[NSNotificationCenter defaultCenter] addObserver:strongSelf selector:@selector(messaging_onSendErrorNotification:) name:FIRMessagingSendErrorNotification object:nil];
-
- // Firebase Messaging
- // JS -> `onMessageSent` events
- [[NSNotificationCenter defaultCenter] addObserver:strongSelf selector:@selector(messaging_onSendSuccessNotification:) name:FIRMessagingSendSuccessNotification object:nil];
-
- // Firebase Messaging
- // JS -> `onDeletedMessages` events
- [[NSNotificationCenter defaultCenter] addObserver:strongSelf selector:@selector(messaging_onDeletedMessagesNotification) name:FIRMessagingMessagesDeletedNotification object:nil];
-
});
}
@@ -71,40 +59,6 @@ + (void)load {
[[self sharedInstance] observe];
}
-#pragma mark -
-#pragma mark Firebase Messaging Notifications
-
-// Firebase Messaging
-// JS -> `onSendError`
-- (void)messaging_onSendErrorNotification:(NSNotification *)notification {
- NSDictionary *userInfo = notification.userInfo;
- NSError *error = (NSError *) userInfo[@"error"];
- NSString *messageID = (NSString *) userInfo[@"messageID"];
- [[RNFBRCTEventEmitter shared] sendEventWithName:@"messaging_message_send_error" body:@{
- @"messageId": messageID,
- @"error": @{
- @"code": @"unknown",
- @"message": error.localizedDescription
- }
- }];
-}
-
-// Firebase Messaging
-// JS -> `onMessageSent`
-- (void)messaging_onSendSuccessNotification:(NSNotification *)notification {
- NSDictionary *userInfo = notification.userInfo;
- NSString *messageID = (NSString *) userInfo[@"messageID"];
- [[RNFBRCTEventEmitter shared] sendEventWithName:@"messaging_message_sent" body:@{
- @"messageId": messageID
- }];
-}
-
-// Firebase Messaging
-// JS -> `onDeletedMessages`
-- (void)messaging_onDeletedMessagesNotification {
- [[RNFBRCTEventEmitter shared] sendEventWithName:@"messaging_message_deleted" body:@{}];
-}
-
#pragma mark -
#pragma mark Application Notifications
diff --git a/packages/messaging/ios/RNFBMessaging/RNFBMessagingModule.m b/packages/messaging/ios/RNFBMessaging/RNFBMessagingModule.m
index 21c7bf7de53..6199d759852 100644
--- a/packages/messaging/ios/RNFBMessaging/RNFBMessagingModule.m
+++ b/packages/messaging/ios/RNFBMessaging/RNFBMessagingModule.m
@@ -306,19 +306,6 @@ - (NSDictionary *)constantsToExport {
}
}
-RCT_EXPORT_METHOD(sendMessage:
- (NSDictionary *) message
- :(RCTPromiseResolveBlock) resolve
- :(RCTPromiseRejectBlock) reject
-) {
- NSString *to = message[@"to"];
- NSNumber *ttl = message[@"ttl"];
- NSDictionary *data = message[@"data"];
- NSString *messageId = message[@"messageId"];
- [[FIRMessaging messaging] sendMessage:data to:to withMessageID:messageId timeToLive:[ttl intValue]];
- resolve(nil);
-}
-
RCT_EXPORT_METHOD(subscribeToTopic:
(NSString *) topic
:(RCTPromiseResolveBlock) resolve
diff --git a/packages/messaging/ios/RNFBMessaging/RNFBMessagingSerializer.h b/packages/messaging/ios/RNFBMessaging/RNFBMessagingSerializer.h
index 344c3df3fb6..8347e076111 100644
--- a/packages/messaging/ios/RNFBMessaging/RNFBMessagingSerializer.h
+++ b/packages/messaging/ios/RNFBMessaging/RNFBMessagingSerializer.h
@@ -23,8 +23,6 @@
+ (NSString *)APNSTokenFromNSData:(NSData *)tokenData;
-+ (NSDictionary *)remoteMessageToDict:(FIRMessagingRemoteMessage *)remoteMessage;
-
+ (NSDictionary *)notificationToDict:(UNNotification *)notification;
+ (NSDictionary *)remoteMessageUserInfoToDict:(NSDictionary *)userInfo;
diff --git a/packages/messaging/ios/RNFBMessaging/RNFBMessagingSerializer.m b/packages/messaging/ios/RNFBMessaging/RNFBMessagingSerializer.m
index d4ceb3c9554..a1c1daf70ca 100644
--- a/packages/messaging/ios/RNFBMessaging/RNFBMessagingSerializer.m
+++ b/packages/messaging/ios/RNFBMessaging/RNFBMessagingSerializer.m
@@ -31,10 +31,6 @@ + (NSString *)APNSTokenFromNSData:(NSData *)tokenData {
return [token copy];
}
-+ (NSDictionary *)remoteMessageToDict:(FIRMessagingRemoteMessage *)remoteMessage {
- return [self remoteMessageUserInfoToDict:remoteMessage.appData];
-}
-
+ (NSDictionary *)notificationToDict:(UNNotification *)notification {
return [self remoteMessageUserInfoToDict:notification.request.content.userInfo];
}
diff --git a/packages/messaging/lib/index.d.ts b/packages/messaging/lib/index.d.ts
index 20fd7a59357..5fa852de757 100644
--- a/packages/messaging/lib/index.d.ts
+++ b/packages/messaging/lib/index.d.ts
@@ -839,6 +839,8 @@ export namespace FirebaseMessagingTypes {
* unsubscribe();
* ```
*
+ * NOTE: Android only
+ *
* @param listener Called when the FCM deletes pending messages.
*/
onDeletedMessages(listener: () => void): () => void;
@@ -859,6 +861,8 @@ export namespace FirebaseMessagingTypes {
* unsubscribe();
* ```
*
+ * NOTE: Android only
+ *
* @param listener Called when the FCM sends the remote message to FCM.
*/
onMessageSent(listener: (messageId: string) => any): () => void;
@@ -880,6 +884,8 @@ export namespace FirebaseMessagingTypes {
* unsubscribe();
* ```
*
+ * NOTE: Android only
+ *
* @param listener
*/
onSendError(listener: (evt: SendErrorEvent) => any): () => void;
@@ -925,6 +931,8 @@ export namespace FirebaseMessagingTypes {
* });
* ```
*
+ * NOTE: Android only
+ *
* @param message A `RemoteMessage` interface.
*/
sendMessage(message: RemoteMessage): Promise;
diff --git a/packages/messaging/lib/index.js b/packages/messaging/lib/index.js
index bdc05fb06b4..afb43850060 100644
--- a/packages/messaging/lib/index.js
+++ b/packages/messaging/lib/index.js
@@ -328,6 +328,9 @@ class FirebaseMessagingModule extends FirebaseModule {
}
sendMessage(remoteMessage) {
+ if (isIOS) {
+ throw new Error(`firebase.messaging().sendMessage() is only supported on Android devices.`);
+ }
let options;
try {
options = remoteMessageOptions(this.app.options.messagingSenderId, remoteMessage);
diff --git a/packages/ml-natural-language/CHANGELOG.md b/packages/ml-natural-language/CHANGELOG.md
deleted file mode 100644
index ffc9612fe13..00000000000
--- a/packages/ml-natural-language/CHANGELOG.md
+++ /dev/null
@@ -1,140 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file.
-See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
-
-## [7.4.11](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.10...@react-native-firebase/ml-natural-language@7.4.11) (2020-11-10)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.4.10](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.9...@react-native-firebase/ml-natural-language@7.4.10) (2020-10-30)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.4.9](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.8...@react-native-firebase/ml-natural-language@7.4.9) (2020-10-16)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.4.8](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.7...@react-native-firebase/ml-natural-language@7.4.8) (2020-09-30)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.4.7](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.6...@react-native-firebase/ml-natural-language@7.4.7) (2020-09-30)
-
-### Bug Fixes
-
-- **types:** enable TypeScript libCheck & resolve type conflicts ([#4306](https://github.com/invertase/react-native-firebase/issues/4306)) ([aa8ee8b](https://github.com/invertase/react-native-firebase/commit/aa8ee8b7e83443d2c1664993800e15faf4b59b0e))
-
-## [7.4.6](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.5...@react-native-firebase/ml-natural-language@7.4.6) (2020-09-30)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.4.5](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.4...@react-native-firebase/ml-natural-language@7.4.5) (2020-09-17)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.4.4](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.3...@react-native-firebase/ml-natural-language@7.4.4) (2020-09-17)
-
-### Bug Fixes
-
-- **ios, podspec:** depend on React-Core instead of React ([#4275](https://github.com/invertase/react-native-firebase/issues/4275)) ([fd1a2be](https://github.com/invertase/react-native-firebase/commit/fd1a2be6b6ab1dec89e5dce1fc237435c3e1d510))
-
-## [7.4.3](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.2...@react-native-firebase/ml-natural-language@7.4.3) (2020-09-11)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.4.2](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.1...@react-native-firebase/ml-natural-language@7.4.2) (2020-08-28)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.4.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.0...@react-native-firebase/ml-natural-language@7.4.1) (2020-08-26)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-# [7.4.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.3.2...@react-native-firebase/ml-natural-language@7.4.0) (2020-08-26)
-
-### Features
-
-- bump firebase sdk versions, add GoogleApi dep, use Android API29 ([#4122](https://github.com/invertase/react-native-firebase/issues/4122)) ([728f418](https://github.com/invertase/react-native-firebase/commit/728f41863832d21230c6eb1f55385284fef03c09))
-
-## [7.3.2](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.3.1...@react-native-firebase/ml-natural-language@7.3.2) (2020-08-15)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.3.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.3.0...@react-native-firebase/ml-natural-language@7.3.1) (2020-08-03)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-# [7.3.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.2.2...@react-native-firebase/ml-natural-language@7.3.0) (2020-08-03)
-
-### Features
-
-- use latest android & ios Firebase SDKs version ([#3956](https://github.com/invertase/react-native-firebase/issues/3956)) ([e7b4bb3](https://github.com/invertase/react-native-firebase/commit/e7b4bb31b05985c044b1f01625a43e364bb653ef))
-
-## [7.2.2](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.2.1...@react-native-firebase/ml-natural-language@7.2.2) (2020-07-09)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.2.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.2.0...@react-native-firebase/ml-natural-language@7.2.1) (2020-07-07)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-# [7.2.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.8...@react-native-firebase/ml-natural-language@7.2.0) (2020-07-07)
-
-### Features
-
-- **android,ios:** upgrade native SDK versions ([#3881](https://github.com/invertase/react-native-firebase/issues/3881)) ([6cb68a8](https://github.com/invertase/react-native-firebase/commit/6cb68a8ea808392fac3a28bdb1a76049c7b52e86))
-
-## [7.1.8](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.7...@react-native-firebase/ml-natural-language@7.1.8) (2020-07-05)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.1.7](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.6...@react-native-firebase/ml-natural-language@7.1.7) (2020-06-30)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.1.6](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.5...@react-native-firebase/ml-natural-language@7.1.6) (2020-06-26)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.1.5](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.4...@react-native-firebase/ml-natural-language@7.1.5) (2020-06-22)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.1.4](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.3...@react-native-firebase/ml-natural-language@7.1.4) (2020-06-10)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.1.3](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.2...@react-native-firebase/ml-natural-language@7.1.3) (2020-06-03)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.1.2](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.1...@react-native-firebase/ml-natural-language@7.1.2) (2020-05-29)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.1.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.0...@react-native-firebase/ml-natural-language@7.1.1) (2020-05-29)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-# [7.1.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.0.1...@react-native-firebase/ml-natural-language@7.1.0) (2020-05-22)
-
-### Features
-
-- update native Firebase SDK versions ([#3663](https://github.com/invertase/react-native-firebase/issues/3663)) ([4db9dbc](https://github.com/invertase/react-native-firebase/commit/4db9dbc3ec20bf96de0efad15000f00b41e4a799))
-
-## [7.0.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.0.0...@react-native-firebase/ml-natural-language@7.0.1) (2020-05-13)
-
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language
-
-## [7.0.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.0.0...@react-native-firebase/ml-natural-language@7.0.0) (2020-05-13)
-
-- feat!: all packages should depend on core (#3613) ([252a423](https://github.com/invertase/react-native-firebase/commit/252a4239e98a0f2a55c4afcd2d82e4d5f97e65e9)), closes [#3613](https://github.com/invertase/react-native-firebase/issues/3613)
-
-### Features
-
-- **ios:** podspecs now utilize CoreOnly instead of Core ([#3575](https://github.com/invertase/react-native-firebase/issues/3575)) ([35285f1](https://github.com/invertase/react-native-firebase/commit/35285f1655b16d05e6630fc556f95cccfb707ee4))
-
-### BREAKING CHANGES
-
-- breaking change to mark new internal versioning requirements.
diff --git a/packages/ml-natural-language/README.md b/packages/ml-natural-language/README.md
deleted file mode 100644
index eb42e7da451..00000000000
--- a/packages/ml-natural-language/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
-
-
React Native Firebase - ML Kit Natural Language
-
-
----
-
-# DEPRECATED
-
-This package is deprecated and should no longer be used.
-
-Google has split mobile machine learning functionality into two pieces:
-
-1. "On-Device" inferences - this will be handled via the standalone ["Google ML Kit"](https://developers.google.com/ml-kit) libraries, and the related [`react-native-mlkit`](https://github.com/invertase/react-native-mlkit) package. This includes any APIs where the device uses a local model to make inferences
-
-1. "Cloud" inferences - these will continue in Firebase, but are now in the ["Firebase ML"](https://firebase.google.com/docs/ml) library, and will be available from the new consolidated `@react-native-firebase/ml` package
-
-More information on the transition is available here: https://firebase.google.com/docs/ml#cloud_vs_on-device
-
----
-
-
-
-
- Built and maintained with 💛 by Invertase .
-
-
-
----
diff --git a/packages/ml-natural-language/RNFBMLNaturalLanguage.podspec b/packages/ml-natural-language/RNFBMLNaturalLanguage.podspec
deleted file mode 100644
index 761e4233690..00000000000
--- a/packages/ml-natural-language/RNFBMLNaturalLanguage.podspec
+++ /dev/null
@@ -1,60 +0,0 @@
-require 'json'
-require '../app/firebase_json'
-package = JSON.parse(File.read(File.join(__dir__, 'package.json')))
-appPackage = JSON.parse(File.read(File.join('..', 'app', 'package.json')))
-
-coreVersionDetected = appPackage['version']
-coreVersionRequired = package['peerDependencies'][appPackage['name']]
-firebase_sdk_version = appPackage['sdkVersions']['ios']['firebase']
-if coreVersionDetected != coreVersionRequired
- Pod::UI.warn "NPM package '#{package['name']}' depends on '#{appPackage['name']}' v#{coreVersionRequired} but found v#{coreVersionDetected}, this might cause build issues or runtime crashes."
-end
-
-Pod::Spec.new do |s|
- s.name = "RNFBMLNaturalLanguage"
- s.version = package["version"]
- s.description = package["description"]
- s.summary = <<-DESC
- A well tested feature rich Firebase implementation for React Native, supporting iOS & Android.
- DESC
- s.homepage = "http://invertase.io/oss/react-native-firebase"
- s.license = package['license']
- s.authors = "Invertase Limited"
- s.source = { :git => "https://github.com/invertase/react-native-firebase.git", :tag => "v#{s.version}" }
- s.social_media_url = 'http://twitter.com/invertaseio'
- s.ios.deployment_target = "9.0"
- s.source_files = 'ios/**/*.{h,m}'
-
- # React Native dependencies
- s.dependency 'React-Core'
- s.dependency 'RNFBApp'
-
- if defined?($FirebaseSDKVersion)
- Pod::UI.puts "#{s.name}: Using user specified Firebase SDK version '#{$FirebaseSDKVersion}'"
- firebase_sdk_version = $FirebaseSDKVersion
- end
-
- # Firebase dependencies
- s.dependency 'Firebase/MLNaturalLanguage', firebase_sdk_version
-
- if FirebaseJSON::Config.get_value_or_default('ml_natural_language_language_id_model', false)
- s.dependency 'Firebase/MLNLLanguageID', firebase_sdk_version
- end
-
- # ignore until after v6 release, add support in a feature release
- # if FirebaseJSON::Config.get_value_or_default('ml_natural_language_translate_model', false)
- # s.dependency 'Firebase/MLNLTranslate', firebase_sdk_version
- # end
-
- if FirebaseJSON::Config.get_value_or_default('ml_natural_language_smart_reply_model', false)
- s.dependency 'Firebase/MLCommon', firebase_sdk_version
- s.dependency 'Firebase/MLNLSmartReply', firebase_sdk_version
- end
-
- if defined?($RNFirebaseAsStaticFramework)
- Pod::UI.puts "#{s.name}: Using overridden static_framework value of '#{$RNFirebaseAsStaticFramework}'"
- s.static_framework = $RNFirebaseAsStaticFramework
- else
- s.static_framework = false
- end
-end
diff --git a/packages/ml-natural-language/android/build.gradle b/packages/ml-natural-language/android/build.gradle
deleted file mode 100644
index 4d50f2f91a0..00000000000
--- a/packages/ml-natural-language/android/build.gradle
+++ /dev/null
@@ -1,105 +0,0 @@
-import io.invertase.gradle.common.PackageJson
-
-buildscript {
- // The Android Gradle plugin is only required when opening the android folder stand-alone.
- // This avoids unnecessary downloads and potential conflicts when the library is included as a
- // module dependency in an application project.
- if (project == rootProject) {
- repositories {
- google()
- jcenter()
- }
-
- dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
- }
- }
-}
-
-plugins {
- id "io.invertase.gradle.build" version "1.4"
-}
-
-def appProject
-if (findProject(':@react-native-firebase_app')) {
- appProject = project(':@react-native-firebase_app')
-} else if (findProject(':react-native-firebase_app')) {
- appProject = project(':react-native-firebase_app')
-} else {
- throw new GradleException('Could not find the react-native-firebase/app package, have you installed it?')
-}
-def packageJson = PackageJson.getForProject(project)
-def appPackageJson = PackageJson.getForProject(appProject)
-def firebaseBomVersion = appPackageJson['sdkVersions']['android']['firebase']
-def jsonMinSdk = appPackageJson['sdkVersions']['android']['minSdk']
-def jsonTargetSdk = appPackageJson['sdkVersions']['android']['targetSdk']
-def jsonCompileSdk = appPackageJson['sdkVersions']['android']['compileSdk']
-def jsonBuildTools = appPackageJson['sdkVersions']['android']['buildTools']
-def coreVersionDetected = appPackageJson['version']
-def coreVersionRequired = packageJson['peerDependencies'][appPackageJson['name']]
-// Only log after build completed so log warning appears at the end
-if (coreVersionDetected != coreVersionRequired) {
- gradle.buildFinished {
- project.logger.warn("ReactNativeFirebase WARNING: NPM package '${packageJson['name']}' depends on '${appPackageJson['name']}' v${coreVersionRequired} but found v${coreVersionDetected}, this might cause build issues or runtime crashes.")
- }
-}
-
-project.ext {
- set('react-native', [
- versions: [
- android : [
- minSdk : jsonMinSdk,
- targetSdk : jsonTargetSdk,
- compileSdk: jsonCompileSdk,
- // optional as gradle.buildTools comes with one by default
- // overriding here though to match the version RN uses
- buildTools: jsonBuildTools
- ],
-
- firebase: [
- bom: firebaseBomVersion,
- ],
- ],
- ])
-}
-
-android {
- defaultConfig {
- multiDexEnabled true
- }
- aaptOptions {
- noCompress "tflite"
- }
- lintOptions {
- disable 'GradleCompatible'
- abortOnError false
- }
- compileOptions {
- sourceCompatibility JavaVersion.VERSION_1_8
- targetCompatibility JavaVersion.VERSION_1_8
- }
-
- sourceSets {
- main {
- java.srcDirs = ['src/main/java', 'src/reactnative/java']
- }
- }
-}
-
-repositories {
- google()
- jcenter()
-}
-
-dependencies {
- api appProject
- implementation platform("com.google.firebase:firebase-bom:${ReactNative.ext.getVersion("firebase", "bom")}")
- implementation "com.google.firebase:firebase-ml-natural-language"
-}
-
-apply from: file("./ml-models.gradle")
-
-ReactNative.shared.applyPackageVersion()
-ReactNative.shared.applyDefaultExcludes()
-ReactNative.module.applyAndroidVersions()
-ReactNative.module.applyReactNativeDependency("api")
diff --git a/packages/ml-natural-language/android/ml-models.gradle b/packages/ml-natural-language/android/ml-models.gradle
deleted file mode 100644
index f4a3786ccb6..00000000000
--- a/packages/ml-natural-language/android/ml-models.gradle
+++ /dev/null
@@ -1,23 +0,0 @@
-apply from: file("./../../app/android/firebase-json.gradle")
-
-def mlModels = [
- // TODO not available on iOS until SDK 6.0.0
- // 'ml_natural_language_translate_model',
- 'ml_natural_language_language_id_model',
- 'ml_natural_language_smart_reply_model',
-]
-
-dependencies {
- if (rootProject.ext && rootProject.ext.firebaseJson) {
- mlModels.each { modelFlag ->
- if (rootProject.ext.firebaseJson.isFlagEnabled(modelFlag) == true) {
- rootProject.logger.info ":${project.name} model enabled: '${modelFlag}'"
- implementation "com.google.firebase:firebase-${modelFlag.replaceAll("_", "-")}"
- } else {
- rootProject.logger.warn ":${project.name} model disabled: '${modelFlag}'"
- }
- }
- } else {
- rootProject.logger.warn ":${project.name} skipping optional models as no firebaseJson extension found, you may be missing a firebase.json file in the root of your React Native project, or you've not installed the @react-native-firebase/app package and included it in your app build."
- }
-}
diff --git a/packages/ml-natural-language/android/settings.gradle b/packages/ml-natural-language/android/settings.gradle
deleted file mode 100644
index 9c9c7705f1d..00000000000
--- a/packages/ml-natural-language/android/settings.gradle
+++ /dev/null
@@ -1 +0,0 @@
-rootProject.name = '@react-native-firebase_ml-natural-language'
diff --git a/packages/ml-natural-language/android/src/main/AndroidManifest.xml b/packages/ml-natural-language/android/src/main/AndroidManifest.xml
deleted file mode 100644
index cc9b0e0efef..00000000000
--- a/packages/ml-natural-language/android/src/main/AndroidManifest.xml
+++ /dev/null
@@ -1,2 +0,0 @@
-
-
diff --git a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageCommon.java b/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageCommon.java
deleted file mode 100644
index 60d7c326bb0..00000000000
--- a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageCommon.java
+++ /dev/null
@@ -1,96 +0,0 @@
-package io.invertase.firebase.ml.naturallanguage;
-
-import com.google.firebase.ml.common.FirebaseMLException;
-
-import javax.annotation.Nullable;
-
-public class UniversalFirebaseMLNaturalLanguageCommon {
-
- static String[] getErrorCodeAndMessageFromException(@Nullable Exception possibleMLException) {
- String code = "unknown";
- String message = "An unknown error has occurred.";
-
- if (possibleMLException != null) {
- message = possibleMLException.getMessage();
- if (possibleMLException instanceof FirebaseMLException) {
- FirebaseMLException mlException = (FirebaseMLException) possibleMLException;
- switch (mlException.getCode()) {
- case FirebaseMLException.ABORTED:
- code = "aborted";
- message = "The operation was aborted, typically due to a concurrency issue like transaction aborts, etc.";
- break;
- case FirebaseMLException.ALREADY_EXISTS:
- code = "already-exists";
- message = "Some resource that we attempted to create already exists.";
- break;
- case FirebaseMLException.CANCELLED:
- code = "cancelled";
- message = "The operation was cancelled (typically by the caller).";
- break;
- case FirebaseMLException.DATA_LOSS:
- code = "data-loss";
- message = "Unrecoverable data loss or corruption.";
- break;
- case FirebaseMLException.DEADLINE_EXCEEDED:
- code = "deadline-exceeded";
- message = "Deadline expired before operation could complete.";
- break;
- case FirebaseMLException.FAILED_PRECONDITION:
- code = "failed-precondition";
- message = "Operation was rejected because the system is not in a state required for the operation's execution.";
- break;
- case FirebaseMLException.INTERNAL:
- code = "internal";
- message = "Internal errors.";
- break;
- case FirebaseMLException.INVALID_ARGUMENT:
- code = "invalid-argument";
- message = "Client specified an invalid argument.";
- break;
- case FirebaseMLException.MODEL_HASH_MISMATCH:
- code = "model-hash-mismatch";
- message = "The downloaded model's hash doesn't match the expected value.";
- break;
- case FirebaseMLException.MODEL_INCOMPATIBLE_WITH_TFLITE:
- code = "model-incompatible-with-tflite";
- message = "The downloaded model isn't compatible with the TFLite runtime.";
- break;
- case FirebaseMLException.NOT_ENOUGH_SPACE:
- code = "not-enough-space";
- message = "There is not enough space left on the device.";
- break;
- case FirebaseMLException.NOT_FOUND:
- code = "not-found";
- message = "Some requested resource was not found.";
- break;
- case FirebaseMLException.OUT_OF_RANGE:
- code = "out-of-range";
- message = "Operation was attempted past the valid range.";
- break;
- case FirebaseMLException.PERMISSION_DENIED:
- code = "permission-denied";
- message = "The caller does not have permission to execute the specified operation.";
- break;
- case FirebaseMLException.RESOURCE_EXHAUSTED:
- code = "resource-exhausted";
- message = "Some resource has been exhausted, perhaps a per-user quota, or perhaps the entire file system is out of space.";
- break;
- case FirebaseMLException.UNAUTHENTICATED:
- code = "unauthenticated";
- message = "The request does not have valid authentication credentials for the operation.";
- break;
- case FirebaseMLException.UNAVAILABLE:
- code = "unavailable";
- message = "The service is currently unavailable.";
- break;
- case FirebaseMLException.UNIMPLEMENTED:
- code = "unimplemented";
- message = "Operation is not implemented or not supported/enabled.";
- break;
- }
- }
- }
-
- return new String[]{code, message, possibleMLException != null ? possibleMLException.getMessage() : ""};
- }
-}
diff --git a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageIdModule.java b/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageIdModule.java
deleted file mode 100644
index f87b72236b6..00000000000
--- a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageIdModule.java
+++ /dev/null
@@ -1,126 +0,0 @@
-package io.invertase.firebase.ml.naturallanguage;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import android.content.Context;
-import android.os.Bundle;
-
-import com.google.android.gms.tasks.Task;
-import com.google.android.gms.tasks.Tasks;
-import com.google.firebase.FirebaseApp;
-import com.google.firebase.ml.naturallanguage.FirebaseNaturalLanguage;
-import com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentification;
-import com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentificationOptions;
-import com.google.firebase.ml.naturallanguage.languageid.IdentifiedLanguage;
-import com.google.firebase.ml.naturallanguage.translate.FirebaseTranslateLanguage;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import io.invertase.firebase.common.UniversalFirebaseModule;
-
-@SuppressWarnings("WeakerAccess")
-class UniversalFirebaseMLNaturalLanguageIdModule extends UniversalFirebaseModule {
-
- UniversalFirebaseMLNaturalLanguageIdModule(Context context, String serviceName) {
- super(context, serviceName);
- }
-
- /**
- * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/languageid/FirebaseLanguageIdentification.html#identifyLanguage(java.lang.String)
- */
- public Task identifyLanguage(
- String appName,
- String text,
- Bundle identificationOptionsBundle
- ) {
- return Tasks.call(getExecutor(), () -> {
- FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
- FirebaseNaturalLanguage naturalLanguage = FirebaseNaturalLanguage.getInstance(firebaseApp);
-
- FirebaseLanguageIdentificationOptions identificationOptions = getOptions(
- identificationOptionsBundle
- );
-
- FirebaseLanguageIdentification languageIdentification = naturalLanguage.getLanguageIdentification(
- identificationOptions);
-
- return Tasks.await(languageIdentification.identifyLanguage(text));
- });
- }
-
- /**
- * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/languageid/FirebaseLanguageIdentification.html#identifyPossibleLanguages(java.lang.String)
- */
- public Task> identifyPossibleLanguages(
- String appName,
- String text,
- Bundle identificationOptionsBundle
- ) {
- return Tasks.call(getExecutor(), () -> {
- FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
- FirebaseNaturalLanguage naturalLanguage = FirebaseNaturalLanguage.getInstance(firebaseApp);
- FirebaseLanguageIdentificationOptions identificationOptions = getOptions(
- identificationOptionsBundle
- );
- FirebaseLanguageIdentification languageIdentification = naturalLanguage.getLanguageIdentification(
- identificationOptions);
-
- List languagesRaw = Tasks.await(languageIdentification.identifyPossibleLanguages(
- text));
-
- List formattedLanguages = new ArrayList<>(languagesRaw.size());
-
-
- for (IdentifiedLanguage identifiedLanguage : languagesRaw) {
- Bundle formattedLanguage = new Bundle(2);
- formattedLanguage.putString("language", identifiedLanguage.getLanguageCode());
- formattedLanguage.putFloat("confidence", identifiedLanguage.getConfidence());
- formattedLanguages.add(formattedLanguage);
- }
-
- return formattedLanguages;
- });
-
- }
-
- /**
- * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/languageid/FirebaseLanguageIdentificationOptions.html
- */
- private FirebaseLanguageIdentificationOptions getOptions(
- Bundle identificationOptionsBundle
- ) {
- boolean multipleLanguages = identificationOptionsBundle.containsKey("multipleLanguages");
- FirebaseLanguageIdentificationOptions.Builder optionsBuilder = new FirebaseLanguageIdentificationOptions.Builder();
-
- if (identificationOptionsBundle.containsKey("confidenceThreshold")) {
- optionsBuilder.setConfidenceThreshold((float) identificationOptionsBundle.getDouble(
- "confidenceThreshold"));
- } else {
- if (!multipleLanguages) {
- optionsBuilder.setConfidenceThreshold(FirebaseLanguageIdentification.DEFAULT_IDENTIFY_LANGUAGE_CONFIDENCE_THRESHOLD);
- } else {
- optionsBuilder.setConfidenceThreshold(FirebaseLanguageIdentification.DEFAULT_IDENTIFY_POSSIBLE_LANGUAGES_CONFIDENCE_THRESHOLD);
- }
- }
-
- return optionsBuilder.build();
- }
-}
diff --git a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageSmartReplyModule.java b/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageSmartReplyModule.java
deleted file mode 100644
index ff2ecb3daee..00000000000
--- a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageSmartReplyModule.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package io.invertase.firebase.ml.naturallanguage;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import android.content.Context;
-import android.os.Bundle;
-import com.google.android.gms.tasks.Task;
-import com.google.android.gms.tasks.Tasks;
-import com.google.firebase.FirebaseApp;
-import com.google.firebase.ml.naturallanguage.FirebaseNaturalLanguage;
-import com.google.firebase.ml.naturallanguage.smartreply.FirebaseTextMessage;
-import com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestion;
-import com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult;
-import io.invertase.firebase.common.UniversalFirebaseModule;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-@SuppressWarnings({"WeakerAccess", "UnusedReturnValue"})
-class UniversalFirebaseMLNaturalLanguageSmartReplyModule extends UniversalFirebaseModule {
- UniversalFirebaseMLNaturalLanguageSmartReplyModule(Context context, String serviceName) {
- super(context, serviceName);
- }
-
- @Override
- public void onTearDown() {
- super.onTearDown();
- }
-
- @SuppressWarnings("unchecked")
- private List buildFirebaseTextMessagesList(List messages) {
- List firebaseTextMessages = new ArrayList<>(messages.size());
-
- for (Object message : messages) {
- Map messageMap = (Map) message;
-
- Boolean isLocalUser = (Boolean) messageMap.get("isLocalUser");
- long timestamp = (long) ((double) messageMap.get("timestamp"));
- String text = (String) messageMap.get("text");
-
- if (isLocalUser) {
- firebaseTextMessages.add(
- FirebaseTextMessage.createForLocalUser(
- text,
- timestamp
- )
- );
- } else {
- firebaseTextMessages.add(
- FirebaseTextMessage.createForRemoteUser(
- text,
- timestamp,
- (String) messageMap.get("userId")
- )
- );
- }
- }
-
- return firebaseTextMessages;
- }
-
- /**
- * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/smartreply/FirebaseSmartReply.html#public-tasksmartreplysuggestionresultsuggestreplieslistfirebasetextmessage-textmessages
- */
- public Task> suggestReplies(String appName, List messages) {
- return Tasks.call(getExecutor(), () -> {
- List firebaseTextMessages = buildFirebaseTextMessagesList(messages);
- FirebaseNaturalLanguage instance = FirebaseNaturalLanguage.getInstance(FirebaseApp.getInstance(appName));
-
- SmartReplySuggestionResult suggestionResult = Tasks.await(
- instance.getSmartReply().suggestReplies(firebaseTextMessages)
- );
-
- if (suggestionResult == null) return new ArrayList<>(0);
-
- List suggestedRepliesListRaw = suggestionResult.getSuggestions();
- List suggestedRepliesListFormatted = new ArrayList<>(
- suggestedRepliesListRaw.size());
-
-
- for (SmartReplySuggestion suggestedReplyRaw : suggestedRepliesListRaw) {
- Bundle suggestReplyFormatted = new Bundle(2);
- suggestReplyFormatted.putString("text", suggestedReplyRaw.getText());
- // TODO no longer exists - undocumented breaking change
- // suggestReplyFormatted.putFloat("confidence", suggestedReplyRaw.getConfidence());
- suggestedRepliesListFormatted.add(suggestReplyFormatted);
- }
-
- return suggestedRepliesListFormatted;
- });
- }
-}
diff --git a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageTranslateModule.java b/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageTranslateModule.java
deleted file mode 100644
index 5377067c3ec..00000000000
--- a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageTranslateModule.java
+++ /dev/null
@@ -1,158 +0,0 @@
-package io.invertase.firebase.ml.naturallanguage;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import android.content.Context;
-import io.invertase.firebase.common.UniversalFirebaseModule;
-
-@SuppressWarnings("WeakerAccess")
-class UniversalFirebaseMLNaturalLanguageTranslateModule extends UniversalFirebaseModule {
- UniversalFirebaseMLNaturalLanguageTranslateModule(Context context, String serviceName) {
- super(context, serviceName);
- }
-
- // TODO not available on iOS until SDK 6.0.0
-// /**
-// * @url No reference documentation yet...
-// */
-// public Task translate(String appName, String text, Bundle translationOptionsMap) {
-// return Tasks.call(getExecutor(), () -> {
-// FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
-// FirebaseNaturalLanguage naturalLanguage = FirebaseNaturalLanguage.getInstance(firebaseApp);
-// FirebaseTranslatorOptions translatorOptions = getOptions(translationOptionsMap);
-// FirebaseTranslator translator = naturalLanguage.getTranslator(translatorOptions);
-// return Tasks.await(translator.translate(text));
-// });
-// }
-//
-// /**
-// * @url No reference documentation yet...
-// */
-// public Task>> modelManagerGetAvailableModels(String appName) {
-// return Tasks.call(getExecutor(), () -> {
-// FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
-// FirebaseTranslateModelManager translateModelManager = FirebaseTranslateModelManager.getInstance();
-// Set modelsRaw = Tasks.await(translateModelManager.getAvailableModels(
-// firebaseApp));
-//
-// List> modelsArray = new ArrayList<>(modelsRaw.size());
-// for (FirebaseTranslateRemoteModel modelRaw : modelsRaw) {
-// Map modelMap = new HashMap<>();
-// modelMap.put("language", modelRaw.getLanguage());
-// modelMap.put("languageCode", modelRaw.getLanguageCode());
-// modelMap.put("backendModelName", modelRaw.getModelNameForBackend());
-// modelMap.put("persistUniqueModelName", modelRaw.getUniqueModelNameForPersist());
-// modelsArray.add(modelMap);
-// }
-//
-// return modelsArray;
-// });
-// }
-//
-// /**
-// * @url No reference documentation yet...
-// */
-// public Task modelManagerDeleteDownloadedModel(String appName, int language) {
-// return Tasks.call(getExecutor(), () -> {
-// FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
-// FirebaseTranslateModelManager translateModelManager = FirebaseTranslateModelManager.getInstance();
-// FirebaseTranslateRemoteModel model = new FirebaseTranslateRemoteModel.Builder(language)
-// .setFirebaseApp(firebaseApp)
-// .build();
-// Tasks.await(translateModelManager.deleteDownloadedModel(model));
-// return null;
-// });
-// }
-//
-// /**
-// * @url No reference documentation yet...
-// */
-// public Task modelManagerDownloadRemoteModelIfNeeded(
-// String appName,
-// int language,
-// Bundle downloadConditionsBundle
-// ) {
-// return Tasks.call(getExecutor(), () -> {
-// FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
-// FirebaseTranslateModelManager translateModelManager = FirebaseTranslateModelManager.getInstance();
-// FirebaseModelDownloadConditions downloadConditions = getDownloadConditions(
-// downloadConditionsBundle);
-// FirebaseTranslateRemoteModel model = new FirebaseTranslateRemoteModel.Builder(language)
-// .setDownloadConditions(downloadConditions)
-// .setFirebaseApp(firebaseApp)
-// .build();
-// Tasks.await(translateModelManager.downloadRemoteModelIfNeeded(model));
-// return null;
-// });
-// }
-//
-// private FirebaseModelDownloadConditions getDownloadConditions(Bundle downloadConditionsBundle) {
-// FirebaseModelDownloadConditions.Builder conditionsBuilder = new FirebaseModelDownloadConditions.Builder();
-//
-// if (downloadConditionsBundle.containsKey("requireCharging") && downloadConditionsBundle.getBoolean(
-// "requireCharging")) {
-// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
-// conditionsBuilder.requireCharging();
-// }
-// }
-//
-// if (downloadConditionsBundle.containsKey("requireDeviceIdle") && downloadConditionsBundle.getBoolean(
-// "requireDeviceIdle")) {
-// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
-// conditionsBuilder.requireDeviceIdle();
-// }
-// }
-//
-// if (downloadConditionsBundle.containsKey("requireWifi") && downloadConditionsBundle.getBoolean(
-// "requireWifi")) {
-// conditionsBuilder.requireWifi();
-// }
-//
-// return conditionsBuilder.build();
-// }
-//
-// private FirebaseTranslatorOptions getOptions(Bundle translationOptionsBundle) {
-// FirebaseTranslatorOptions.Builder optionsBuilder = new FirebaseTranslatorOptions.Builder();
-//
-// if (translationOptionsBundle.containsKey("sourceLanguage")) {
-// optionsBuilder.setSourceLanguage((int) ((double) translationOptionsBundle.get("sourceLanguage")));
-// } else {
-// optionsBuilder.setSourceLanguage(FirebaseTranslateLanguage.EN);
-// }
-//
-// if (translationOptionsBundle.containsKey("targetLanguage")) {
-// optionsBuilder.setTargetLanguage((int) ((double) translationOptionsBundle.get("targetLanguage")));
-// } else {
-// optionsBuilder.setTargetLanguage(FirebaseTranslateLanguage.EN);
-// }
-//
-// return optionsBuilder.build();
-// }
-//
-// @Override
-// public Map getConstants() {
-// Map constantsMap = new HashMap<>();
-// Map languagesMap = new HashMap<>();
-// Set languages = FirebaseTranslateLanguage.getAllLanguages();
-// for (Integer language : languages) {
-// languagesMap.put(FirebaseTranslateLanguage.languageCodeForLanguage(language), language);
-// }
-// constantsMap.put("TRANSLATE_LANGUAGES", languagesMap);
-// return constantsMap;
-// }
-}
diff --git a/packages/ml-natural-language/android/src/reactnative/AndroidManifest.xml b/packages/ml-natural-language/android/src/reactnative/AndroidManifest.xml
deleted file mode 100644
index cc9b0e0efef..00000000000
--- a/packages/ml-natural-language/android/src/reactnative/AndroidManifest.xml
+++ /dev/null
@@ -1,2 +0,0 @@
-
-
diff --git a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageIdModule.java b/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageIdModule.java
deleted file mode 100644
index 0fdc2d31a7c..00000000000
--- a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageIdModule.java
+++ /dev/null
@@ -1,94 +0,0 @@
-package io.invertase.firebase.ml.naturallanguage;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import com.facebook.react.bridge.Arguments;
-import com.facebook.react.bridge.Promise;
-import com.facebook.react.bridge.ReactApplicationContext;
-import com.facebook.react.bridge.ReactMethod;
-import com.facebook.react.bridge.ReadableMap;
-
-import java.util.Objects;
-
-import io.invertase.firebase.common.ReactNativeFirebaseModule;
-
-class RNFirebaseMLNaturalLanguageIdModule extends ReactNativeFirebaseModule {
- private static final String SERVICE_NAME = "MLNaturalLanguageId";
- private final UniversalFirebaseMLNaturalLanguageIdModule module;
-
- RNFirebaseMLNaturalLanguageIdModule(ReactApplicationContext reactContext) {
- super(reactContext, SERVICE_NAME);
- this.module = new UniversalFirebaseMLNaturalLanguageIdModule(reactContext, SERVICE_NAME);
- }
-
- /**
- * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/languageid/FirebaseLanguageIdentification.html#identifyLanguage(java.lang.String)
- */
- @ReactMethod
- public void identifyLanguage(
- String appName,
- String text,
- ReadableMap identificationOptionsMap,
- Promise promise
- ) {
- module
- .identifyLanguage(appName, text, Arguments.toBundle(identificationOptionsMap))
- .addOnCompleteListener(task -> {
- if (task.isSuccessful()) {
- promise.resolve(task.getResult());
- } else {
- String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException(
- task.getException());
- rejectPromiseWithCodeAndMessage(
- promise,
- errorCodeAndMessage[0],
- errorCodeAndMessage[1],
- errorCodeAndMessage[2]
- );
- }
- });
- }
-
- /**
- * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/languageid/FirebaseLanguageIdentification.html#identifyPossibleLanguages(java.lang.String)
- */
- @ReactMethod
- public void identifyPossibleLanguages(
- String appName,
- String text,
- ReadableMap identificationOptionsMap,
- Promise promise
- ) {
- module
- .identifyPossibleLanguages(appName, text, Arguments.toBundle(identificationOptionsMap))
- .addOnCompleteListener(task -> {
- if (task.isSuccessful()) {
- promise.resolve(Arguments.fromList(Objects.requireNonNull(task.getResult())));
- } else {
- String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException(
- task.getException());
- rejectPromiseWithCodeAndMessage(
- promise,
- errorCodeAndMessage[0],
- errorCodeAndMessage[1],
- errorCodeAndMessage[2]
- );
- }
- });
- }
-}
diff --git a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageSmartReplyModule.java b/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageSmartReplyModule.java
deleted file mode 100644
index 0b3405ce742..00000000000
--- a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageSmartReplyModule.java
+++ /dev/null
@@ -1,65 +0,0 @@
-package io.invertase.firebase.ml.naturallanguage;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import com.facebook.react.bridge.*;
-import io.invertase.firebase.common.ReactNativeFirebaseModule;
-
-import java.util.Objects;
-
-class RNFirebaseMLNaturalLanguageSmartReplyModule extends ReactNativeFirebaseModule {
- private static final String SERVICE_NAME = "MLNaturalLanguageSmartReply";
- private final UniversalFirebaseMLNaturalLanguageSmartReplyModule module;
-
- RNFirebaseMLNaturalLanguageSmartReplyModule(ReactApplicationContext reactContext) {
- super(reactContext, SERVICE_NAME);
- this.module = new UniversalFirebaseMLNaturalLanguageSmartReplyModule(
- reactContext,
- SERVICE_NAME
- );
- }
-
- @Override
- public void onCatalystInstanceDestroy() {
- super.onCatalystInstanceDestroy();
- module.onTearDown();
- }
-
- /**
- * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/smartreply/FirebaseSmartReply.html#public-tasksmartreplysuggestionresultsuggestreplieslistfirebasetextmessage-textmessages
- */
- @ReactMethod
- public void suggestReplies(String appName, ReadableArray messages, Promise promise) {
- module
- .suggestReplies(appName, messages.toArrayList())
- .addOnCompleteListener(getExecutor(), task -> {
- if (task.isSuccessful()) {
- promise.resolve(Arguments.fromList(Objects.requireNonNull(task.getResult())));
- } else {
- String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException(
- task.getException());
- rejectPromiseWithCodeAndMessage(
- promise,
- errorCodeAndMessage[0],
- errorCodeAndMessage[1],
- errorCodeAndMessage[2]
- );
- }
- });
- }
-}
diff --git a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageTranslateModule.java b/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageTranslateModule.java
deleted file mode 100644
index b81b861a47d..00000000000
--- a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageTranslateModule.java
+++ /dev/null
@@ -1,137 +0,0 @@
-package io.invertase.firebase.ml.naturallanguage;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import com.facebook.react.bridge.ReactApplicationContext;
-import io.invertase.firebase.common.ReactNativeFirebaseModule;
-
-class RNFirebaseMLNaturalLanguageTranslateModule extends ReactNativeFirebaseModule {
- private static final String SERVICE_NAME = "MLNaturalLanguageTranslate";
- private final UniversalFirebaseMLNaturalLanguageTranslateModule module;
-
- RNFirebaseMLNaturalLanguageTranslateModule(ReactApplicationContext reactContext) {
- super(reactContext, SERVICE_NAME);
- this.module = new UniversalFirebaseMLNaturalLanguageTranslateModule(reactContext, SERVICE_NAME);
- }
-
-// TODO not available on iOS until SDK 6.0.0
-
-// /**
-// * @url No reference documentation yet...
-// */
-// @ReactMethod
-// public void translate(
-// String appName,
-// String text,
-// ReadableMap translationOptionsMap,
-// Promise promise
-// ) {
-// module
-// .translate(appName, text, Arguments.toBundle(translationOptionsMap))
-// .addOnCompleteListener(task -> {
-// if (task.isSuccessful()) {
-// promise.resolve(task.getResult());
-// } else {
-// String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException(
-// task.getException());
-// rejectPromiseWithCodeAndMessage(
-// promise,
-// errorCodeAndMessage[0],
-// errorCodeAndMessage[1],
-// errorCodeAndMessage[2]
-// );
-// }
-// });
-// }
-//
-// /**
-// * @url No reference documentation yet...
-// */
-// @ReactMethod
-// public void modelManagerGetAvailableModels(String appName, Promise promise) {
-// module.modelManagerGetAvailableModels(appName).addOnCompleteListener(task -> {
-// if (task.isSuccessful()) {
-// promise.resolve(Arguments.fromList(Objects.requireNonNull(task.getResult())));
-// } else {
-// String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException(
-// task.getException());
-// rejectPromiseWithCodeAndMessage(
-// promise,
-// errorCodeAndMessage[0],
-// errorCodeAndMessage[1],
-// errorCodeAndMessage[2]
-// );
-// }
-// });
-// }
-//
-// /**
-// * @url No reference documentation yet...
-// */
-// @ReactMethod
-// public void modelManagerDeleteDownloadedModel(String appName, int language, Promise promise) {
-// module.modelManagerDeleteDownloadedModel(appName, language).addOnCompleteListener(task -> {
-// if (task.isSuccessful()) {
-// promise.resolve(task.getResult());
-// } else {
-// String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException(
-// task.getException());
-// rejectPromiseWithCodeAndMessage(
-// promise,
-// errorCodeAndMessage[0],
-// errorCodeAndMessage[1],
-// errorCodeAndMessage[2]
-// );
-// }
-// });
-// }
-//
-// /**
-// * @url No reference documentation yet...
-// */
-// @ReactMethod
-// public void modelManagerDownloadRemoteModelIfNeeded(
-// String appName,
-// int language,
-// ReadableMap downloadConditionsMap,
-// Promise promise
-// ) {
-// module
-// .modelManagerDownloadRemoteModelIfNeeded(appName, language, Arguments.toBundle(downloadConditionsMap))
-// .addOnCompleteListener(task -> {
-// if (task.isSuccessful()) {
-// promise.resolve(task.getResult());
-// } else {
-// String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException(
-// task.getException());
-// rejectPromiseWithCodeAndMessage(
-// promise,
-// errorCodeAndMessage[0],
-// errorCodeAndMessage[1],
-// errorCodeAndMessage[2]
-// );
-// }
-// });
-// }
-//
-//
-// @Override
-// public Map getConstants() {
-// return module.getConstants();
-// }
-}
diff --git a/packages/ml-natural-language/e2e/languageId.e2e.js b/packages/ml-natural-language/e2e/languageId.e2e.js
deleted file mode 100644
index ae7c8002aa4..00000000000
--- a/packages/ml-natural-language/e2e/languageId.e2e.js
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-describe('naturalLanguage() -> Language ID', () => {
- describe('identifyLanguage()', () => {
- it('returns a string of the identified language', async () => {
- const languageDe = await firebase.naturalLanguage().identifyLanguage('Hallo welt');
- const languageEn = await firebase.naturalLanguage().identifyLanguage('Hello world');
- const languageFr = await firebase.naturalLanguage().identifyLanguage('Bonjour le monde');
- should.equal(languageDe, 'de');
- should.equal(languageEn, 'en');
- should.equal(languageFr, 'fr');
- });
-
- it('accepts a `confidenceThreshold` option', async () => {
- const languageDeDefault = await firebase.naturalLanguage().identifyLanguage('Hallo');
- const languageDeLowConfidence = await firebase.naturalLanguage().identifyLanguage('Hallo', {
- confidenceThreshold: 0.2,
- });
- should.equal(languageDeDefault, 'und');
- should.equal(languageDeLowConfidence, 'de');
- });
-
- it('throws an error if text is not a string', async () => {
- try {
- firebase.naturalLanguage().identifyLanguage(false);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql('must be a string value');
- return Promise.resolve();
- }
- });
-
- it('throws an error if options is not an object', async () => {
- try {
- firebase.naturalLanguage().identifyLanguage('hello', false);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql('must be an object');
- return Promise.resolve();
- }
- });
-
- it('throws an error if options.confidenceThreshold is not a float value', async () => {
- try {
- firebase.naturalLanguage().identifyLanguage('hello', { confidenceThreshold: 'boop' });
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql('must be a float value between 0 and 1');
- return Promise.resolve();
- }
- });
-
- it('throws an error if options.confidenceThreshold is greater than 1', async () => {
- try {
- firebase.naturalLanguage().identifyLanguage('hello', { confidenceThreshold: 1.2 });
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql('must be a float value between 0 and 1');
- return Promise.resolve();
- }
- });
-
- it('throws an error if options.confidenceThreshold is less than 0', async () => {
- try {
- firebase.naturalLanguage().identifyLanguage('hello', { confidenceThreshold: -1.2 });
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql('must be a float value between 0 and 1');
- return Promise.resolve();
- }
- });
- });
-
- describe('identifyPossibleLanguages()', () => {
- it('returns an array of the identified languages and their confidence', async () => {
- const languages = await firebase.naturalLanguage().identifyPossibleLanguages('hello');
- languages.should.be.an.Array();
- languages.length.should.be.greaterThan(3);
- languages[0].language.should.equal('en');
- languages[0].confidence.should.be.a.Number();
- languages[0].confidence.should.be.greaterThan(0.7);
- });
-
- it('accepts a `confidenceThreshold` option', async () => {
- const languages = await firebase.naturalLanguage().identifyPossibleLanguages('hello', {
- confidenceThreshold: 0.7,
- });
- languages.should.be.an.Array();
- languages.length.should.equal(1);
- languages[0].language.should.equal('en');
- languages[0].confidence.should.be.a.Number();
- languages[0].confidence.should.be.greaterThan(0.7);
- });
- // arg validation not required, uses same validator as identifyLanguage
- });
-});
diff --git a/packages/ml-natural-language/e2e/mlKitLanguage.e2e.js b/packages/ml-natural-language/e2e/mlKitLanguage.e2e.js
deleted file mode 100644
index 7a10484e3ab..00000000000
--- a/packages/ml-natural-language/e2e/mlKitLanguage.e2e.js
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-describe('naturalLanguage()', () => {
- describe('namespace', () => {
- it('accessible from firebase.app()', () => {
- const app = firebase.app();
- should.exist(app.naturalLanguage);
- app.naturalLanguage().app.should.equal(app);
- });
-
- it('supports multiple apps', async () => {
- firebase.naturalLanguage().app.name.should.equal('[DEFAULT]');
- firebase
- .naturalLanguage(firebase.app('secondaryFromNative'))
- .app.name.should.equal('secondaryFromNative');
-
- firebase
- .app('secondaryFromNative')
- .naturalLanguage()
- .app.name.should.equal('secondaryFromNative');
- });
-
- it('throws an error if language id native module does not exist', async () => {
- const method = firebase.naturalLanguage().native.identifyLanguage;
- firebase.naturalLanguage()._nativeModule = Object.assign(
- {},
- firebase.naturalLanguage()._nativeModule,
- );
- delete firebase.naturalLanguage()._nativeModule.identifyLanguage;
- try {
- firebase.naturalLanguage().identifyLanguage();
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql(
- "You attempted to use an optional API that's not enabled natively",
- );
- e.message.should.containEql('Language Identification');
- firebase.naturalLanguage()._nativeModule.identifyLanguage = method;
- Object.freeze(firebase.naturalLanguage()._nativeModule);
- return Promise.resolve();
- }
- });
-
- xit('throws an error if smart replies native module does not exist', async () => {
- const method = firebase.naturalLanguage().native.getSuggestedReplies;
- firebase.naturalLanguage()._nativeModule = Object.assign(
- {},
- firebase.naturalLanguage()._nativeModule,
- );
- delete firebase.naturalLanguage()._nativeModule.getSuggestedReplies;
- try {
- firebase.naturalLanguage().newSmartReplyConversation();
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql(
- "You attempted to use an optional API that's not enabled natively",
- );
- e.message.should.containEql('Smart Replies');
- firebase.naturalLanguage()._nativeModule.getSuggestedReplies = method;
- Object.freeze(firebase.naturalLanguage()._nativeModule);
- return Promise.resolve();
- }
- });
- });
-});
diff --git a/packages/ml-natural-language/e2e/smartReply.e2e.js b/packages/ml-natural-language/e2e/smartReply.e2e.js
deleted file mode 100644
index 061d8b64748..00000000000
--- a/packages/ml-natural-language/e2e/smartReply.e2e.js
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-describe('naturalLanguage() -> Smart Replies', () => {
- describe('suggestReplies()', () => {
- it('throws if messages is not an array', () => {
- try {
- firebase.naturalLanguage().suggestReplies({});
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql("'messages' must be an array value");
- return Promise.resolve();
- }
- });
-
- it('resolves an empty array if empty array if provided', async () => {
- const replies = await firebase.naturalLanguage().suggestReplies([]);
- replies.should.be.Array();
- replies.length.should.eql(0);
- });
-
- it('returns suggested replies', async () => {
- const replies = await firebase.naturalLanguage().suggestReplies([
- { text: 'We should catchup some time!' },
- { text: 'I know right, it has been a while..', userId: 'invertase', isLocalUser: false },
- { text: 'Lets meet up!' },
- {
- text: 'Definitely, how about we go for lunch this week?',
- userId: 'invertase',
- isLocalUser: false,
- },
- ]);
-
- replies.should.be.Array();
- replies.length.should.equal(3);
-
- replies.forEach($ => {
- $.text.should.be.String();
- $.text.length.should.be.greaterThan(0);
- });
-
- const replies2 = await firebase
- .naturalLanguage()
- .suggestReplies([
- { text: replies[0].text },
- { text: 'Great, does Friday work for you?', userId: 'invertase', isLocalUser: false },
- ]);
-
- replies2[0].text.should.be.String();
- replies2[0].text.length.should.be.greaterThan(0);
- });
-
- describe('TextMessage', () => {
- it('throws if message is not an object', () => {
- try {
- firebase.naturalLanguage().suggestReplies([123]);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql("'textMessage' expected an object value");
- return Promise.resolve();
- }
- });
-
- describe('.text', () => {
- it('throws if text option not provided', () => {
- try {
- firebase.naturalLanguage().suggestReplies([{}]);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql("'textMessage.text' expected a string value");
- return Promise.resolve();
- }
- });
-
- it('throws if text option is not a string', () => {
- try {
- firebase.naturalLanguage().suggestReplies([{ text: 123 }]);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql("'textMessage.text' expected a string value");
- return Promise.resolve();
- }
- });
-
- it('throws if text length is zero', () => {
- try {
- firebase.naturalLanguage().suggestReplies([{ text: '' }]);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql("'textMessage.text' expected string value to not be empty");
- return Promise.resolve();
- }
- });
- });
-
- describe('.userId', () => {
- it('throws if local user true and id provided', () => {
- try {
- firebase.naturalLanguage().suggestReplies([{ text: 'foo', userId: 'bar' }]);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql(
- "'textMessage.userId' expected 'textMessage.isLocalUser' to be false when setting a user ID",
- );
- return Promise.resolve();
- }
- });
-
- it('throws if text userId not provided', () => {
- try {
- firebase.naturalLanguage().suggestReplies([{ text: 'foo', isLocalUser: false }]);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql("'textMessage.userId' expected a string value");
- return Promise.resolve();
- }
- });
-
- it('throws if userId option is not a string', () => {
- try {
- firebase
- .naturalLanguage()
- .suggestReplies([{ text: 'foo', isLocalUser: false, userId: 123 }]);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql("'textMessage.userId' expected a string value");
- return Promise.resolve();
- }
- });
-
- it('throws if userId length is zero', () => {
- try {
- firebase
- .naturalLanguage()
- .suggestReplies([{ text: 'foo', isLocalUser: false, userId: '' }]);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql(
- "'textMessage.userId' expected string value to not be empty",
- );
- return Promise.resolve();
- }
- });
-
- it('sets a user id', () => {
- firebase
- .naturalLanguage()
- .suggestReplies([{ text: 'foo', isLocalUser: false, userId: 'bar' }]);
- });
- });
-
- describe('.timestamp', () => {
- it('throws if timestamp is not a number', () => {
- try {
- firebase.naturalLanguage().suggestReplies([{ text: 'foo', timestamp: 'baz' }]);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql("'textMessage.timestamp' expected number value");
- return Promise.resolve();
- }
- });
-
- it('sets a timestamp', () => {
- firebase.naturalLanguage().suggestReplies([{ text: 'foo', timestamp: Date.now() + 123 }]);
- });
- });
-
- describe('.isLocalUser', () => {
- it('throws if isLocalUser is not a boolean', () => {
- try {
- firebase
- .naturalLanguage()
- .suggestReplies([{ text: 'foo', userId: 'bar', isLocalUser: 'baz' }]);
- return Promise.reject(new Error('Did not throw'));
- } catch (e) {
- e.message.should.containEql("'textMessage.isLocalUser' expected boolean value");
- return Promise.resolve();
- }
- });
- });
- });
- });
-});
diff --git a/packages/ml-natural-language/e2e/translate.e2e.js b/packages/ml-natural-language/e2e/translate.e2e.js
deleted file mode 100644
index 71b19bf61f1..00000000000
--- a/packages/ml-natural-language/e2e/translate.e2e.js
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-// TODO not available on iOS until SDK 6.0.0
-// xdescribe('naturalLanguage() -> Translate', () => {
-// before(async () => {
-// await firebase.naturalLanguage().translateModelManager.downloadRemoteModelIfNeeded('de');
-// });
-//
-// describe('translate()', () => {
-// it('translates test from the specified sourceLanguage to targetLanguage', async () => {
-// const translatedText = await firebase
-// .naturalLanguage()
-// .translate('Hello world', { sourceLanguage: 'en', targetLanguage: 'de' });
-// translatedText.should.equal('Hallo Welt');
-// });
-// });
-//
-// describe('translateModelManager()', () => {
-// it('returns a new instance of TranslateModelManager', async () => {
-// const { translateModelManager } = firebase.naturalLanguage();
-// translateModelManager.should.be.instanceOf(
-// jet.require('packages/ml-natural-language/lib/TranslateModelManager'),
-// );
-// });
-// });
-//
-// describe('TranslateModelManager', () => {
-// describe('downloadRemoteModelIfNeeded()', () => {
-// it('downloads the specified language model', async () => {
-// const { translateModelManager } = firebase.naturalLanguage();
-// await translateModelManager.downloadRemoteModelIfNeeded('de');
-// });
-// });
-// });
-// });
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.h b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.h
deleted file mode 100644
index b6b6eb1a56a..00000000000
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import
-#import
-
-@interface RCTConvert (FIRLanguageIdentificationOptions)
-#if __has_include()
-
-+ (FIRLanguageIdentificationOptions *)firLanguageIdOptionsFromDict:(NSDictionary *)options;
-
-#endif
-@end
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.m b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.m
deleted file mode 100644
index 04bab8bdcbf..00000000000
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.m
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import "RCTConvert+FIRLanguageIdentificationOptions.h"
-
-@implementation RCTConvert (FIRApp)
-#if __has_include()
-
-+ (FIRLanguageIdentificationOptions *)firLanguageIdOptionsFromDict:(NSDictionary *)options {
- if (options[@"confidenceThreshold"] == nil) {
- if (options[@"multipleLanguages"] != nil) {
- return [[FIRLanguageIdentificationOptions alloc] initWithConfidenceThreshold:FIRDefaultIdentifyPossibleLanguagesConfidenceThreshold];
- } else {
- return [[FIRLanguageIdentificationOptions alloc] initWithConfidenceThreshold:FIRDefaultIdentifyLanguageConfidenceThreshold];
- }
- }
-
- float confidenceThreshold = [options[@"confidenceThreshold"] floatValue];
- return [[FIRLanguageIdentificationOptions alloc] initWithConfidenceThreshold:confidenceThreshold];
-}
-
-RCT_CUSTOM_CONVERTER(FIRLanguageIdentificationOptions *, FIRLanguageIdentificationOptions, [self firLanguageIdOptionsFromDict:[self NSDictionary:json]]);
-#endif
-@end
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.h b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.h
deleted file mode 100644
index 406e0c24b47..00000000000
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.h
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import
-
-#import
-
-@interface RCTConvert (FIRTextMessageArray)
-#if __has_include()
-+ (FIRTextMessage *)FIRTextMessage:(id)json;
-#endif
-@end
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.m b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.m
deleted file mode 100644
index 3641b77004e..00000000000
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.m
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import "RCTConvert+FIRTextMessageArray.h"
-
-@implementation RCTConvert (FIRTextMessageArray)
-#if __has_include()
-+ (FIRTextMessage *)FIRTextMessage:(id)json {
- NSDictionary *messageDict = [self NSDictionary:json];
- FIRTextMessage *firTextMessage = [
- [FIRTextMessage alloc]
- initWithText:messageDict[@"text"]
- timestamp:[[messageDict valueForKey:@"timestamp"] doubleValue]
- userID:messageDict[@"userId"] ? messageDict[@"userId"] : @""
- isLocalUser:messageDict[@"isLocalUser"] ? YES : NO
- ];
- return firTextMessage;
-}
-
-RCT_ARRAY_CONVERTER(FIRTextMessage)
-#endif
-@end
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.m b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.m
deleted file mode 100644
index 5c129d16a86..00000000000
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.m
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import "RNFBMLNaturalLanguageIdModule.h"
-#if __has_include()
-
-#import
-#import "RNFBSharedUtils.h"
-
-
-#define DEPENDENCY_EXISTS=1
-#endif
-
-
-@implementation RNFBMLNaturalLanguageIdModule
-#pragma mark -
-#pragma mark Module Setup
-
-RCT_EXPORT_MODULE();
-
-#pragma mark -
-#pragma mark Firebase Mlkit Language Id Methods
-
-#ifdef DEPENDENCY_EXISTS
-
-RCT_EXPORT_METHOD(identifyLanguage:
- (FIRApp *) firebaseApp
- : (NSString *)text
- : (FIRLanguageIdentificationOptions *)identificationOptions
- : (RCTPromiseResolveBlock)resolve
- : (RCTPromiseRejectBlock)reject
-) {
- FIRNaturalLanguage *nL = [FIRNaturalLanguage naturalLanguageForApp:firebaseApp];
- FIRLanguageIdentification *languageId = [nL languageIdentificationWithOptions:identificationOptions];
- FIRIdentifyLanguageCallback completion = ^(NSString *_Nullable languageCode, NSError *_Nullable error) {
- if (error != nil) {
- [self promiseRejectMLKitException:reject error:error];
- } else {
- resolve(languageCode);
- }
- };
- [languageId identifyLanguageForText:text completion:completion];
-}
-
-RCT_EXPORT_METHOD(identifyPossibleLanguages:
- (FIRApp *) firebaseApp
- : (NSString *)text
- : (FIRLanguageIdentificationOptions *)identificationOptions
- : (RCTPromiseResolveBlock)resolve
- : (RCTPromiseRejectBlock)reject
-) {
- FIRNaturalLanguage *nL = [FIRNaturalLanguage naturalLanguageForApp:firebaseApp];
- FIRLanguageIdentification *languageId = [nL languageIdentificationWithOptions:identificationOptions];
- FIRIdentifyPossibleLanguagesCallback completion = ^(NSArray *identifiedLanguages, NSError *error) {
- if (error != nil) {
- [self promiseRejectMLKitException:reject error:error];
- } else {
- NSMutableArray *languages = [[NSMutableArray alloc] initWithCapacity:identifiedLanguages.count];
- for (FIRIdentifiedLanguage *identifiedLanguage in identifiedLanguages) {
- [languages addObject:@{
- @"language": identifiedLanguage.languageCode,
- @"confidence": @(identifiedLanguage.confidence)
- }];
- }
- resolve(languages);
- }
- };
- [languageId identifyPossibleLanguagesForText:text completion:completion];
-}
-
-- (void)promiseRejectMLKitException:(RCTPromiseRejectBlock)reject error:(NSError *)error {
- // TODO no way to distinguish between the error codes like Android supports
- [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
- @"code": @"unknown",
- @"message": [error localizedDescription],
- }];
-}
-
-#endif
-
-@end
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.h b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.h
deleted file mode 100644
index b6a0510774e..00000000000
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.h
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import
-
-#import
-
-@interface RNFBMLNaturalLanguageSmartReplyModule : NSObject
-
-@end
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.m b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.m
deleted file mode 100644
index 9fb79b90bf9..00000000000
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.m
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import "RNFBMLNaturalLanguageSmartReplyModule.h"
-
-#if __has_include()
-#import
-#import "RNFBSharedUtils.h"
-#define DEPENDENCY_EXISTS=1
-#endif
-
-@implementation RNFBMLNaturalLanguageSmartReplyModule
-#pragma mark -
-#pragma mark Module Setup
-
-RCT_EXPORT_MODULE();
-
-#pragma mark -
-#pragma mark Firebase Mlkit Smart Reply Methods
-
-#ifdef DEPENDENCY_EXISTS
-RCT_EXPORT_METHOD(suggestReplies:
- (FIRApp *) firebaseApp
- : (NSArray *)messages
- : (RCTPromiseResolveBlock)resolve
- : (RCTPromiseRejectBlock)reject
-) {
- FIRNaturalLanguage *naturalLanguage = [FIRNaturalLanguage naturalLanguage];
- FIRSmartReply *smartReply = [naturalLanguage smartReply];
-
- FIRSmartReplyCallback completion = ^(
- FIRSmartReplySuggestionResult *_Nullable result,
- NSError *_Nullable error
- ) {
- if (error != nil) {
- [self promiseRejectMLKitException:reject error:error];
- return;
- }
-
- if (result.status == FIRSmartReplyResultStatusSuccess) {
- NSMutableArray *smartReplies = [[NSMutableArray alloc] initWithCapacity:result.suggestions.count];
-
- for (FIRSmartReplySuggestion *suggestion in result.suggestions) {
- NSMutableDictionary *smartReplyDict = [NSMutableDictionary dictionary];
- smartReplyDict[@"text"] = suggestion.text;
- [smartReplies addObject:smartReplyDict];
- }
-
- resolve(smartReplies);
- } else {
- resolve(@[]);
- }
- };
-
- [smartReply suggestRepliesForMessages:messages completion:completion];
-}
-
-- (void)promiseRejectMLKitException:(RCTPromiseRejectBlock)reject error:(NSError *)error {
- // TODO no way to distinguish between the error codes like Android supports
- [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
- @"code": @"unknown",
- @"message": [error localizedDescription],
- }];
-}
-#endif
-
-@end
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.h b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.h
deleted file mode 100644
index d17b0c0dcbe..00000000000
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-// TODO not supported until SDK 6.0.0
-//
-//#import
-//#import
-//
-//@interface RNFBMLNaturalLanguageTranslateModule : NSObject
-//@end
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.m b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.m
deleted file mode 100644
index 840d5c22cb2..00000000000
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.m
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-// TODO not supported until SDK 6.0.0
-//
-//
-//#import
-//#import "RNFBMLNaturalLanguageTranslateModule.h"
-//
-//
-//@implementation RNFBMLNaturalLanguageTranslateModule
-//#pragma mark -
-//#pragma mark Module Setup
-//
-//RCT_EXPORT_MODULE();
-//
-//- (dispatch_queue_t)methodQueue {
-// return dispatch_get_main_queue();
-//}
-//
-//#pragma mark -
-//#pragma mark Firebase Mlkit Translate Methods
-//@end
diff --git a/packages/ml-natural-language/lib/TranslateModelManager.js b/packages/ml-natural-language/lib/TranslateModelManager.js
deleted file mode 100644
index cd3f731ceb0..00000000000
--- a/packages/ml-natural-language/lib/TranslateModelManager.js
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// TODO not available on iOS until SDK 6.0.0
-// export default class TranslateModelManager {
-// constructor(ml) {
-// this.ml = ml;
-// }
-//
-// downloadRemoteModelIfNeeded(language, downloadConditions = {}) {
-// // TODO(salakar) arg validation + tests
-// // downloadConditions:
-// // requireCharging
-// // requireDeviceIdle
-// // requireDeviceIdle
-// const languageId = this.ml.native.TRANSLATE_LANGUAGES[language];
-// return this.ml.native.modelManagerDownloadRemoteModelIfNeeded(languageId, downloadConditions);
-// }
-//
-// // TODO no ios support until SDK v6.0.0
-// deleteDownloadedModel(language) {
-// return this.ml.native.modelManagerDeleteDownloadedModel(language);
-// }
-//
-// getAvailableModels() {
-// return this.ml.native.modelManagerGetAvailableModels();
-// }
-// }
diff --git a/packages/ml-natural-language/lib/index.d.ts b/packages/ml-natural-language/lib/index.d.ts
deleted file mode 100644
index ef4e4e12e67..00000000000
--- a/packages/ml-natural-language/lib/index.d.ts
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import { ReactNativeFirebase } from '@react-native-firebase/app';
-
-/**
- * Firebase ML Kit package for React Native.
- *
- * #### Example 1
- *
- * Access the firebase export from the `naturalLanguage` package:
- *
- * ```js
- * import { firebase } from '@react-native-firebase/ml-natural-language';
- *
- * // firebase.naturalLanguage().X
- * ```
- *
- * #### Example 2
- *
- * Using the default export from the `naturalLanguage` package:
- *
- * ```js
- * import naturalLanguage from '@react-native-firebase/ml-natural-language';
- *
- * // naturalLanguage().X
- * ```
- *
- * #### Example 3
- *
- * Using the default export from the `app` package:
- *
- * ```js
- * import firebase from '@react-native-firebase/app';
- * import '@react-native-firebase/ml-natural-language';
- *
- * // firebase.naturalLanguage().X
- * ```
- *
- * @firebase ml-natural-language
- */
-export namespace FirebaseLanguageTypes {
- import FirebaseModule = ReactNativeFirebase.FirebaseModule;
-
- // eslint-disable-next-line @typescript-eslint/no-empty-interface
- export interface Statics {}
-
- /**
- * An interface representing the language identification options to be used with the
- * `identifyLanguage` and `identifyPossibleLanguages` methods.
- */
- export interface LanguageIdentificationOptions {
- /**
- * The confidence threshold for language identification. The identified languages will have a
- * confidence higher or equal to the confidence threshold. The value should be between 0 and 1, e.g. 0.5.
- *
- * If no value is set, a default value is used instead.
- *
- */
- confidenceThreshold?: number;
- }
-
- /**
- * An identified language for the given input text. Returned as an Array of IdentifiedLanguage from
- * `identifyPossibleLanguages`.
- */
- export interface IdentifiedLanguage {
- /**
- * The [BCP-47 language code](https://en.wikipedia.org/wiki/IETF_language_tag) for the language, e.g. 'en'.
- */
- language: string;
-
- /**
- * The confidence score of the language. A float value between 0 and 1.
- */
- confidence: number;
- }
-
- /**
- * An interface representing a suggested reply, an array of these are returned from `suggestReplies`.
- *
- * #### Example
- *
- * ```js
- * const replies = await firebase.naturalLanguage().suggestReplies([
- * { text: "Hey, long time no speak!", },
- * { text: 'I know right, it has been a while..', userId: 'xxxx', isLocalUser: false },
- * { text: 'We should catchup some time!', },
- * { text: 'Definitely, how about we go for lunch this week?', userId: 'xxxx', isLocalUser: false },
- * ]);
- *
- * replies.forEach(reply => {
- * console.log(reply.text);
- * });
- *
- * ```
- *
- */
- export interface SuggestedReply {
- /**
- * The smart reply text.
- */
- text: string;
- }
-
- /**
- * The Firebase ML Kit service interface.
- *
- * > This module is available for the default app only.
- *
- * #### Example
- *
- * Get the ML Kit service for the default app:
- *
- * ```js
- * const defaultAppMLKit = firebase.naturalLanguage();
- * ```
- */
- export class Module extends FirebaseModule {
- /**
- * Identifies the main language for the given text.
- *
- * Returns a promise that resolves with a [BCP-47 language code](https://en.wikipedia.org/wiki/IETF_language_tag) of the detected language.
- *
- * If the language was undetected or unknown the code returned is `und`.
- *
- * #### Example
- *
- * ```js
- * const language = await firebase.naturalLanguage().identifyLanguage('Hello there. General Kenobi.');
- * console.warn(language); // en
- *
- * const unknownLanguage = await firebase.naturalLanguage().identifyLanguage('foo bar baz', { confidenceThreshold: 0.9 });
- * console.warn(language); // und
- * ```
- *
- * @param text The input text to use for identifying the language. Inputs longer than 200 characters are truncated to 200 characters, as longer input does not improve the detection accuracy.
- * @param options See `LanguageIdentificationOptions`.
- */
- identifyLanguage(text: string, options?: LanguageIdentificationOptions): Promise;
-
- /**
- * Identifies possible languages for the given text.
- *
- * #### Example
- *
- * ```js
- * const identifiedLanguages = firebase.naturalLanguage().identifyPossibleLanguages('hello world');
- * console.warn(identifiedLanguages[0].language); // en
- * ```
- *
- * @param text The input text to use for identifying the language. Inputs longer than 200 characters are truncated to 200 characters, as longer input does not improve the detection accuracy.
- * @param options See `LanguageIdentificationOptions`.
- */
- identifyPossibleLanguages(
- text: string,
- options?: LanguageIdentificationOptions,
- ): Promise;
-
- /**
- * Returns suggested replies for a conversation.
- *
- * #### Example
- *
- * ```js
- * const replies = await firebase.naturalLanguage().suggestReplies([
- * { text: "Hey, long time no speak!", },
- * { text: 'I know right, it has been a while..', userId: 'xxxx', isLocalUser: false },
- * { text: 'We should catchup some time!', },
- * { text: 'Definitely, how about we go for lunch this week?', userId: 'xxxx', isLocalUser: false },
- * ]);
- * ```
- *
- * @param messages An array of `TextMessage` interfaces.
- */
- suggestReplies(messages: TextMessage[]): Promise;
- }
-
- /**
- * A `TextMessage` interface provided to `suggestReplies()`.
- */
- export interface TextMessage {
- /**
- * The message text.
- *
- * This is required and must not be an empty string.
- */
- text: string;
-
- /**
- * Whether the message is a local user. If false, a `userId` must be provided for the message.
- *
- * Defaults to true.
- */
- isLocalUser?: boolean;
-
- /**
- * A user ID of a remote user.
- *
- * Used to help better identify users to provide more accurate replies.
- */
- userId?: string;
-
- /**
- * The timestamp of the message in milliseconds.
- *
- * Defaults to now (`Date.now()`).
- */
- timestamp?: number;
- }
-}
-
-declare const defaultExport: ReactNativeFirebase.FirebaseModuleWithStaticsAndApp<
- FirebaseLanguageTypes.Module,
- FirebaseLanguageTypes.Statics
->;
-
-export const firebase: ReactNativeFirebase.Module & {
- naturalLanguage: typeof defaultExport;
- app(
- name?: string,
- ): ReactNativeFirebase.FirebaseApp & { naturalLanguage(): FirebaseLanguageTypes.Module };
-};
-
-export default defaultExport;
-
-/**
- * Attach namespace to `firebase.` and `FirebaseApp.`.
- */
-declare module '@react-native-firebase/app' {
- namespace ReactNativeFirebase {
- import FirebaseModuleWithStaticsAndApp = ReactNativeFirebase.FirebaseModuleWithStaticsAndApp;
-
- interface Module {
- naturalLanguage: FirebaseModuleWithStaticsAndApp<
- FirebaseLanguageTypes.Module,
- FirebaseLanguageTypes.Statics
- >;
- }
-
- interface FirebaseApp {
- naturalLanguage(): FirebaseLanguageTypes.Module;
- }
-
- interface FirebaseJsonConfig {
- /**
- * If `true`, the Language ID Model will be installed onto the device.
- */
- ml_natural_language_language_id_model: boolean;
-
- /**
- * If `true`, the Smart Reply Model will be installed onto the device.
- */
- ml_natural_language_smart_reply_model: boolean;
- }
- }
-}
diff --git a/packages/ml-natural-language/lib/index.js b/packages/ml-natural-language/lib/index.js
deleted file mode 100644
index f619d4e8064..00000000000
--- a/packages/ml-natural-language/lib/index.js
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import {
- isArray,
- isNumber,
- isObject,
- isString,
- isUndefined,
- validateOptionalNativeDependencyExists,
-} from '@react-native-firebase/app/lib/common';
-import {
- createModuleNamespace,
- FirebaseModule,
- getFirebaseRoot,
-} from '@react-native-firebase/app/lib/internal';
-import validateTextMessage from './validateTextMessage';
-import version from './version';
-
-// TODO not available on iOS until SDK 6.0.0
-// import TranslateModelManager from './TranslateModelManager';
-
-const statics = {};
-const namespace = 'naturalLanguage';
-const nativeModuleName = [
- 'RNFBMLNaturalLanguageIdModule',
- 'RNFBMLNaturalLanguageTranslateModule',
- 'RNFBMLNaturalLanguageSmartReplyModule',
-];
-
-function validateIdentifyLanguageArgs(text, options, methodName) {
- if (!isString(text)) {
- throw new Error(
- `firebase.naturalLanguage().${methodName}(*, _) 'text' must be a string value.`,
- );
- }
-
- if (!isObject(options)) {
- throw new Error(
- `firebase.naturalLanguage().${methodName}(_, *) 'options' must be an object or undefined.`,
- );
- }
-
- if (
- !isUndefined(options.confidenceThreshold) &&
- (!isNumber(options.confidenceThreshold) ||
- options.confidenceThreshold < 0 ||
- options.confidenceThreshold > 1)
- ) {
- throw new Error(
- `firebase.naturalLanguage().${methodName}(_, *) 'options.confidenceThreshold' must be a float value between 0 and 1.`,
- );
- }
-}
-
-class FirebaseMlKitLanguageModule extends FirebaseModule {
- identifyLanguage(text, options = {}) {
- validateOptionalNativeDependencyExists(
- 'ml_natural_language_language_id_model',
- 'ML Kit Language Identification',
- !!this.native.identifyLanguage,
- );
- validateIdentifyLanguageArgs(text, options, 'identifyLanguage');
- return this.native.identifyLanguage(text.slice(0, 200), options);
- }
-
- identifyPossibleLanguages(text, options = {}) {
- validateOptionalNativeDependencyExists(
- 'ml_natural_language_language_id_model',
- 'ML Kit Language Identification',
- !!this.native.identifyPossibleLanguages,
- );
- validateIdentifyLanguageArgs(text, options, 'identifyPossibleLanguages');
- return this.native.identifyPossibleLanguages(
- text.slice(0, 200),
- Object.assign({}, options, { multipleLanguages: true }),
- );
- }
-
- suggestReplies(messages) {
- if (!isArray(messages)) {
- throw new Error(
- "firebase.naturalLanguage().suggestReplies(*) 'messages' must be an array value.",
- );
- }
-
- if (messages.length === 0) {
- return Promise.resolve([]);
- }
-
- const validated = [];
-
- for (let i = 0; i < messages.length; i++) {
- try {
- validated.push(validateTextMessage(messages[i]));
- } catch (e) {
- throw new Error(
- `firebase.naturalLanguage().suggestReplies(*) 'messages' object at index ${i} threw an error. ${e.message}.`,
- );
- }
- }
-
- return this.native.suggestReplies(validated);
- }
-}
-
-// import { SDK_VERSION } from '@react-native-firebase/mlkit';
-export const SDK_VERSION = version;
-
-// import naturalLanguage from '@react-native-firebase/mlkit';
-// naturalLanguage().X(...);
-export default createModuleNamespace({
- statics,
- version,
- namespace,
- nativeModuleName,
- nativeEvents: false,
- hasMultiAppSupport: true,
- hasCustomUrlOrRegionSupport: false,
- ModuleClass: FirebaseMlKitLanguageModule,
-});
-
-// import naturalLanguage, { firebase } from '@react-native-firebase/mlkit';
-// naturalLanguage().X(...);
-// firebase.naturalLanguage().X(...);
-export const firebase = getFirebaseRoot();
-
-// TODO not available on Firebase iOS until SDK 6.0.0, add in RNFB >6.1
-// --------------------------
-// LANGUAGE_TRANSLATE
-// --------------------------
-// translate(text, translationOptions) {
-// const _translationOptions = {};
-//
-// // retrieve the language id integers
-// const { sourceLanguage, targetLanguage } = translationOptions;
-// _translationOptions.sourceLanguage = this.native.TRANSLATE_LANGUAGES[sourceLanguage];
-// _translationOptions.targetLanguage = this.native.TRANSLATE_LANGUAGES[targetLanguage];
-// // translationOptions required:
-// // sourceLanguage
-// // targetLanguage
-// return this.native.translate(text, _translationOptions);
-// }
-//
-// get translateModelManager() {
-// return new TranslateModelManager(this);
-// }
diff --git a/packages/ml-natural-language/lib/validateTextMessage.js b/packages/ml-natural-language/lib/validateTextMessage.js
deleted file mode 100644
index 9b39f474628..00000000000
--- a/packages/ml-natural-language/lib/validateTextMessage.js
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import {
- hasOwnProperty,
- isBoolean,
- isNumber,
- isObject,
- isString,
-} from '@react-native-firebase/app/lib/common';
-
-export default function validateTextMessage(textMessage) {
- if (!isObject(textMessage)) {
- throw new Error("'textMessage' expected an object value");
- }
-
- const out = {
- timestamp: Date.now(),
- isLocalUser: true,
- };
-
- if (!isString(textMessage.text)) {
- throw new Error("'textMessage.text' expected a string value");
- }
-
- if (textMessage.text.length === 0) {
- throw new Error("'textMessage.text' expected string value to not be empty");
- }
-
- out.text = textMessage.text;
-
- if (hasOwnProperty(textMessage, 'timestamp')) {
- if (!isNumber(textMessage.timestamp)) {
- throw new Error("'textMessage.timestamp' expected number value (milliseconds)");
- }
-
- out.timestamp = textMessage.timestamp;
- }
-
- if (hasOwnProperty(textMessage, 'isLocalUser')) {
- if (!isBoolean(textMessage.isLocalUser)) {
- throw new Error("'textMessage.isLocalUser' expected boolean value");
- }
-
- out.isLocalUser = textMessage.isLocalUser;
- }
-
- if (out.isLocalUser && hasOwnProperty(textMessage, 'userId')) {
- throw new Error(
- "'textMessage.userId' expected 'textMessage.isLocalUser' to be false when setting a user ID.",
- );
- } else if (!out.isLocalUser && !hasOwnProperty(textMessage, 'userId')) {
- throw new Error("'textMessage.userId' expected a string value");
- } else if (!out.isLocalUser && hasOwnProperty(textMessage, 'userId')) {
- if (!isString(textMessage.userId)) {
- throw new Error("'textMessage.userId' expected a string value");
- }
-
- if (textMessage.userId.length === 0) {
- throw new Error("'textMessage.userId' expected string value to not be empty");
- }
-
- out.userId = textMessage.userId;
- }
-
- return out;
-}
diff --git a/packages/ml-natural-language/package.json b/packages/ml-natural-language/package.json
deleted file mode 100644
index a49342b0137..00000000000
--- a/packages/ml-natural-language/package.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "name": "@react-native-firebase/ml-natural-language",
- "version": "7.4.11",
- "author": "Invertase (http://invertase.io)",
- "description": "React Native Firebase - Firebase ML Kit brings the power of machine learning to your React Native application, supporting both Android & iOS.",
- "main": "lib/index.js",
- "types": "lib/index.d.ts",
- "scripts": {
- "build": "genversion --semi lib/version.js",
- "build:clean": "rimraf android/build && rimraf ios/build",
- "prepare": "yarn run build"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/invertase/react-native-firebase/tree/master/packages/ml-natural-language"
- },
- "license": "Apache-2.0",
- "keywords": [
- "react",
- "react-native",
- "firebase",
- "mlkit",
- "identify language",
- "smart replies",
- "machine learning",
- "barcode",
- "label",
- "natural language",
- "nlp",
- "vision"
- ],
- "peerDependencies": {
- "@react-native-firebase/app": "8.4.7"
- },
- "publishConfig": {
- "access": "public"
- }
-}
diff --git a/packages/ml-natural-language/type-test.ts b/packages/ml-natural-language/type-test.ts
deleted file mode 100644
index bee7bb9abcf..00000000000
--- a/packages/ml-natural-language/type-test.ts
+++ /dev/null
@@ -1,53 +0,0 @@
-import firebase from '@react-native-firebase/app';
-import * as language from '@react-native-firebase/ml-natural-language';
-
-console.log(language.default().app);
-
-// checks module exists at root
-console.log(firebase.naturalLanguage().app.name);
-
-// checks module exists at app level
-console.log(firebase.app().naturalLanguage().app.name);
-
-// checks statics exist
-console.log(firebase.naturalLanguage.SDK_VERSION);
-
-// checks statics exist on defaultExport
-console.log(firebase.SDK_VERSION);
-
-// checks root exists
-console.log(firebase.SDK_VERSION);
-
-// checks firebase named export exists on module
-console.log(language.firebase.SDK_VERSION);
-
-// checks multi-app support exists
-console.log(firebase.naturalLanguage(firebase.app()).app.name);
-
-firebase
- .naturalLanguage()
- .identifyLanguage('foo', {
- confidenceThreshold: 0.3,
- })
- .then(str => str.replace);
-
-firebase
- .naturalLanguage()
- .identifyPossibleLanguages('foo', {
- confidenceThreshold: 0.3,
- })
- .then(languages => languages.forEach($ => $.confidence));
-
-firebase
- .naturalLanguage()
- .suggestReplies([
- {
- text: 'foo',
- isLocalUser: true,
- userId: '123',
- timestamp: 123,
- },
- ])
- .then(replies => {
- replies.forEach($ => $.text);
- });
diff --git a/packages/ml-vision/.npmignore b/packages/ml-vision/.npmignore
deleted file mode 100644
index 29e5aa19bbf..00000000000
--- a/packages/ml-vision/.npmignore
+++ /dev/null
@@ -1,66 +0,0 @@
-# Built application files
-android/*/build/
-
-# Crashlytics configuations
-android/com_crashlytics_export_strings.xml
-
-# Local configuration file (sdk path, etc)
-android/local.properties
-
-# Gradle generated files
-android/.gradle/
-
-# Signing files
-android/.signing/
-
-# User-specific configurations
-android/.idea/gradle.xml
-android/.idea/libraries/
-android/.idea/workspace.xml
-android/.idea/tasks.xml
-android/.idea/.name
-android/.idea/compiler.xml
-android/.idea/copyright/profiles_settings.xml
-android/.idea/encodings.xml
-android/.idea/misc.xml
-android/.idea/modules.xml
-android/.idea/scopes/scope_settings.xml
-android/.idea/vcs.xml
-android/*.iml
-
-# Xcode
-*.pbxuser
-*.mode1v3
-*.mode2v3
-*.perspectivev3
-*.xcuserstate
-ios/Pods
-ios/build
-*project.xcworkspace*
-*xcuserdata*
-
-# OS-specific files
-.DS_Store
-.DS_Store?
-._*
-.Spotlight-V100
-.Trashes
-ehthumbs.db
-Thumbs.dbandroid/gradle
-android/gradlew
-android/build
-android/gradlew.bat
-android/gradle/
-
-.idea
-coverage
-yarn.lock
-e2e/
-.github
-.vscode
-.nyc_output
-android/.settings
-*.coverage.json
-.circleci
-.eslintignore
-type-test.ts
diff --git a/packages/ml-vision/LICENSE b/packages/ml-vision/LICENSE
deleted file mode 100644
index ef3ed44f066..00000000000
--- a/packages/ml-vision/LICENSE
+++ /dev/null
@@ -1,32 +0,0 @@
-Apache-2.0 License
-------------------
-
-Copyright (c) 2016-present Invertase Limited & Contributors
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this library except in compliance with the License.
-
-You may obtain a copy of the Apache-2.0 License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-
-Creative Commons Attribution 3.0 License
-----------------------------------------
-
-Copyright (c) 2016-present Invertase Limited & Contributors
-
-Documentation and other instructional materials provided for this project
-(including on a separate documentation repository or it's documentation website) are
-licensed under the Creative Commons Attribution 3.0 License. Code samples/blocks
-contained therein are licensed under the Apache License, Version 2.0 (the "License"), as above.
-
-You may obtain a copy of the Creative Commons Attribution 3.0 License at
-
- https://creativecommons.org/licenses/by/3.0/
diff --git a/packages/ml-vision/README.md b/packages/ml-vision/README.md
deleted file mode 100644
index de5fcec63ef..00000000000
--- a/packages/ml-vision/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-
-
-
-
-
React Native Firebase - ML Kit Vision
-
-
----
-
-# DEPRECATED
-
-This package is deprecated and should no longer be used.
-
-Google has split mobile machine learning functionality into two pieces:
-
-1. "On-Device" inferences - this will be handled via the standalone ["Google ML Kit"](https://developers.google.com/ml-kit) libraries, and the related [`react-native-mlkit`](https://github.com/invertase/react-native-mlkit) package. This includes any APIs where the device uses a local model to make inferences
-
-1. "Cloud" inferences - these will continue in Firebase, but are now in the ["Firebase ML"](https://firebase.google.com/docs/ml) library, and will be available from the new consolidated `@react-native-firebase/ml` package
-
-More information on the transition is available here: https://firebase.google.com/docs/ml#cloud_vs_on-device
-
----
-
-
-
-
- Built and maintained with 💛 by Invertase .
-
-
-
----
diff --git a/packages/ml-vision/android/.editorconfig b/packages/ml-vision/android/.editorconfig
deleted file mode 100644
index 670398e9904..00000000000
--- a/packages/ml-vision/android/.editorconfig
+++ /dev/null
@@ -1,10 +0,0 @@
-# editorconfig
-root = true
-
-[*]
-indent_style = space
-indent_size = 2
-end_of_line = lf
-charset = utf-8
-trim_trailing_whitespace = true
-insert_final_newline = true
diff --git a/packages/ml-vision/android/lint.xml b/packages/ml-vision/android/lint.xml
deleted file mode 100644
index c3dd72aca07..00000000000
--- a/packages/ml-vision/android/lint.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/packages/ml-vision/android/ml-models.gradle b/packages/ml-vision/android/ml-models.gradle
deleted file mode 100644
index af7ec7e37f6..00000000000
--- a/packages/ml-vision/android/ml-models.gradle
+++ /dev/null
@@ -1,49 +0,0 @@
-apply from: file("./../../app/android/firebase-json.gradle")
-
-def mlModels = [
- 'ml_vision_face_model',
- 'ml_vision_image_label_model',
- // 'ml_vision_object_detection_model', // object tracking -> TODO 6.1
-]
-
-dependencies {
- if (rootProject.ext && rootProject.ext.firebaseJson) {
- mlModels.each { modelFlag ->
- if (rootProject.ext.firebaseJson.isFlagEnabled(modelFlag) == true) {
- rootProject.logger.info ":${project.name} model enabled: '${modelFlag}'"
- implementation "com.google.firebase:firebase-${modelFlag.replaceAll("_", "-")}"
- } else {
- rootProject.logger.warn ":${project.name} model disabled: '${modelFlag}'"
- }
- }
- } else {
- rootProject.logger.warn ":${project.name} skipping optional models as no firebaseJson extension found, you may be missing a firebase.json file in the root of your React Native project, or you've not installed the @react-native-firebase/app package and included it in your app build."
- }
-}
-
-def manifestModels = [
- 'ml_vision_ocr_model',
- 'ml_vision_face_model',
- 'ml_vision_barcode_model',
- 'ml_vision_label_model',
- // 'ml_vision_ica_model', // object tracking -> TODO 6.1
-]
-
-def manifestModelsString = ""
-
-manifestModels.each { modelFlag ->
- if (rootProject.ext && rootProject.ext.firebaseJson && rootProject.ext.firebaseJson.isFlagEnabled(modelFlag) == true) {
- def modelIdentifier = modelFlag.replace("ml_vision_", "").replace("_model", "")
- if (manifestModelsString.length() > 0) {
- manifestModelsString += "," + modelIdentifier
- } else {
- manifestModelsString += modelIdentifier
- }
- }
-}
-
-android {
- defaultConfig {
- manifestPlaceholders = [visionModels: manifestModelsString]
- }
-}
diff --git a/packages/ml-vision/android/settings.gradle b/packages/ml-vision/android/settings.gradle
deleted file mode 100644
index 2c89304799d..00000000000
--- a/packages/ml-vision/android/settings.gradle
+++ /dev/null
@@ -1 +0,0 @@
-rootProject.name = '@react-native-firebase_ml-vision'
diff --git a/packages/ml-vision/android/src/main/AndroidManifest.xml b/packages/ml-vision/android/src/main/AndroidManifest.xml
deleted file mode 100644
index ed3a069d689..00000000000
--- a/packages/ml-vision/android/src/main/AndroidManifest.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
-
-
-
diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionBarcodeDetectorModule.java b/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionBarcodeDetectorModule.java
deleted file mode 100644
index 602fa6f4566..00000000000
--- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionBarcodeDetectorModule.java
+++ /dev/null
@@ -1,282 +0,0 @@
-package io.invertase.firebase.ml.vision;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-
-import android.content.Context;
-import android.os.Bundle;
-import com.google.android.gms.tasks.Task;
-import com.google.android.gms.tasks.Tasks;
-import com.google.firebase.FirebaseApp;
-import com.google.firebase.ml.vision.FirebaseVision;
-import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcode;
-import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetector;
-import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetectorOptions;
-import com.google.firebase.ml.vision.common.FirebaseVisionImage;
-import io.invertase.firebase.common.SharedUtils;
-import io.invertase.firebase.common.UniversalFirebaseModule;
-
-import java.util.*;
-
-import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*;
-
-@SuppressWarnings("ConstantConditions")
-class UniversalFirebaseMLVisionBarcodeDetectorModule extends UniversalFirebaseModule {
-
- UniversalFirebaseMLVisionBarcodeDetectorModule(Context context, String serviceName) {
- super(context, serviceName);
- }
-
- Task>> barcodeDetectorProcessImage(String appName, String stringUri, Bundle barcodeDetectorOptions) {
- return Tasks.call(getExecutor(), () -> {
- FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
- FirebaseVisionBarcodeDetectorOptions options = getBarcodeDetectorOptions(barcodeDetectorOptions);
-
- FirebaseVisionBarcodeDetector visionBarcodeDetector = FirebaseVision.getInstance(firebaseApp)
- .getVisionBarcodeDetector(options);
-
- FirebaseVisionImage image = FirebaseVisionImage.fromFilePath(
- getContext(),
- SharedUtils.getUri(stringUri)
- );
-
- List detectedBarcodesRaw = Tasks.await(visionBarcodeDetector.detectInImage(image));
-
- return getBarcodesList(detectedBarcodesRaw);
- });
- }
-
- private List> getBarcodesList(List detectedBarcodesRaw) {
- List> detectedBarcodesFormatted = new ArrayList<>(detectedBarcodesRaw.size());
-
- for (FirebaseVisionBarcode barcode : detectedBarcodesRaw) {
- Map barcodeMap = new HashMap<>();
- barcodeMap.put(KEY_BOUNDING_BOX, SharedUtils.rectToIntArray(barcode.getBoundingBox()));
- barcodeMap.put(KEY_CORNER_POINTS, SharedUtils.pointsToIntsList(barcode.getCornerPoints()));
- barcodeMap.put(KEY_FORMAT, barcode.getFormat());
- barcodeMap.put(KEY_VALUE_TYPE, barcode.getValueType());
- barcodeMap.put(KEY_DISPLAY_VALUE, barcode.getDisplayValue());
- barcodeMap.put(KEY_RAW_VALUE, barcode.getRawValue());
-
- // `calendarEvent`
- addCalendarEventFromBarcodeToMap(barcode, barcodeMap);
-
- // `contactInfo`
- addContactInfoFromBarcodeToMap(barcode, barcodeMap);
-
- // driverLicense
- addDriverLicenseFromBarcodeToMap(barcode, barcodeMap);
-
- // email
- addEmailFromBarcodeToMap(barcode, barcodeMap);
-
- // geoPoint
- addGeoPointFromBarcodeToMap(barcode, barcodeMap);
-
- // phone
- addPhoneFromBarcodeToMap(barcode, barcodeMap);
-
- // sms
- addSmsFromBarcodeToMap(barcode, barcodeMap);
-
- // url
- addUrlFromBarcodeToMap(barcode, barcodeMap);
-
- // wifi
- addWifiFromBarcodeToMap(barcode, barcodeMap);
-
- detectedBarcodesFormatted.add(barcodeMap);
- }
-
- return detectedBarcodesFormatted;
- }
-
- private void addDriverLicenseFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) {
- if (barcode.getDriverLicense() == null) return;
- Map driverLicenseMap = new HashMap<>();
- FirebaseVisionBarcode.DriverLicense driverLicense = barcode.getDriverLicense();
- driverLicenseMap.put("addressCity", driverLicense.getAddressCity());
- driverLicenseMap.put("addressState", driverLicense.getAddressState());
- driverLicenseMap.put("addressStreet", driverLicense.getAddressStreet());
- driverLicenseMap.put("addressZip", driverLicense.getAddressZip());
- driverLicenseMap.put("birthDate", driverLicense.getBirthDate());
- driverLicenseMap.put("documentType", driverLicense.getDocumentType());
- driverLicenseMap.put("expiryDate", driverLicense.getExpiryDate());
- driverLicenseMap.put("firstName", driverLicense.getFirstName());
- driverLicenseMap.put("gender", driverLicense.getGender());
- driverLicenseMap.put("issueDate", driverLicense.getIssueDate());
- driverLicenseMap.put("issuingCountry", driverLicense.getIssuingCountry());
- driverLicenseMap.put("lastName", driverLicense.getLastName());
- driverLicenseMap.put("licenseNumber", driverLicense.getLicenseNumber());
- driverLicenseMap.put("middleName", driverLicense.getMiddleName());
- barcodeMap.put("driverLicense", driverLicenseMap);
- }
-
- private void addGeoPointFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) {
- if (barcode.getGeoPoint() == null) return;
- List latLng = new ArrayList<>(2);
- FirebaseVisionBarcode.GeoPoint geoPoint = barcode.getGeoPoint();
- latLng.add(geoPoint.getLat());
- latLng.add(geoPoint.getLng());
- barcodeMap.put(KEY_GEO_POINT, latLng);
- }
-
- private void addSmsFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) {
- if (barcode.getSms() == null) return;
- Map smsMap = new HashMap<>();
- FirebaseVisionBarcode.Sms sms = barcode.getSms();
- smsMap.put("message", sms.getMessage());
- smsMap.put("phoneNumber", sms.getPhoneNumber());
- barcodeMap.put(KEY_SMS, smsMap);
- }
-
- private void addUrlFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) {
- if (barcode.getUrl() == null) return;
- Map urlMap = new HashMap<>();
- FirebaseVisionBarcode.UrlBookmark url = barcode.getUrl();
- urlMap.put("title", url.getTitle());
- urlMap.put("url", url.getUrl());
- barcodeMap.put(KEY_URL, urlMap);
- }
-
- private void addWifiFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) {
- if (barcode.getWifi() == null) return;
- Map wifiMap = new HashMap<>();
- FirebaseVisionBarcode.WiFi wiFi = barcode.getWifi();
- wifiMap.put("encryptionType", wiFi.getEncryptionType());
- wifiMap.put("password", wiFi.getPassword());
- wifiMap.put("ssid", wiFi.getSsid());
- barcodeMap.put(KEY_WIFI, wifiMap);
- }
-
- private void addEmailFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) {
- if (barcode.getEmail() == null) return;
- barcodeMap.put(KEY_EMAIL, getEmailMap(barcode.getEmail()));
- }
-
- private void addPhoneFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) {
- if (barcode.getPhone() == null) return;
- barcodeMap.put(KEY_PHONE, getPhoneMap(barcode.getPhone()));
- }
-
- private void addCalendarEventFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) {
- if (barcode.getCalendarEvent() == null) return;
- Map calendarEventMap = new HashMap<>();
- FirebaseVisionBarcode.CalendarEvent calendarEvent = barcode.getCalendarEvent();
- calendarEventMap.put("description", calendarEvent.getDescription());
- calendarEventMap.put("end", calendarEvent.getEnd().getRawValue());
- calendarEventMap.put("location", calendarEvent.getLocation());
- calendarEventMap.put("organizer", calendarEvent.getOrganizer());
- calendarEventMap.put("start", calendarEvent.getStart().getRawValue());
- calendarEventMap.put("status", calendarEvent.getStatus());
- calendarEventMap.put("summary", calendarEvent.getSummary());
- barcodeMap.put(KEY_CALENDAR_EVENT, calendarEventMap);
- }
-
- private void addContactInfoFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) {
- if (barcode.getContactInfo() == null) return;
- FirebaseVisionBarcode.ContactInfo contactInfo = barcode.getContactInfo();
- Map contactInfoMap = new HashMap<>();
-
- contactInfoMap.put("title", contactInfo.getTitle());
- contactInfoMap.put("organization", contactInfo.getOrganization());
- if (contactInfo.getUrls() == null) {
- contactInfoMap.put("urls", new String[]{});
- } else {
- contactInfoMap.put("urls", contactInfo.getUrls());
- }
-
- // phones
- List phonesListRaw = contactInfo.getPhones();
- List> phonesListFormatted = new ArrayList<>(phonesListRaw.size());
- for (FirebaseVisionBarcode.Phone phone : phonesListRaw) {
- phonesListFormatted.add(getPhoneMap(phone));
- }
- contactInfoMap.put("phones", phonesListFormatted);
-
- // emails
- List emailsListRaw = contactInfo.getEmails();
- List> emailsListFormatted = new ArrayList<>(emailsListRaw.size());
- for (FirebaseVisionBarcode.Email email : emailsListRaw) {
- emailsListFormatted.add(getEmailMap(email));
- }
- contactInfoMap.put("emails", emailsListFormatted);
-
- // person name
- contactInfoMap.put("name", getPersonNameMap(contactInfo.getName()));
-
- // addresses
- List addressListRaw = contactInfo.getAddresses();
- List> addressListFormatted = new ArrayList<>(addressListRaw.size());
- for (FirebaseVisionBarcode.Address email : addressListRaw) {
- addressListFormatted.add(getAddressMap(email));
- }
- contactInfoMap.put("addresses", addressListFormatted);
-
- barcodeMap.put(KEY_CONTACT_INFO, contactInfoMap);
- }
-
- private Map getAddressMap(FirebaseVisionBarcode.Address address) {
- Map addressMap = new HashMap<>();
- addressMap.put("lines", address.getAddressLines());
- addressMap.put("type", address.getType());
- return addressMap;
- }
-
- private Map getPersonNameMap(FirebaseVisionBarcode.PersonName personName) {
- Map personNameMap = new HashMap<>(7);
- personNameMap.put("first", personName.getFirst());
- personNameMap.put("formatted", personName.getFormattedName());
- personNameMap.put("last", personName.getLast());
- personNameMap.put("middle", personName.getMiddle());
- personNameMap.put("prefix", personName.getPrefix());
- personNameMap.put("pronunciation", personName.getPronunciation());
- personNameMap.put("suffix", personName.getSuffix());
- return personNameMap;
- }
-
- private Map getEmailMap(FirebaseVisionBarcode.Email email) {
- Map emailMap = new HashMap<>(3);
- emailMap.put("address", email.getAddress());
- emailMap.put("body", email.getBody());
- emailMap.put("subject", email.getSubject());
- return emailMap;
- }
-
- private Map getPhoneMap(FirebaseVisionBarcode.Phone phone) {
- Map phoneMap = new HashMap<>();
- phoneMap.put("number", phone.getNumber());
- phoneMap.put("type", phone.getType());
- return phoneMap;
- }
-
- private FirebaseVisionBarcodeDetectorOptions getBarcodeDetectorOptions(Bundle barcodeDetectorOptionsBundle) {
- FirebaseVisionBarcodeDetectorOptions.Builder builder = new FirebaseVisionBarcodeDetectorOptions.Builder();
-
- int[] formats = barcodeDetectorOptionsBundle.getIntArray("barcodeFormats");
- if (formats == null) return builder.build();
-
- if (formats.length == 1) {
- builder.setBarcodeFormats(formats[0]);
- } else if (formats.length > 1) {
- builder.setBarcodeFormats(formats[0], Arrays.copyOfRange(formats, 1, formats.length));
- }
-
- return builder.build();
- }
-}
diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionFaceDetectorModule.java b/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionFaceDetectorModule.java
deleted file mode 100644
index 72d6768b56f..00000000000
--- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionFaceDetectorModule.java
+++ /dev/null
@@ -1,275 +0,0 @@
-package io.invertase.firebase.ml.vision;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-
-import android.content.Context;
-import android.os.Bundle;
-import com.google.android.gms.tasks.Task;
-import com.google.android.gms.tasks.Tasks;
-import com.google.firebase.FirebaseApp;
-import com.google.firebase.ml.vision.FirebaseVision;
-import com.google.firebase.ml.vision.common.FirebaseVisionImage;
-import com.google.firebase.ml.vision.common.FirebaseVisionPoint;
-import com.google.firebase.ml.vision.face.*;
-import io.invertase.firebase.common.SharedUtils;
-import io.invertase.firebase.common.UniversalFirebaseModule;
-
-import java.util.*;
-
-import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*;
-
-class UniversalFirebaseMLVisionFaceDetectorModule extends UniversalFirebaseModule {
- UniversalFirebaseMLVisionFaceDetectorModule(Context context, String serviceName) {
- super(context, serviceName);
- }
-
- Task>> faceDetectorProcessImage(
- String appName,
- String stringUri,
- Bundle faceDetectorOptionsBundle
- ) {
- return Tasks.call(getExecutor(), () -> {
- FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
- FirebaseVisionFaceDetectorOptions options = getFaceDetectorOptions(faceDetectorOptionsBundle);
- FirebaseVisionFaceDetector visionFaceDetector = FirebaseVision.getInstance(firebaseApp)
- .getVisionFaceDetector(options);
- FirebaseVisionImage image = FirebaseVisionImage.fromFilePath(
- getContext(),
- SharedUtils.getUri(stringUri)
- );
-
- List visionFacesRaw = Tasks.await(visionFaceDetector.detectInImage(image));
- List> visionFacesFormatted = new ArrayList<>(visionFacesRaw.size());
-
- for (FirebaseVisionFace visionFaceRaw : visionFacesRaw) {
- Map visionFaceFormatted = new HashMap<>();
-
- visionFaceFormatted.put(
- KEY_BOUNDING_BOX,
- SharedUtils.rectToIntArray(visionFaceRaw.getBoundingBox())
- );
- visionFaceFormatted.put(KEY_HEAD_EULER_ANGLE_Y, visionFaceRaw.getHeadEulerAngleY());
- visionFaceFormatted.put(KEY_HEAD_EULER_ANGLE_Z, visionFaceRaw.getHeadEulerAngleZ());
- visionFaceFormatted.put(
- KEY_LEFT_EYE_OPEN_PROBABILITY,
- visionFaceRaw.getLeftEyeOpenProbability()
- );
- visionFaceFormatted.put(
- KEY_RIGHT_EYE_OPEN_PROBABILITY,
- visionFaceRaw.getRightEyeOpenProbability()
- );
-
- visionFaceFormatted.put(KEY_SMILING_PROBABILITY, visionFaceRaw.getSmilingProbability());
- visionFaceFormatted.put(KEY_TRACKING_ID, visionFaceRaw.getTrackingId());
-
- List> faceContoursFormatted;
-
- if (options.getContourMode() == FirebaseVisionFaceDetectorOptions.NO_CONTOURS) {
- faceContoursFormatted = new ArrayList<>(0);
- } else {
- faceContoursFormatted = new ArrayList<>(14);
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.ALL_POINTS)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.FACE)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.LEFT_EYEBROW_TOP)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.LEFT_EYEBROW_BOTTOM)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.RIGHT_EYEBROW_TOP)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.RIGHT_EYEBROW_BOTTOM)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.LEFT_EYE)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.RIGHT_EYE)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.UPPER_LIP_TOP)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.UPPER_LIP_BOTTOM)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.LOWER_LIP_TOP)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.LOWER_LIP_BOTTOM)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.NOSE_BRIDGE)));
- faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.NOSE_BOTTOM)));
- }
-
- visionFaceFormatted.put(KEY_FACE_CONTOURS, faceContoursFormatted);
-
- List> faceLandmarksFormatted = new ArrayList<>(14);
- if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_BOTTOM) != null) {
- faceLandmarksFormatted.add(getLandmarkMap(
- Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_BOTTOM)))
- );
- }
-
- if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_RIGHT) != null) {
- faceLandmarksFormatted.add(getLandmarkMap(
- Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_RIGHT)))
- );
- }
-
- if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_LEFT) != null) {
- faceLandmarksFormatted.add(getLandmarkMap(
- Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_LEFT)))
- );
- }
-
- if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_EYE) != null) {
- faceLandmarksFormatted.add(getLandmarkMap(
- Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_EYE)))
- );
- }
-
- if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_EYE) != null) {
- faceLandmarksFormatted.add(getLandmarkMap(
- Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_EYE)))
- );
- }
-
- if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_EAR) != null) {
- faceLandmarksFormatted.add(getLandmarkMap(
- Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_EAR)))
- );
- }
-
- if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_EAR) != null) {
- faceLandmarksFormatted.add(getLandmarkMap(
- Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_EAR)))
- );
- }
-
- if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_CHEEK) != null) {
- faceLandmarksFormatted.add(getLandmarkMap(
- Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_CHEEK)))
- );
- }
-
- if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_CHEEK) != null) {
- faceLandmarksFormatted.add(getLandmarkMap(
- Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_CHEEK)))
- );
- }
-
- if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.NOSE_BASE) != null) {
- faceLandmarksFormatted.add(getLandmarkMap(
- Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.NOSE_BASE)))
- );
- }
-
- visionFaceFormatted.put(KEY_LANDMARKS, faceLandmarksFormatted);
- visionFacesFormatted.add(visionFaceFormatted);
- }
-
- return visionFacesFormatted;
- });
- }
-
- private Map getLandmarkMap(FirebaseVisionFaceLandmark visionFaceLandmark) {
- Map visionFaceLandmarkMap = new HashMap<>();
- visionFaceLandmarkMap.put(KEY_TYPE, visionFaceLandmark.getLandmarkType());
- visionFaceLandmarkMap.put(KEY_POSITION, getVisionPointMap(visionFaceLandmark.getPosition()));
- return visionFaceLandmarkMap;
- }
-
- private float[] getVisionPointMap(FirebaseVisionPoint visionPoint) {
- return new float[]{visionPoint.getX(), visionPoint.getY()};
- }
-
- private Map getContourMap(FirebaseVisionFaceContour visionFaceContour) {
- Map visionFaceContourMap = new HashMap<>();
-
- List pointsListRaw = visionFaceContour.getPoints();
- List pointsListFormatted = new ArrayList<>(pointsListRaw.size());
- for (FirebaseVisionPoint pointRaw : pointsListRaw) {
- pointsListFormatted.add(getVisionPointMap(pointRaw));
- }
-
- visionFaceContourMap.put(KEY_TYPE, visionFaceContour.getFaceContourType());
- visionFaceContourMap.put(KEY_POINTS, pointsListFormatted);
-
- return visionFaceContourMap;
- }
-
-
- private FirebaseVisionFaceDetectorOptions getFaceDetectorOptions(Bundle faceDetectorOptionsBundle) {
- FirebaseVisionFaceDetectorOptions.Builder builder = new FirebaseVisionFaceDetectorOptions.Builder();
-
- if (faceDetectorOptionsBundle.getBoolean(KEY_ENABLE_TRACKING)) {
- builder.enableTracking();
- }
-
- if (faceDetectorOptionsBundle.containsKey(KEY_CLASSIFICATION_MODE)) {
- int classificationMode = (int) faceDetectorOptionsBundle.getDouble(KEY_CLASSIFICATION_MODE);
- switch (classificationMode) {
- case FirebaseVisionFaceDetectorOptions.NO_CLASSIFICATIONS:
- builder.setClassificationMode(FirebaseVisionFaceDetectorOptions.NO_CLASSIFICATIONS);
- break;
- case FirebaseVisionFaceDetectorOptions.ALL_CLASSIFICATIONS:
- builder.setClassificationMode(FirebaseVisionFaceDetectorOptions.ALL_CLASSIFICATIONS);
- break;
- default:
- throw new IllegalArgumentException(
- "Invalid 'classificationMode' Face Detector option, must be either 1 or 2.");
- }
- }
-
- if (faceDetectorOptionsBundle.containsKey(KEY_CONTOUR_MODE)) {
- int contourMode = (int) faceDetectorOptionsBundle.getDouble(KEY_CONTOUR_MODE);
- switch (contourMode) {
- case FirebaseVisionFaceDetectorOptions.NO_CONTOURS:
- builder.setContourMode(FirebaseVisionFaceDetectorOptions.NO_CONTOURS);
- break;
- case FirebaseVisionFaceDetectorOptions.ALL_CONTOURS:
- builder.setContourMode(FirebaseVisionFaceDetectorOptions.ALL_CONTOURS);
- break;
- default:
- throw new IllegalArgumentException(
- "Invalid 'contourMode' Face Detector option, must be either 1 or 2.");
- }
- }
-
- if (faceDetectorOptionsBundle.containsKey(KEY_LANDMARK_MODE)) {
- int landmarkMode = (int) faceDetectorOptionsBundle.getDouble(KEY_LANDMARK_MODE);
- switch (landmarkMode) {
- case FirebaseVisionFaceDetectorOptions.NO_LANDMARKS:
- builder.setLandmarkMode(FirebaseVisionFaceDetectorOptions.NO_LANDMARKS);
- break;
- case FirebaseVisionFaceDetectorOptions.ALL_LANDMARKS:
- builder.setLandmarkMode(FirebaseVisionFaceDetectorOptions.ALL_LANDMARKS);
- break;
- default:
- throw new IllegalArgumentException(
- "Invalid 'landmarkMode' Face Detector option, must be either 1 or 2.");
- }
- }
-
- if (faceDetectorOptionsBundle.containsKey(KEY_MIN_FACE_SIZE)) {
- float minFaceSize = (float) faceDetectorOptionsBundle.getDouble(KEY_MIN_FACE_SIZE);
- builder.setMinFaceSize(minFaceSize);
- }
-
- if (faceDetectorOptionsBundle.containsKey(KEY_PERFORMANCE_MODE)) {
- int performanceMode = (int) faceDetectorOptionsBundle.getDouble(KEY_PERFORMANCE_MODE);
- switch (performanceMode) {
- case FirebaseVisionFaceDetectorOptions.FAST:
- builder.setPerformanceMode(FirebaseVisionFaceDetectorOptions.FAST);
- break;
- case FirebaseVisionFaceDetectorOptions.ACCURATE:
- builder.setPerformanceMode(FirebaseVisionFaceDetectorOptions.ACCURATE);
- break;
- default:
- throw new IllegalArgumentException(
- "Invalid 'performanceMode' Face Detector option, must be either 1 or 2.");
- }
- }
-
- return builder.build();
- }
-}
diff --git a/packages/ml-vision/android/src/reactnative/AndroidManifest.xml b/packages/ml-vision/android/src/reactnative/AndroidManifest.xml
deleted file mode 100644
index 35065179e96..00000000000
--- a/packages/ml-vision/android/src/reactnative/AndroidManifest.xml
+++ /dev/null
@@ -1,2 +0,0 @@
-
-
diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionImageLabelerModule.java b/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionImageLabelerModule.java
deleted file mode 100644
index 04d7a3922ba..00000000000
--- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionImageLabelerModule.java
+++ /dev/null
@@ -1,73 +0,0 @@
-package io.invertase.firebase.ml.vision;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import com.facebook.react.bridge.*;
-import io.invertase.firebase.common.ReactNativeFirebaseModule;
-
-public class RNFirebaseMLVisionImageLabelerModule extends ReactNativeFirebaseModule {
- private static final String SERVICE_NAME = "MLVisionImageLabeler";
- private final UniversalFirebaseMLVisionImageLabelerModule module;
-
- RNFirebaseMLVisionImageLabelerModule(ReactApplicationContext reactContext) {
- super(reactContext, SERVICE_NAME);
- this.module = new UniversalFirebaseMLVisionImageLabelerModule(reactContext, SERVICE_NAME);
- }
-
- @ReactMethod
- public void imageLabelerProcessImage(String appName, String stringUri, ReadableMap imageLabelerOptions, Promise promise) {
- this.module.imageLabelerProcessImage(appName, stringUri, Arguments.toBundle(imageLabelerOptions))
- .addOnCompleteListener(task -> {
- if (task.isSuccessful()) {
- promise.resolve(
- Arguments.makeNativeArray(task.getResult())
- );
- } else {
- String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException(
- task.getException());
- rejectPromiseWithCodeAndMessage(
- promise,
- errorCodeAndMessage[0],
- errorCodeAndMessage[1],
- errorCodeAndMessage[2]
- );
- }
- });
- }
-
- @ReactMethod
- public void cloudImageLabelerProcessImage(String appName, String stringUri, ReadableMap cloudImageLabelerOptions, Promise promise) {
- this.module.cloudImageLabelerProcessImage(appName, stringUri, Arguments.toBundle(cloudImageLabelerOptions))
- .addOnCompleteListener(task -> {
- if (task.isSuccessful()) {
- promise.resolve(
- Arguments.makeNativeArray(task.getResult())
- );
- } else {
- String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException(
- task.getException());
- rejectPromiseWithCodeAndMessage(
- promise,
- errorCodeAndMessage[0],
- errorCodeAndMessage[1],
- errorCodeAndMessage[2]
- );
- }
- });
- }
-}
diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionTextRecognizerModule.java b/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionTextRecognizerModule.java
deleted file mode 100644
index bd481451656..00000000000
--- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionTextRecognizerModule.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package io.invertase.firebase.ml.vision;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import com.facebook.react.bridge.*;
-import io.invertase.firebase.common.ReactNativeFirebaseModule;
-
-public class RNFirebaseMLVisionTextRecognizerModule extends ReactNativeFirebaseModule {
- private static final String SERVICE_NAME = "MLVisionTextRecognizer";
- private final UniversalFirebaseMLVisionTextRecognizerModule module;
-
- RNFirebaseMLVisionTextRecognizerModule(ReactApplicationContext reactContext) {
- super(reactContext, SERVICE_NAME);
- this.module = new UniversalFirebaseMLVisionTextRecognizerModule(reactContext, SERVICE_NAME);
- }
-
- @ReactMethod
- public void textRecognizerProcessImage(
- String appName,
- String stringUri,
- Promise promise
- ) {
- module.textRecognizerProcessImage(appName, stringUri)
- .addOnCompleteListener(getExecutor(), task -> {
- if (task.isSuccessful()) {
- promise.resolve(Arguments.makeNativeMap(task.getResult()));
- } else {
- String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException(
- task.getException());
- rejectPromiseWithCodeAndMessage(
- promise,
- errorCodeAndMessage[0],
- errorCodeAndMessage[1],
- errorCodeAndMessage[2]
- );
- }
- });
- }
-
- @ReactMethod
- public void cloudTextRecognizerProcessImage(
- String appName,
- String stringUri,
- ReadableMap cloudTextRecognizerOptions,
- Promise promise
- ) {
- module.cloudTextRecognizerProcessImage(appName, stringUri, Arguments.toBundle(cloudTextRecognizerOptions))
- .addOnCompleteListener(getExecutor(), task -> {
- if (task.isSuccessful()) {
- promise.resolve(Arguments.makeNativeMap(task.getResult()));
- } else {
- String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException(
- task.getException());
- rejectPromiseWithCodeAndMessage(
- promise,
- errorCodeAndMessage[0],
- errorCodeAndMessage[1],
- errorCodeAndMessage[2]
- );
- }
- });
- }
-}
diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/ReactNativeFirebaseMLVisionPackage.java b/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/ReactNativeFirebaseMLVisionPackage.java
deleted file mode 100644
index 212722a828c..00000000000
--- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/ReactNativeFirebaseMLVisionPackage.java
+++ /dev/null
@@ -1,62 +0,0 @@
-package io.invertase.firebase.ml.vision;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import com.facebook.react.ReactPackage;
-import com.facebook.react.bridge.NativeModule;
-import com.facebook.react.bridge.ReactApplicationContext;
-import com.facebook.react.uimanager.ViewManager;
-import io.invertase.firebase.common.ReactNativeFirebaseJSON;
-
-import javax.annotation.Nonnull;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-@SuppressWarnings("unused")
-public class ReactNativeFirebaseMLVisionPackage implements ReactPackage {
- @Nonnull
- @Override
- public List createNativeModules(@Nonnull ReactApplicationContext reactContext) {
- List modules = new ArrayList<>();
- modules.add(new RNFirebaseMLVisionBarcodeDetectorModule(reactContext));
- modules.add(new RNFirebaseMLVisionTextRecognizerModule(reactContext));
- modules.add(new RNFirebaseMLVisionLandmarkRecognizerModule(reactContext));
- modules.add(new RNFirebaseMLVisionDocumentTextRecognizerModule(reactContext));
-
- if (ReactNativeFirebaseJSON
- .getSharedInstance()
- .getBooleanValue("ml_vision_face_model", false)) {
- modules.add(new RNFirebaseMLVisionFaceDetectorModule(reactContext));
- }
-
- if (ReactNativeFirebaseJSON
- .getSharedInstance()
- .getBooleanValue("ml_vision_image_label_model", false)) {
- modules.add(new RNFirebaseMLVisionImageLabelerModule(reactContext));
- }
-
- return modules;
- }
-
- @Nonnull
- @Override
- public List createViewManagers(@Nonnull ReactApplicationContext reactContext) {
- return Collections.emptyList();
- }
-}
diff --git a/packages/ml-vision/e2e/barcode.e2e.js b/packages/ml-vision/e2e/barcode.e2e.js
deleted file mode 100644
index eb471ce4271..00000000000
--- a/packages/ml-vision/e2e/barcode.e2e.js
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-let testImageFile;
-
-function barcodeValidate(barcode) {
- barcode.should.be.Object();
-
- barcode.boundingBox.should.be.Array();
- barcode.boundingBox.length.should.eql(4);
- barcode.boundingBox.forEach($ => $.should.be.Number());
-
- barcode.cornerPoints.should.be.Array();
- barcode.cornerPoints.length.should.eql(4);
- barcode.cornerPoints.forEach($ => {
- $.should.be.Array();
- $.length.should.eql(2);
- $.forEach(_ => _.should.be.Number());
- });
-
- barcode.format.should.be.Number();
- barcode.valueType.should.be.Number();
-
- barcode.displayValue.should.be.String();
- barcode.rawValue.should.be.String();
-}
-
-describe('mlkit.vision.barcode', () => {
- before(async () => {
- testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/barcode.png`;
- await firebase
- .storage()
- .ref('vision/barcode.png')
- .writeToFile(testImageFile);
- });
-
- describe('barcodeDetectorProcessImage()', () => {
- it('should throw if image path is not a string', () => {
- try {
- firebase.vision().barcodeDetectorProcessImage(123);
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql("'localImageFilePath' expected a string local file path");
- return Promise.resolve();
- }
- });
-
- it('should return a valid response', async () => {
- const res = await firebase.vision().barcodeDetectorProcessImage(testImageFile);
-
- res.should.be.Array();
- res.length.should.be.greaterThan(0);
- res.forEach($ => barcodeValidate($));
- });
- });
-
- describe('VisionBarcodeDetectorOptions', () => {
- it('throws if not an object', async () => {
- try {
- await firebase.vision().barcodeDetectorProcessImage(testImageFile, '123');
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql("'barcodeDetectorOptions' expected an object value");
- return Promise.resolve();
- }
- });
-
- describe('barcodeFormats', () => {
- it('should throw if not an array', async () => {
- try {
- await firebase.vision().barcodeDetectorProcessImage(testImageFile, {
- barcodeFormats: 'foo',
- });
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql(
- "'barcodeDetectorOptions.barcodeFormats' must be an array of VisionBarcodeFormat types",
- );
- return Promise.resolve();
- }
- });
-
- it('should throw if array item is invalid type', async () => {
- try {
- await firebase.vision().barcodeDetectorProcessImage(testImageFile, {
- barcodeFormats: [firebase.vision.VisionBarcodeFormat.AZTEC, 'foobar'],
- });
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql(
- "'barcodeDetectorOptions.barcodeFormats' type at index 1 is invalid",
- );
- return Promise.resolve();
- }
- });
-
- it('sets formats', async () => {
- await firebase.vision().barcodeDetectorProcessImage(testImageFile, {
- barcodeFormats: [
- firebase.vision.VisionBarcodeFormat.AZTEC,
- firebase.vision.VisionBarcodeFormat.DATA_MATRIX,
- ],
- });
- });
- });
- });
-});
diff --git a/packages/ml-vision/e2e/face.e2e.js b/packages/ml-vision/e2e/face.e2e.js
deleted file mode 100644
index 5727021246d..00000000000
--- a/packages/ml-vision/e2e/face.e2e.js
+++ /dev/null
@@ -1,272 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-let testImageFile;
-
-describe('mlkit.vision.face', () => {
- before(async () => {
- testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/faces.jpg`;
- await firebase
- .storage()
- .ref('vision/faces.jpg')
- .writeToFile(testImageFile);
- });
-
- describe('faceDetectorProcessImage()', () => {
- it('should throw if image path is not a string', () => {
- try {
- firebase.vision().faceDetectorProcessImage(123);
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql("'localImageFilePath' expected a string local file path");
- return Promise.resolve();
- }
- });
-
- it('returns basic face object with no options enabled', async () => {
- const res = await firebase.vision().faceDetectorProcessImage(testImageFile);
-
- res.should.be.Array();
- res.length.should.be.greaterThan(0);
-
- res.forEach(i => {
- // Currently disabled
- i.trackingId.should.eql(-1);
-
- i.rightEyeOpenProbability.should.eql(-1);
- i.leftEyeOpenProbability.should.eql(-1);
- i.smilingProbability.should.eql(-1);
-
- i.landmarks.length.should.eql(0);
- i.faceContours.length.should.eql(0);
-
- i.boundingBox.length.should.eql(4);
-
- i.headEulerAngleZ.should.be.Number();
- i.headEulerAngleY.should.be.Number();
- });
- });
-
- it('returns classifications if enabled', async () => {
- const res = await firebase.vision().faceDetectorProcessImage(testImageFile, {
- classificationMode: 2,
- });
-
- res.should.be.Array();
- res.length.should.be.greaterThan(0);
-
- res.forEach(i => {
- i.rightEyeOpenProbability.should.greaterThan(-1);
- i.leftEyeOpenProbability.should.greaterThan(-1);
- i.smilingProbability.should.greaterThan(-1);
- });
- });
-
- it('returns landmarks if enabled', async () => {
- const res = await firebase.vision().faceDetectorProcessImage(testImageFile, {
- landmarkMode: 2,
- });
- res.should.be.Array();
- res.length.should.be.greaterThan(0);
-
- res.forEach(i => {
- i.landmarks.length.should.be.greaterThan(0);
-
- i.landmarks.forEach(l => {
- l.type.should.be.Number();
- l.type.should.be.greaterThan(-1);
- l.position.length.should.be.eql(2);
- l.position.forEach(p => p.should.be.Number());
- });
- });
- });
-
- it('returns contours if enabled', async () => {
- const res = await firebase.vision().faceDetectorProcessImage(testImageFile, {
- contourMode: 2,
- });
- res.should.be.Array();
- res.length.should.be.greaterThan(0);
-
- res.forEach(i => {
- i.faceContours.length.should.be.greaterThan(0);
-
- i.faceContours.forEach(l => {
- l.type.should.be.Number();
- l.type.should.be.greaterThan(-1);
- l.points.length.should.be.greaterThan(1);
- l.points.forEach(p => {
- p.should.be.Array();
- p.length.should.be.eql(2);
- });
- });
- });
- });
- });
-
- describe('VisionFaceDetectorOptions', () => {
- it('throws if not an object', async () => {
- try {
- await firebase.vision().faceDetectorProcessImage(testImageFile, '123');
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql("'faceDetectorOptions' expected an object value");
- return Promise.resolve();
- }
- });
-
- describe('classificationMode', () => {
- it('throws if mode is incorrect', async () => {
- try {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- classificationMode: 'foo',
- });
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql(
- "'faceDetectorOptions.classificationMode' invalid classification mode",
- );
- return Promise.resolve();
- }
- });
-
- it('sets classificationMode', async () => {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- classificationMode:
- firebase.vision.VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS,
- });
-
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- classificationMode:
- firebase.vision.VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS,
- });
- });
- });
-
- describe('contourMode', () => {
- it('throws if mode is incorrect', async () => {
- try {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- contourMode: 'foo',
- });
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql("'faceDetectorOptions.contourMode' invalid contour mode");
- return Promise.resolve();
- }
- });
-
- it('sets contourMode', async () => {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- contourMode: firebase.vision.VisionFaceDetectorContourMode.NO_CONTOURS,
- });
-
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- contourMode: firebase.vision.VisionFaceDetectorContourMode.ALL_CONTOURS,
- });
- });
- });
-
- describe('performanceMode', () => {
- it('throws if mode is incorrect', async () => {
- try {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- performanceMode: 'foo',
- });
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql(
- "'faceDetectorOptions.performanceMode' invalid performance mode",
- );
- return Promise.resolve();
- }
- });
-
- it('sets performanceMode', async () => {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- performanceMode: firebase.vision.VisionFaceDetectorPerformanceMode.FAST,
- });
-
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- performanceMode: firebase.vision.VisionFaceDetectorPerformanceMode.ACCURATE,
- });
- });
- });
-
- describe('landmarkMode', () => {
- it('throws if mode is incorrect', async () => {
- try {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- landmarkMode: 'foo',
- });
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql(
- "'faceDetectorOptions.landmarkMode' invalid landmark mode",
- );
- return Promise.resolve();
- }
- });
-
- it('sets landmarkMode', async () => {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- landmarkMode: firebase.vision.VisionFaceDetectorLandmarkMode.NO_LANDMARKS,
- });
-
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- landmarkMode: firebase.vision.VisionFaceDetectorLandmarkMode.ALL_LANDMARKS,
- });
- });
- });
-
- describe('minFaceSize', () => {
- it('throws if size is not a number', async () => {
- try {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- minFaceSize: '0.1',
- });
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql(
- "'faceDetectorOptions.minFaceSize' expected a number value between 0 & 1",
- );
- return Promise.resolve();
- }
- });
-
- it('throws if size is not valid', async () => {
- try {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- minFaceSize: -1,
- });
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql(
- "'faceDetectorOptions.minFaceSize' expected value to be between 0 & 1",
- );
- return Promise.resolve();
- }
- });
-
- it('sets minFaceSize', async () => {
- await firebase.vision().faceDetectorProcessImage(testImageFile, {
- minFaceSize: 0.3,
- });
- });
- });
- });
-});
diff --git a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.pbxproj b/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.pbxproj
deleted file mode 100644
index 90c5b755119..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.pbxproj
+++ /dev/null
@@ -1,384 +0,0 @@
-// !$*UTF8*$!
-{
- archiveVersion = 1;
- classes = {
- };
- objectVersion = 48;
- objects = {
-
-/* Begin PBXBuildFile section */
- 8B06D3F322F84F7200A5B542 /* RNFBMLVisionLandmarkRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D3F222F84F7200A5B542 /* RNFBMLVisionLandmarkRecognizerModule.m */; };
- 8B06D3FC22F863AE00A5B542 /* RNFBMLVisionCommon.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D3FB22F863AE00A5B542 /* RNFBMLVisionCommon.m */; };
- 8B06D40022F8748C00A5B542 /* RNFBMLVisionFaceDetectorModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D3FF22F8748C00A5B542 /* RNFBMLVisionFaceDetectorModule.m */; };
- 8B06D40622F97B4900A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40522F97B4900A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.m */; };
- 8B06D40A22F989EF00A5B542 /* RNFBMLVisionTextRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40922F989EF00A5B542 /* RNFBMLVisionTextRecognizerModule.m */; };
- 8B06D40E22F99DF900A5B542 /* RNFBMLVisionImageLabelerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40D22F99DF900A5B542 /* RNFBMLVisionImageLabelerModule.m */; };
- 8B06D41222F9A15A00A5B542 /* RNFBMLVisionBarcodeDetectorModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D41122F9A15A00A5B542 /* RNFBMLVisionBarcodeDetectorModule.m */; };
-/* End PBXBuildFile section */
-
-/* Begin PBXCopyFilesBuildPhase section */
- 2744B98021F45429004F8E3F /* CopyFiles */ = {
- isa = PBXCopyFilesBuildPhase;
- buildActionMask = 2147483647;
- dstPath = "";
- dstSubfolderSpec = 16;
- files = (
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
-/* End PBXCopyFilesBuildPhase section */
-
-/* Begin PBXFileReference section */
- 2744B98221F45429004F8E3F /* libRNFBMLVision.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRNFBMLVision.a; sourceTree = BUILT_PRODUCTS_DIR; };
- 8B06D3F122F84F6500A5B542 /* RNFBMLVisionLandmarkRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionLandmarkRecognizerModule.h; sourceTree = ""; };
- 8B06D3F222F84F7200A5B542 /* RNFBMLVisionLandmarkRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionLandmarkRecognizerModule.m; sourceTree = ""; };
- 8B06D3FA22F863A400A5B542 /* RNFBMLVisionCommon.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionCommon.h; sourceTree = ""; };
- 8B06D3FB22F863AE00A5B542 /* RNFBMLVisionCommon.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionCommon.m; sourceTree = ""; };
- 8B06D3FE22F8747F00A5B542 /* RNFBMLVisionFaceDetectorModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionFaceDetectorModule.h; sourceTree = ""; };
- 8B06D3FF22F8748C00A5B542 /* RNFBMLVisionFaceDetectorModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionFaceDetectorModule.m; sourceTree = ""; };
- 8B06D40422F97B3600A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionDocumentTextRecognizerModule.h; sourceTree = ""; };
- 8B06D40522F97B4900A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionDocumentTextRecognizerModule.m; sourceTree = ""; };
- 8B06D40822F989E400A5B542 /* RNFBMLVisionTextRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionTextRecognizerModule.h; sourceTree = ""; };
- 8B06D40922F989EF00A5B542 /* RNFBMLVisionTextRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionTextRecognizerModule.m; sourceTree = ""; };
- 8B06D40C22F99DEF00A5B542 /* RNFBMLVisionImageLabelerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionImageLabelerModule.h; sourceTree = ""; };
- 8B06D40D22F99DF900A5B542 /* RNFBMLVisionImageLabelerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionImageLabelerModule.m; sourceTree = ""; };
- 8B06D41022F9A14B00A5B542 /* RNFBMLVisionBarcodeDetectorModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionBarcodeDetectorModule.h; sourceTree = ""; };
- 8B06D41122F9A15A00A5B542 /* RNFBMLVisionBarcodeDetectorModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionBarcodeDetectorModule.m; sourceTree = ""; };
-/* End PBXFileReference section */
-
-/* Begin PBXFrameworksBuildPhase section */
- 2744B97F21F45429004F8E3F /* Frameworks */ = {
- isa = PBXFrameworksBuildPhase;
- buildActionMask = 2147483647;
- files = (
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
-/* End PBXFrameworksBuildPhase section */
-
-/* Begin PBXGroup section */
- 2744B97521F452B8004F8E3F /* Products */ = {
- isa = PBXGroup;
- children = (
- 2744B98221F45429004F8E3F /* libRNFBMLVision.a */,
- );
- name = Products;
- sourceTree = "";
- };
- 2744B98321F45429004F8E3F /* RNFBMLVision */ = {
- isa = PBXGroup;
- children = (
- 8B06D3F122F84F6500A5B542 /* RNFBMLVisionLandmarkRecognizerModule.h */,
- 8B06D3F222F84F7200A5B542 /* RNFBMLVisionLandmarkRecognizerModule.m */,
- 8B06D3FA22F863A400A5B542 /* RNFBMLVisionCommon.h */,
- 8B06D3FB22F863AE00A5B542 /* RNFBMLVisionCommon.m */,
- 8B06D3FE22F8747F00A5B542 /* RNFBMLVisionFaceDetectorModule.h */,
- 8B06D3FF22F8748C00A5B542 /* RNFBMLVisionFaceDetectorModule.m */,
- 8B06D40422F97B3600A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.h */,
- 8B06D40522F97B4900A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.m */,
- 8B06D40822F989E400A5B542 /* RNFBMLVisionTextRecognizerModule.h */,
- 8B06D40922F989EF00A5B542 /* RNFBMLVisionTextRecognizerModule.m */,
- 8B06D40C22F99DEF00A5B542 /* RNFBMLVisionImageLabelerModule.h */,
- 8B06D40D22F99DF900A5B542 /* RNFBMLVisionImageLabelerModule.m */,
- 8B06D41022F9A14B00A5B542 /* RNFBMLVisionBarcodeDetectorModule.h */,
- 8B06D41122F9A15A00A5B542 /* RNFBMLVisionBarcodeDetectorModule.m */,
- );
- path = RNFBMLVision;
- sourceTree = "";
- };
- 3323F52AAFE26B7384BE4DE3 = {
- isa = PBXGroup;
- children = (
- 2744B98321F45429004F8E3F /* RNFBMLVision */,
- 2744B97521F452B8004F8E3F /* Products */,
- );
- sourceTree = "";
- };
-/* End PBXGroup section */
-
-/* Begin PBXNativeTarget section */
- 2744B98121F45429004F8E3F /* RNFBMLVision */ = {
- isa = PBXNativeTarget;
- buildConfigurationList = 2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBMLVision" */;
- buildPhases = (
- 2744B97E21F45429004F8E3F /* Sources */,
- 2744B97F21F45429004F8E3F /* Frameworks */,
- 2744B98021F45429004F8E3F /* CopyFiles */,
- );
- buildRules = (
- );
- dependencies = (
- );
- name = RNFBMLVision;
- productName = RNFBMLVision;
- productReference = 2744B98221F45429004F8E3F /* libRNFBMLVision.a */;
- productType = "com.apple.product-type.library.static";
- };
-/* End PBXNativeTarget section */
-
-/* Begin PBXProject section */
- 3323F95273A95DB34F55C6D7 /* Project object */ = {
- isa = PBXProject;
- attributes = {
- CLASSPREFIX = RNFBMLVision;
- LastUpgradeCheck = 1010;
- ORGANIZATIONNAME = Invertase;
- TargetAttributes = {
- 2744B98121F45429004F8E3F = {
- CreatedOnToolsVersion = 10.1;
- ProvisioningStyle = Automatic;
- };
- };
- };
- buildConfigurationList = 3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBMLVision" */;
- compatibilityVersion = "Xcode 8.0";
- developmentRegion = English;
- hasScannedForEncodings = 0;
- knownRegions = (
- English,
- en,
- );
- mainGroup = 3323F52AAFE26B7384BE4DE3;
- productRefGroup = 2744B97521F452B8004F8E3F /* Products */;
- projectDirPath = "";
- projectRoot = "";
- targets = (
- 2744B98121F45429004F8E3F /* RNFBMLVision */,
- );
- };
-/* End PBXProject section */
-
-/* Begin PBXSourcesBuildPhase section */
- 2744B97E21F45429004F8E3F /* Sources */ = {
- isa = PBXSourcesBuildPhase;
- buildActionMask = 2147483647;
- files = (
- 8B06D40E22F99DF900A5B542 /* RNFBMLVisionImageLabelerModule.m in Sources */,
- 8B06D40622F97B4900A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.m in Sources */,
- 8B06D40A22F989EF00A5B542 /* RNFBMLVisionTextRecognizerModule.m in Sources */,
- 8B06D3F322F84F7200A5B542 /* RNFBMLVisionLandmarkRecognizerModule.m in Sources */,
- 8B06D3FC22F863AE00A5B542 /* RNFBMLVisionCommon.m in Sources */,
- 8B06D40022F8748C00A5B542 /* RNFBMLVisionFaceDetectorModule.m in Sources */,
- 8B06D41222F9A15A00A5B542 /* RNFBMLVisionBarcodeDetectorModule.m in Sources */,
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
-/* End PBXSourcesBuildPhase section */
-
-/* Begin XCBuildConfiguration section */
- 2744B98921F45429004F8E3F /* Debug */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- ALWAYS_SEARCH_USER_PATHS = NO;
- CLANG_ANALYZER_NONNULL = YES;
- CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
- CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
- CLANG_CXX_LIBRARY = "libc++";
- CLANG_ENABLE_MODULES = YES;
- CLANG_ENABLE_OBJC_ARC = YES;
- CLANG_ENABLE_OBJC_WEAK = YES;
- CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
- CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
- CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
- CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
- CODE_SIGN_IDENTITY = "iPhone Developer";
- CODE_SIGN_STYLE = Automatic;
- COPY_PHASE_STRIP = NO;
- DEBUG_INFORMATION_FORMAT = dwarf;
- GCC_C_LANGUAGE_STANDARD = gnu11;
- GCC_DYNAMIC_NO_PIC = NO;
- GCC_OPTIMIZATION_LEVEL = 0;
- GCC_PREPROCESSOR_DEFINITIONS = (
- "DEBUG=1",
- "$(inherited)",
- );
- GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
- GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
- IPHONEOS_DEPLOYMENT_TARGET = 10.0;
- MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
- MTL_FAST_MATH = YES;
- OTHER_LDFLAGS = "-ObjC";
- PRODUCT_NAME = "$(TARGET_NAME)";
- SDKROOT = iphoneos;
- SKIP_INSTALL = YES;
- TARGETED_DEVICE_FAMILY = "1,2";
- };
- name = Debug;
- };
- 2744B98A21F45429004F8E3F /* Release */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- ALWAYS_SEARCH_USER_PATHS = NO;
- CLANG_ANALYZER_NONNULL = YES;
- CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
- CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
- CLANG_CXX_LIBRARY = "libc++";
- CLANG_ENABLE_MODULES = YES;
- CLANG_ENABLE_OBJC_ARC = YES;
- CLANG_ENABLE_OBJC_WEAK = YES;
- CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
- CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
- CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
- CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
- CODE_SIGN_IDENTITY = "iPhone Developer";
- CODE_SIGN_STYLE = Automatic;
- COPY_PHASE_STRIP = NO;
- DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
- ENABLE_NS_ASSERTIONS = NO;
- GCC_C_LANGUAGE_STANDARD = gnu11;
- GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
- GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
- IPHONEOS_DEPLOYMENT_TARGET = 10.0;
- MTL_ENABLE_DEBUG_INFO = NO;
- MTL_FAST_MATH = YES;
- OTHER_LDFLAGS = "-ObjC";
- PRODUCT_NAME = "$(TARGET_NAME)";
- SDKROOT = iphoneos;
- SKIP_INSTALL = YES;
- TARGETED_DEVICE_FAMILY = "1,2";
- VALIDATE_PRODUCT = YES;
- };
- name = Release;
- };
- 3323F77D701E1896E6D239CF /* Release */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
- CLANG_WARN_BOOL_CONVERSION = YES;
- CLANG_WARN_COMMA = YES;
- CLANG_WARN_CONSTANT_CONVERSION = YES;
- CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
- CLANG_WARN_EMPTY_BODY = YES;
- CLANG_WARN_ENUM_CONVERSION = YES;
- CLANG_WARN_INFINITE_RECURSION = YES;
- CLANG_WARN_INT_CONVERSION = YES;
- CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
- CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
- CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
- CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
- CLANG_WARN_STRICT_PROTOTYPES = YES;
- CLANG_WARN_SUSPICIOUS_MOVE = YES;
- CLANG_WARN_UNREACHABLE_CODE = YES;
- CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
- ENABLE_STRICT_OBJC_MSGSEND = YES;
- FRAMEWORK_SEARCH_PATHS = (
- "$(inherited)",
- "${BUILT_PRODUCTS_DIR}/**",
- "${SRCROOT}/../../../ios/Firebase/**",
- "$(FIREBASE_SEARCH_PATH)/Firebase/**",
- "$(SRCROOT)/../../../ios/Pods/FirebaseMlkitLanguage/Frameworks",
- "$(SRCROOT)/../../../tests/ios/Pods/FirebaseMlkitLanguage/Frameworks",
- );
- GCC_NO_COMMON_BLOCKS = YES;
- GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
- GCC_WARN_ABOUT_RETURN_TYPE = YES;
- GCC_WARN_UNDECLARED_SELECTOR = YES;
- GCC_WARN_UNINITIALIZED_AUTOS = YES;
- GCC_WARN_UNUSED_FUNCTION = YES;
- GCC_WARN_UNUSED_VARIABLE = YES;
- HEADER_SEARCH_PATHS = (
- "$(inherited)",
- "$(REACT_SEARCH_PATH)/React/**",
- "$(SRCROOT)/../../react-native/React/**",
- "$(SRCROOT)/../../react-native-firebase/ios/**",
- "$(FIREBASE_SEARCH_PATH)/Firebase/**",
- "${SRCROOT}/../../../ios/Firebase/**",
- "${SRCROOT}/../../../ios/Pods/Headers/Public/**",
- "${SRCROOT}/../../../tests/ios/Pods/Headers/Public/**",
- "$(SRCROOT)/../../../node_modules/react-native/React/**",
- "$(SRCROOT)/../../../node_modules/react-native-firebase/ios/**",
- "$(SRCROOT)/../../../packages/app/ios/**",
- );
- IPHONEOS_DEPLOYMENT_TARGET = 10.0;
- LIBRARY_SEARCH_PATHS = "$(inherited)";
- MACH_O_TYPE = staticlib;
- OTHER_LDFLAGS = "$(inherited)";
- PRODUCT_NAME = "$(TARGET_NAME)";
- SKIP_INSTALL = YES;
- };
- name = Release;
- };
- 3323F7E33E1559A2B9826720 /* Debug */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
- CLANG_WARN_BOOL_CONVERSION = YES;
- CLANG_WARN_COMMA = YES;
- CLANG_WARN_CONSTANT_CONVERSION = YES;
- CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
- CLANG_WARN_EMPTY_BODY = YES;
- CLANG_WARN_ENUM_CONVERSION = YES;
- CLANG_WARN_INFINITE_RECURSION = YES;
- CLANG_WARN_INT_CONVERSION = YES;
- CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
- CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
- CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
- CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
- CLANG_WARN_STRICT_PROTOTYPES = YES;
- CLANG_WARN_SUSPICIOUS_MOVE = YES;
- CLANG_WARN_UNREACHABLE_CODE = YES;
- CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
- ENABLE_STRICT_OBJC_MSGSEND = YES;
- ENABLE_TESTABILITY = YES;
- FRAMEWORK_SEARCH_PATHS = (
- "$(inherited)",
- "${BUILT_PRODUCTS_DIR}/**",
- "${SRCROOT}/../../../ios/Firebase/**",
- "$(FIREBASE_SEARCH_PATH)/Firebase/**",
- "$(SRCROOT)/../../../ios/Pods/FirebaseMlkitLanguage/Frameworks",
- );
- GCC_NO_COMMON_BLOCKS = YES;
- GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
- GCC_WARN_ABOUT_RETURN_TYPE = YES;
- GCC_WARN_UNDECLARED_SELECTOR = YES;
- GCC_WARN_UNINITIALIZED_AUTOS = YES;
- GCC_WARN_UNUSED_FUNCTION = YES;
- GCC_WARN_UNUSED_VARIABLE = YES;
- HEADER_SEARCH_PATHS = (
- "$(inherited)",
- "$(REACT_SEARCH_PATH)/React/**",
- "$(SRCROOT)/../../react-native/React/**",
- "$(SRCROOT)/../../react-native-firebase/ios/**",
- "$(FIREBASE_SEARCH_PATH)/Firebase/**",
- "${SRCROOT}/../../../ios/Firebase/**",
- "${SRCROOT}/../../../ios/Pods/Headers/Public/**",
- "${SRCROOT}/../../../tests/ios/Pods/Headers/Public/**",
- "$(SRCROOT)/../../../node_modules/react-native/React/**",
- "$(SRCROOT)/../../../node_modules/react-native-firebase/ios/**",
- "$(SRCROOT)/../../../packages/app/ios/**",
- );
- IPHONEOS_DEPLOYMENT_TARGET = 10.0;
- LIBRARY_SEARCH_PATHS = "$(inherited)";
- MACH_O_TYPE = staticlib;
- ONLY_ACTIVE_ARCH = YES;
- OTHER_LDFLAGS = "$(inherited)";
- PRODUCT_NAME = "$(TARGET_NAME)";
- SKIP_INSTALL = YES;
- };
- name = Debug;
- };
-/* End XCBuildConfiguration section */
-
-/* Begin XCConfigurationList section */
- 2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBMLVision" */ = {
- isa = XCConfigurationList;
- buildConfigurations = (
- 2744B98921F45429004F8E3F /* Debug */,
- 2744B98A21F45429004F8E3F /* Release */,
- );
- defaultConfigurationIsVisible = 0;
- defaultConfigurationName = Release;
- };
- 3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBMLVision" */ = {
- isa = XCConfigurationList;
- buildConfigurations = (
- 3323F7E33E1559A2B9826720 /* Debug */,
- 3323F77D701E1896E6D239CF /* Release */,
- );
- defaultConfigurationIsVisible = 0;
- defaultConfigurationName = Release;
- };
-/* End XCConfigurationList section */
- };
- rootObject = 3323F95273A95DB34F55C6D7 /* Project object */;
-}
diff --git a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/contents.xcworkspacedata
deleted file mode 100644
index 919434a6254..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/contents.xcworkspacedata
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
diff --git a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
deleted file mode 100644
index 18d981003d6..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
- IDEDidComputeMac32BitWarning
-
-
-
diff --git a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings b/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
deleted file mode 100644
index 0c67376ebac..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/xcshareddata/IDETemplateMacros.plist b/packages/ml-vision/ios/RNFBMLVision.xcodeproj/xcshareddata/IDETemplateMacros.plist
deleted file mode 100644
index 63f0a6e5dd8..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/xcshareddata/IDETemplateMacros.plist
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-
-
- FILEHEADER
-
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.h b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.h
deleted file mode 100644
index 7e40c1b65e7..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.h
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import
-#import
-#import
-
-@interface RNFBMLVisionBarcodeDetectorModule : NSObject
-
-@end
\ No newline at end of file
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.m b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.m
deleted file mode 100644
index 23e0a0b16ec..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.m
+++ /dev/null
@@ -1,257 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import
-#import
-#import "RNFBMLVisionBarcodeDetectorModule.h"
-#import "RNFBMLVisionCommon.h"
-
-@implementation RNFBMLVisionBarcodeDetectorModule
-#pragma mark -
-#pragma mark Module Setup
-
-RCT_EXPORT_MODULE();
-
-#pragma mark -
-#pragma mark Firebase ML Kit Vision Methods
-
-RCT_EXPORT_METHOD(barcodeDetectorProcessImage:
- (FIRApp *) firebaseApp
- : (NSString *)filePath
- : (NSDictionary *)barcodeDetectorOptions
- : (RCTPromiseResolveBlock)resolve
- : (RCTPromiseRejectBlock)reject
-) {
- [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
- if (errorCodeMessageArray != nil) {
- [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
- @"code": errorCodeMessageArray[0],
- @"message": errorCodeMessageArray[1],
- }];
- return;
- }
-
- FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithImage:image];
- FIRVision *vision = [FIRVision visionForApp:firebaseApp];
-
- FIRVisionBarcodeFormat barcodeFormat = nil;
-
- if (barcodeDetectorOptions[@"barcodeFormats"]) {
- NSArray *formats = barcodeDetectorOptions[@"barcodeFormats"];
- for (id format in formats) {
- if (barcodeFormat == nil) {
- barcodeFormat = [format integerValue];
- } else {
- barcodeFormat |= [format integerValue];
- }
- }
- } else {
- barcodeFormat = FIRVisionBarcodeFormatAll;
- }
-
- FIRVisionBarcodeDetectorOptions *options = [[FIRVisionBarcodeDetectorOptions alloc] initWithFormats:barcodeFormat];
-
- FIRVisionBarcodeDetector *barcodeDetector = [vision barcodeDetectorWithOptions:options];
- [barcodeDetector detectInImage:visionImage completion:^(NSArray *barcodes, NSError *error) {
- if (error != nil) {
- [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
- @"code": @"unknown",
- @"message": [error localizedDescription],
- }];
- return;
- }
-
- if (barcodes == nil) {
- resolve(@[]);
- return;
- }
-
- resolve([self getBarcodesList:barcodes]);
- }];
- }];
-}
-
-- (NSArray *)getBarcodesList:(NSArray *)barcodes {
- NSMutableArray *barcodeListFormatted = [[NSMutableArray alloc] init];
-
- for (FIRVisionBarcode *barcode in barcodes) {
- NSMutableDictionary *formattedBarcode = [[NSMutableDictionary alloc] init];
-
- formattedBarcode[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:barcode.frame];
- formattedBarcode[@"cornerPoints"] = [RNFBMLVisionCommon visionPointsToArray:barcode.cornerPoints];
- formattedBarcode[@"format"] = @(barcode.format);
- formattedBarcode[@"valueType"] = @(barcode.valueType);
- formattedBarcode[@"displayValue"] = barcode.displayValue;
- formattedBarcode[@"rawValue"] = barcode.rawValue;
-
- if (barcode.email != nil) formattedBarcode[@"email"] = [self getEmailMap:barcode.email];
- if (barcode.phone != nil) formattedBarcode[@"phone"] = [self getPhoneMap:barcode.phone];
- if (barcode.sms != nil) formattedBarcode[@"sms"] = [self getSMSMap:barcode.sms];
- if (barcode.URL != nil) formattedBarcode[@"url"] = [self getURLMap:barcode.URL];
- if (barcode.wifi != nil) formattedBarcode[@"wifi"] = [self getWiFiMap:barcode.wifi];
- if (barcode.geoPoint != nil) formattedBarcode[@"geoPoint"] = [self getGeoPointList:barcode.geoPoint];
- if (barcode.contactInfo != nil) formattedBarcode[@"contactInfo"] = [self getContactInfoMap:barcode.contactInfo];
- if (barcode.calendarEvent != nil) formattedBarcode[@"calendarEvent"] = [self getCalendarEventMap:barcode.calendarEvent];
- if (barcode.driverLicense != nil) formattedBarcode[@"driverLicense"] = [self getDriverLicenseMap:barcode.driverLicense];
-
- [barcodeListFormatted addObject:formattedBarcode];
- }
-
- return barcodeListFormatted;
-}
-
-- (NSDictionary *)getEmailMap:(FIRVisionBarcodeEmail *)email {
- return @{
- @"address": email.address ?: (id) [NSNull null],
- @"body": email.body ?: (id) [NSNull null],
- @"subject": email.subject ?: (id) [NSNull null],
- };
-}
-
-- (NSDictionary *)getPhoneMap:(FIRVisionBarcodePhone *)phone {
- return @{
- @"number": phone.number ?: (id) [NSNull null],
- @"type": @(phone.type),
- };
-}
-
-- (NSDictionary *)getSMSMap:(FIRVisionBarcodeSMS *)sms {
- return @{
- @"message": sms.message ?: (id) [NSNull null],
- @"phoneNumber": sms.phoneNumber ?: (id) [NSNull null],
- };
-}
-
-- (NSDictionary *)getURLMap:(FIRVisionBarcodeURLBookmark *)url {
- return @{
- @"title": url.title ?: (id) [NSNull null],
- @"url": url.url ?: (id) [NSNull null],
- };
-}
-
-- (NSDictionary *)getWiFiMap:(FIRVisionBarcodeWiFi *)wifi {
- return @{
- @"encryptionType": @(wifi.type),
- @"password": wifi.password ?: (id) [NSNull null],
- @"ssid": wifi.ssid ?: (id) [NSNull null],
- };
-}
-
-- (NSArray *)getGeoPointList:(FIRVisionBarcodeGeoPoint *)geoPoint {
- return @[@(geoPoint.latitude), @(geoPoint.longitude)];
-}
-
-- (NSDictionary *)getPersonNameMap:(FIRVisionBarcodePersonName *)name {
- return @{
- @"first": name.first ?: (id) [NSNull null],
- @"formatted": name.formattedName ?: (id) [NSNull null],
- @"last": name.last ?: (id) [NSNull null],
- @"middle": name.middle ?: (id) [NSNull null],
- @"prefix": name.prefix ?: (id) [NSNull null],
- @"pronunciation": name.pronounciation ?: (id) [NSNull null],
- @"suffix": name.suffix ?: (id) [NSNull null],
- };
-}
-
-- (NSDictionary *)getAddressMap:(FIRVisionBarcodeAddress *)address {
- return @{
- @"lines": address.addressLines ?: @[],
- @"type": @(address.type),
- };
-}
-
-- (NSDictionary *)getContactInfoMap:(FIRVisionBarcodeContactInfo *)contactInfo {
- NSMutableDictionary *contactInfoFormatted = [@{
- @"title": contactInfo.jobTitle ?: (id) [NSNull null],
- @"organisation": contactInfo.organization ?: (id) [NSNull null],
- } mutableCopy];
-
- // Name
- if (contactInfo.name != nil) {
- contactInfoFormatted[@"name"] = [self getPersonNameMap:contactInfo.name];
- }
-
- // URLs
- NSMutableArray *urls = [@[] mutableCopy];
- if (contactInfo.urls != nil) {
- for (NSString *url in contactInfo.urls) {
- [urls addObject:url];
- }
- }
- contactInfoFormatted[@"urls"] = urls;
-
- // Phones
- NSMutableArray *phones = [@[] mutableCopy];
- if (contactInfo.phones != nil) {
- for (FIRVisionBarcodePhone *phone in contactInfo.phones) {
- [phones addObject:[self getPhoneMap:phone]];
- }
- }
- contactInfoFormatted[@"phones"] = phones;
-
- // Emails
- NSMutableArray *emails = [@[] mutableCopy];
- if (contactInfo.emails != nil) {
- for (FIRVisionBarcodeEmail *email in contactInfo.emails) {
- [emails addObject:[self getEmailMap:email]];
- }
- }
- contactInfoFormatted[@"emails"] = phones;
-
- // Addresses
- NSMutableArray *addresses = [@[] mutableCopy];
- if (contactInfo.addresses != nil) {
- for (FIRVisionBarcodeAddress *address in contactInfo.addresses) {
- [emails addObject:[self getAddressMap:address]];
- }
- }
- contactInfoFormatted[@"addresses"] = addresses;
-
- return contactInfoFormatted;
-}
-
-- (NSDictionary *)getCalendarEventMap:(FIRVisionBarcodeCalendarEvent *)event {
- return @{
- @"description": event.description ?: (id) [NSNull null],
- @"end": event.end ? [RNFBSharedUtils getISO8601String:event.end] : (id) [NSNull null],
- @"location": event.location ?: (id) [NSNull null],
- @"organizer": event.organizer ?: (id) [NSNull null],
- @"start": event.start ? [RNFBSharedUtils getISO8601String:event.start] : (id) [NSNull null],
- @"status": event.status ?: (id) [NSNull null],
- @"summary": event.summary ?: (id) [NSNull null],
- };
-}
-
-- (NSDictionary *)getDriverLicenseMap:(FIRVisionBarcodeDriverLicense *)license {
- return @{
- @"addressCity": license.addressCity ?: (id) [NSNull null],
- @"addressState": license.addressState ?: (id) [NSNull null],
- @"addressZip": license.addressZip ?: (id) [NSNull null],
- @"birthDate": license.birthDate ?: (id) [NSNull null],
- @"documentType": license.documentType ?: (id) [NSNull null],
- @"expiryDate": license.expiryDate ?: (id) [NSNull null],
- @"firstName": license.firstName ?: (id) [NSNull null],
- @"gender": license.gender ?: (id) [NSNull null],
- @"issueDate": license.issuingDate ?: (id) [NSNull null],
- @"issuingCountry": license.issuingCountry ?: (id) [NSNull null],
- @"lastName": license.lastName ?: (id) [NSNull null],
- @"licenseNumber": license.licenseNumber ?: (id) [NSNull null],
- @"middleName": license.middleName ?: (id) [NSNull null],
- };
-}
-
-@end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.m b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.m
deleted file mode 100644
index 82fe1da2124..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.m
+++ /dev/null
@@ -1,175 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-
-#import
-#import
-#import "RNFBMLVisionCommon.h"
-
-@implementation RNFBMLVisionCommon
-
-+ (NSArray *)rectToIntArray:(CGRect)rect {
- CGSize size = rect.size;
- CGPoint point = rect.origin;
- return @[@(point.x), @(point.y), @(point.x + size.width), @(point.y + size.height)];
-}
-
-+ (NSDictionary *)contourToDict:(FIRVisionFaceContour *)visionFaceContour {
- NSMutableDictionary *visionFaceContourDict = [[NSMutableDictionary alloc] init];
-
- if (visionFaceContour == nil) {
- return visionFaceContourDict;
- }
-
- NSMutableArray *pointsFormatted = [[NSMutableArray alloc] init];
- for (FIRVisionPoint *point in visionFaceContour.points) {
- [pointsFormatted addObject:[self arrayForFIRVisionPoint:point]];
- }
-
- visionFaceContourDict[@"type"] = [self contourTypeToInt:visionFaceContour.type];
- visionFaceContourDict[@"points"] = pointsFormatted;
-
- return visionFaceContourDict;
-}
-
-+ (NSNumber *)contourTypeToInt:(NSString *)faceContourType {
- if ([@"All" isEqualToString:faceContourType]) {
- return @1;
- }
- if ([@"Face" isEqualToString:faceContourType]) {
- return @2;
- }
- if ([@"LeftEyebrowTop" isEqualToString:faceContourType]) {
- return @3;
- }
- if ([@"LeftEyebrowBottom" isEqualToString:faceContourType]) {
- return @4;
- }
- if ([@"RightEyebrowTop" isEqualToString:faceContourType]) {
- return @5;
- }
- if ([@"RightEyebrowBottom" isEqualToString:faceContourType]) {
- return @6;
- }
- if ([@"LeftEye" isEqualToString:faceContourType]) {
- return @7;
- }
- if ([@"RightEye" isEqualToString:faceContourType]) {
- return @8;
- }
- if ([@"UpperLipTop" isEqualToString:faceContourType]) {
- return @9;
- }
- if ([@"UpperLipBottom" isEqualToString:faceContourType]) {
- return @10;
- }
- if ([@"LowerLipTop" isEqualToString:faceContourType]) {
- return @11;
- }
- if ([@"LowerLipBottom" isEqualToString:faceContourType]) {
- return @12;
- }
- if ([@"NoseBridge" isEqualToString:faceContourType]) {
- return @13;
- }
- if ([@"NoseBottom" isEqualToString:faceContourType]) {
- return @14;
- }
- return @-1;
-}
-
-+ (NSDictionary *)landmarkToDict:(FIRVisionFaceLandmark *)visionFaceLandmark {
- NSMutableDictionary *visionFaceLandmarkDict = [[NSMutableDictionary alloc] init];
-
- if (visionFaceLandmark == nil) {
- return visionFaceLandmarkDict;
- }
-
- visionFaceLandmarkDict[@"type"] = [self landmarkTypeToInt:visionFaceLandmark.type];
- visionFaceLandmarkDict[@"position"] = [self arrayForFIRVisionPoint:visionFaceLandmark.position];
- return visionFaceLandmarkDict;
-}
-
-+ (NSNumber *)landmarkTypeToInt:(NSString *)faceLandmarkType {
- if ([@"MouthBottom" isEqualToString:faceLandmarkType]) {
- return @0;
- }
- if ([@"MouthRight" isEqualToString:faceLandmarkType]) {
- return @11;
- }
- if ([@"MouthLeft" isEqualToString:faceLandmarkType]) {
- return @5;
- }
- if ([@"LeftEar" isEqualToString:faceLandmarkType]) {
- return @3;
- }
- if ([@"RightEar" isEqualToString:faceLandmarkType]) {
- return @9;
- }
- if ([@"LeftEye" isEqualToString:faceLandmarkType]) {
- return @4;
- }
- if ([@"RightEye" isEqualToString:faceLandmarkType]) {
- return @10;
- }
- if ([@"LeftCheek" isEqualToString:faceLandmarkType]) {
- return @1;
- }
- if ([@"RightCheek" isEqualToString:faceLandmarkType]) {
- return @7;
- }
- if ([@"NoseBase" isEqualToString:faceLandmarkType]) {
- return @6;
- }
- return @-1;
-}
-
-+ (NSArray *)visionPointsToArray:(NSArray *_Nullable)points {
- if (points == nil) {
- return @[];
- }
-
- NSMutableArray *pointsArray = [[NSMutableArray alloc] init];
- for (NSValue *point in points) {
- [pointsArray addObject:[self arrayForCGPoint:point.CGPointValue]];
- }
-
- return pointsArray;
-}
-
-+ (NSArray *)arrayForCGPoint:(CGPoint)point {
- return @[@(point.x), @(point.y)];
-}
-
-+ (NSArray *)arrayForFIRVisionPoint:(FIRVisionPoint *)point {
- return @[point.x, point.y];
-}
-
-+ (void)UIImageForFilePath:(NSString *)localFilePath completion:(void (^)(
- NSArray *errorCodeMessageArray,
- UIImage *image
-))completion {
- if (![[NSFileManager defaultManager] fileExistsAtPath:localFilePath]) {
- completion(@[@"file-not-found", @"The local file specified does not exist on the device."], nil);
- } else {
- dispatch_async(dispatch_get_main_queue(), ^{
- completion(nil, [RCTConvert UIImage:localFilePath]);
- });
- }
-}
-
-@end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.h b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.h
deleted file mode 100644
index 1a4c094bfd3..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.h
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import
-#import
-#import
-
-@interface RNFBMLVisionDocumentTextRecognizerModule : NSObject
-
-@end
\ No newline at end of file
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.m b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.m
deleted file mode 100644
index 891c3d334eb..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.m
+++ /dev/null
@@ -1,149 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import
-#import
-#import "RNFBMLVisionFaceDetectorModule.h"
-#import "RNFBMLVisionCommon.h"
-
-@implementation RNFBMLVisionFaceDetectorModule
-#pragma mark -
-#pragma mark Module Setup
-
-RCT_EXPORT_MODULE();
-
-#pragma mark -
-#pragma mark Firebase ML Kit Vision Methods
-
-RCT_EXPORT_METHOD(faceDetectorProcessImage:
- (FIRApp *) firebaseApp
- : (NSString *)filePath
- : (NSDictionary *)faceDetectorOptions
- : (RCTPromiseResolveBlock)resolve
- : (RCTPromiseRejectBlock)reject
-) {
- [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
- if (errorCodeMessageArray != nil) {
- [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
- @"code": errorCodeMessageArray[0],
- @"message": errorCodeMessageArray[1],
- }];
- return;
- }
-
- FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithImage:image];
- FIRVision *vision = [FIRVision visionForApp:firebaseApp];
-
- FIRVisionFaceDetectorOptions *options = [[FIRVisionFaceDetectorOptions alloc] init];
-
- NSInteger *classificationMode = [faceDetectorOptions[@"classificationMode"] integerValue];
- if (classificationMode == 1) {
- options.classificationMode = FIRVisionFaceDetectorClassificationModeNone;
- } else if (classificationMode == 2) {
- options.classificationMode = FIRVisionFaceDetectorClassificationModeAll;
- }
-
- NSInteger *contourMode = [faceDetectorOptions[@"contourMode"] integerValue];
- if (contourMode == 1) {
- options.contourMode = FIRVisionFaceDetectorContourModeNone;
- } else if (contourMode == 2) {
- options.contourMode = FIRVisionFaceDetectorContourModeAll;
- }
-
- NSInteger *landmarkMode = [faceDetectorOptions[@"landmarkMode"] integerValue];
- if (landmarkMode == 1) {
- options.landmarkMode = FIRVisionFaceDetectorLandmarkModeNone;
- } else if (landmarkMode == 2) {
- options.landmarkMode = FIRVisionFaceDetectorLandmarkModeAll;
- }
-
- NSInteger *performanceMode = [faceDetectorOptions[@"performanceMode"] integerValue];
- if (performanceMode == 1) {
- options.performanceMode = FIRVisionFaceDetectorPerformanceModeFast;
- } else if (performanceMode == 2) {
- options.performanceMode = FIRVisionFaceDetectorPerformanceModeAccurate;
- }
-
- options.minFaceSize = [faceDetectorOptions[@"minFaceSize"] doubleValue];
-
- FIRVisionFaceDetector *faceDetector = [vision faceDetectorWithOptions:options];
- [faceDetector processImage:visionImage completion:^(NSArray *faces, NSError *error) {
- if (error != nil) {
- [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
- @"code": @"unknown",
- @"message": [error localizedDescription],
- }];
- return;
- }
-
- NSMutableArray *facesFormatted = [[NSMutableArray alloc] init];
-
- for (FIRVisionFace *face in faces) {
- NSMutableDictionary *visionFace = [[NSMutableDictionary alloc] init];
-
- visionFace[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:face.frame];
-
- visionFace[@"headEulerAngleY"] = face.hasHeadEulerAngleY ? @(face.headEulerAngleY) : @(-1);
- visionFace[@"headEulerAngleZ"] = face.hasHeadEulerAngleZ ? @(face.headEulerAngleZ) : @(-1);
- visionFace[@"leftEyeOpenProbability"] = face.hasLeftEyeOpenProbability ? @(face.leftEyeOpenProbability) : @(-1);
- visionFace[@"rightEyeOpenProbability"] = face.hasRightEyeOpenProbability ? @(face.rightEyeOpenProbability) : @(-1);
- visionFace[@"smilingProbability"] = face.hasSmilingProbability ? @(face.smilingProbability) : @(-1);
- visionFace[@"trackingId"] = face.hasTrackingID ? @(face.trackingID) : @(-1);
-
- // Contours
- NSMutableArray *faceContours = [[NSMutableArray alloc] init];
- if (contourMode == (NSInteger *) 2) {
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeAll]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeFace]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeLeftEyebrowTop]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeLeftEyebrowBottom]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeRightEyebrowTop]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeRightEyebrowBottom]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeLeftEye]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeRightEye]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeUpperLipTop]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeUpperLipBottom]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeLowerLipTop]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeLowerLipBottom]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeNoseBridge]]];
- [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeNoseBottom]]];
- }
- visionFace[@"faceContours"] = faceContours;
-
- // Face Landmarks
- NSMutableArray *faceLandmarks = [[NSMutableArray alloc] init];
- if (landmarkMode == (NSInteger *) 2) {
- [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeMouthBottom]]];
- [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeMouthRight]]];
- [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeMouthLeft]]];
- [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeRightEye]]];
- [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeLeftEye]]];
- [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeRightCheek]]];
- [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeLeftCheek]]];
- [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeNoseBase]]];
- }
- visionFace[@"landmarks"] = faceLandmarks;
-
- [facesFormatted addObject:visionFace];
- }
-
- resolve(facesFormatted);
- }];
- }];
-}
-
-@end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.h b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.h
deleted file mode 100644
index 29a772cf6fc..00000000000
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.h
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#import
-#import
-#import
-
-@interface RNFBMLVisionTextRecognizerModule : NSObject
-
-@end
\ No newline at end of file
diff --git a/packages/ml-vision/lib/BarcodeDetectorTypes.d.ts b/packages/ml-vision/lib/BarcodeDetectorTypes.d.ts
deleted file mode 100644
index 8e0cce321f4..00000000000
--- a/packages/ml-vision/lib/BarcodeDetectorTypes.d.ts
+++ /dev/null
@@ -1,1029 +0,0 @@
-import { FirebaseVisionTypes } from '.';
-
-/**
- * Firebase ML Kit package for React Native.
- *
- * #### Example 1
- *
- * Access the firebase export from the `ml-vision` package:
- *
- * ```js
- * import { firebase } from '@react-native-firebase/ml-vision';
- *
- * // firebase.vision().X
- * ```
- *
- * #### Example 2
- *
- * Using the default export from the `ml-vision` package:
- *
- * ```js
- * import vision from '@react-native-firebase/ml-vision';
- *
- * // vision().X
- * ```
- *
- * #### Example 3
- *
- * Using the default export from the `app` package:
- *
- * ```js
- * import firebase from '@react-native-firebase/app';
- * import '@react-native-firebase/ml-vision';
- *
- * // firebase.vision().X
- * ```
- *
- * @firebase ml-vision
- */
-export namespace MLKitVision {
- /**
- * A representation of a barcode detected in an image.
- *
- * #### Example
- *
- * ```js
- * const [barcode, ...otherBarcodes] = await firebase.vision().barcodeDetectorProcessImage(filePath);
- * console.log(barcode);
- * ```
- */
- export interface VisionBarcode {
- /**
- * Returns the bounding rectangle of the detected barcode.
- */
- boundingBox: FirebaseVisionTypes.VisionRectangle;
-
- /**
- * Gets the four corner points in clockwise direction starting with top-left. Due to the possible perspective distortions, this is not necessarily a rectangle. Parts of the region could be outside of the image.
- */
- cornerPoints: FirebaseVisionTypes.VisionPoint[];
-
- /**
- * Returns the barcode format, for example `VisionBarcodeFormat.QR_CODE`
- *
- * Use with `VisionBarcodeFormat` to switch based on format if needed.
- */
- format: number;
-
- /**
- * Returns type of the barcode value, for example `VisionBarcodeValueType.EMAIL`.
- *
- * If the value structure cannot be parsed, `VisionBarcodeValueType.TEXT` will be returned.
- * If the recognized structure type is not defined in the current version of the native Firebase SDKs, `VisionBarcodeValueType.UNKNOWN` will be returned.
- *
- * Note that the built-in parsers only recognize a few popular value structures. For your specific use case, you might want to directly consume `rawValue` and implement your own parsing logic.
- */
- valueType: number;
-
- /**
- * Returns barcode value in a user-friendly format.
- *
- * May omit some of the information encoded in the barcode. For example, if `'rawValue returns `MEBKM:TITLE:Invertase;URL://invertase.io;;'`, the display_value might be `'//invertase.io'`.
- *
- * If `valueType` === `VisionBarcodeValueType.TEXT`, this field will be identical to `rawValue`.
- *
- * This value can also be multiline, for example, when line breaks are encoded into the original `TEXT` barcode value.
- *
- * Returns `null` if nothing found.
- */
- displayValue: string | null;
-
- /**
- * Returns barcode value as it was encoded in the barcode.
- *
- * Structured values are not parsed.
- *
- * Returns `null` if nothing found.
- */
- rawValue: string | null;
-
- /**
- * Gets parsed calendar event (set if `valueType` is `VisionBarcodeValueType.CALENDAR_EVENT`).
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.CALENDAR_EVENT) {
- * console.log(barcode.calendarEvent);
- * }
- * ```
- */
- calendarEvent?: VisionBarcodeCalendarEvent;
-
- /**
- * Gets parsed contact details (set if `valueType` is `VisionBarcodeValueType.CONTACT_INFO`).
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) {
- * console.log(barcode.contactInfo);
- * }
- * ```
- */
- contactInfo?: VisionBarcodeContactInfo;
-
- /**
- * Gets parsed drivers license details (set if `valueType` is `VisionBarcodeValueType.DRIVER_LICENSE`).
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.DRIVER_LICENSE) {
- * console.log(barcode.driverLicense);
- * }
- * ```
- */
- driverLicense?: VisionBarcodeDriverLicense;
-
- /**
- * Gets parsed email details (set if `valueType` is `VisionBarcodeValueType.EMAIL`).
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.EMAIL) {
- * console.log(barcode.email);
- * }
- * ```
- */
- email?: VisionBarcodeEmail;
-
- /**
- * Gets parsed Geo Point details (set if `valueType` is `VisionBarcodeValueType.GEO`).
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.GEO) {
- * console.log(barcode.geoPoint);
- * }
- * ```
- */
- geoPoint?: FirebaseVisionTypes.VisionGeoPoint;
-
- /**
- * Gets parsed phone details (set if `valueType` is `VisionBarcodeValueType.PHONE`).
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.PHONE) {
- * console.log(barcode.phone);
- * }
- * ```
- */
- phone?: VisionBarcodePhone;
-
- /**
- * Gets parsed sms details (set if `valueType` is `VisionBarcodeValueType.SMS`).
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.SMS) {
- * console.log(barcode.sms);
- * }
- * ```
- */
- sms?: VisionBarcodeSms;
-
- /**
- * Gets parsed url details (set if `valueType` is `VisionBarcodeValueType.URL`).
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.URL) {
- * console.log(barcode.url);
- * }
- * ```
- */
- url?: VisionBarcodeUrl;
-
- /**
- * Gets parsed wifi details (set if `valueType` is `VisionBarcodeValueType.WIFI`).
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.WIFI) {
- * console.log(barcode.wifi);
- * }
- * ```
- */
- wifi?: VisionBarcodeWifi;
- }
-
- /**
- * Wifi network parameters from a 'WIFI:' or similar QRCode type.
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.WIFI) {
- * console.log(barcode.wifi);
- * }
- * ```
- */
- export interface VisionBarcodeWifi {
- /**
- * The encryption type of the WIFI. e.g. `VisionBarcodeWifiEncryptionType.WPA`
- *
- * See all types at `VisionBarcodeWifiEncryptionType`.
- */
- encryptionType: number;
-
- /**
- * The password for this WIFI.
- *
- * Returns `null` if nothing found.
- */
- password: string | null;
-
- /**
- * The SSID for this WIFI.
- *
- * Returns `null` if nothing found.
- */
- ssid: string | null;
- }
-
- /**
- * A URL and title from a 'MEBKM:' or similar QRCode type.
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.URL) {
- * console.log(barcode.url);
- * }
- * ```
- */
- export interface VisionBarcodeUrl {
- /**
- * The title for this url.
- *
- * Returns `null` if nothing found.
- */
- title: string | null;
-
- /**
- * The URL.
- *
- * Returns `null` if nothing found.
- */
- url: string | null;
- }
-
- /**
- * An sms message from an 'SMS:' or similar QRCode type.
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.SMS) {
- * console.log(barcode.sms);
- * }
- * ```
- */
- export interface VisionBarcodeSms {
- /**
- * The message text for this SMS.
- *
- * Returns `null` if nothing found.
- */
- message: string | null;
-
- /**
- * The phone number for this SMS.
- *
- * Returns `null` if nothing found.
- */
- phoneNumber: string | number;
- }
-
- /**
- * A driver license or ID card.
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.DRIVER_LICENSE) {
- * console.log(barcode.driverLicense);
- * }
- * ```
- */
- export interface VisionBarcodeDriverLicense {
- /**
- * Gets city of holder's address.
- *
- * Returns `null` if nothing found.
- */
- addressCity: string | null;
-
- /**
- * Gets state of holder's address.
- *
- * Returns `null` if nothing found.
- */
- addressState: string | null;
-
- /**
- * The holder's street address.
- *
- * Returns `null` if nothing found.
- */
- addressStreet: string | null;
-
- /**
- * The zip code of holder's address.
- *
- * Returns `null` if nothing found.
- */
- addressZip: string | null;
-
- /**
- * The birth date of the holder.
- *
- * Returns `null` if nothing found.
- */
- birthDate: string | null;
-
- /**
- * The "DL" for driver licenses, "ID" for ID cards.
- *
- * Returns `null` if nothing found.
- */
- documentType: string | null;
-
- /**
- * The expiry date of the license.
- *
- * Returns `null` if nothing found.
- */
- expiryDate: string | null;
-
- /**
- * The holder's first name.
- *
- * Returns `null` if nothing found.
- */
- firstName: string | null;
-
- /**
- * The holder's gender.
- *
- * Returns `null` if nothing found.
- */
- gender: string | null;
-
- /**
- * The issue date of the license.
- *
- * Returns `null` if nothing found.
- */
- issueDate: string | null;
-
- /**
- * The country in which DL/ID was issued.
- *
- * Returns `null` if nothing found.
- */
- issuingCountry: string | null;
-
- /**
- * The holder's last name.
- *
- * Returns `null` if nothing found.
- */
- lastName: string | null;
-
- /**
- * The driver license ID number.
- *
- * Returns `null` if nothing found.
- */
- licenseNumber: string | null;
-
- /**
- * The holder's middle name.
- *
- * Returns `null` if nothing found.
- */
- middleName: string | null;
- }
-
- /**
- * A calendar event extracted from QRCode.
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.CALENDAR_EVENT) {
- * console.log(barcode.calendarEvent);
- * }
- * ```
- */
- export interface VisionBarcodeCalendarEvent {
- /**
- * The description of the calendar event.
- *
- * Returns `null` if nothing found.
- */
- description: string | null;
-
- /**
- * The end date time of the calendar event.
- *
- * Returns `null` if nothing found.
- */
- end: string | null;
-
- /**
- * The location of the calendar event.
- *
- * Returns `null` if nothing found.
- */
- location: string | null;
-
- /**
- * The organizer of the calendar event.
- *
- * Returns `null` if nothing found.
- */
- organizer: string | null;
-
- /**
- * The start date time of the calendar event.
- *
- * Returns `null` if nothing found.
- */
- start: string | null;
-
- /**
- * The status of the calendar event.
- *
- * Returns `null` if nothing found.
- */
- status: string | null;
-
- /**
- * The summary of the calendar event.
- *
- * Returns `null` if nothing found.
- */
- summary: string | null;
- }
-
- /**
- * A persons or organization's business card. For example a VCARD.
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) {
- * console.log(barcode.contactInfo);
- * }
- * ```
- */
- export interface VisionBarcodeContactInfo {
- /**
- * Get an array of detected urls for the contact.
- *
- * Returns an empty array if nothing found;
- */
- urls: string[];
-
- /**
- * Gets the contact persons title. E.g. `Dr`
- *
- * Returns `null` if no title detected.
- */
- title: string | null;
-
- /**
- * Gets the contact persons organization.
- *
- * Returns `null` if no organization detected.
- */
- organization: string | null;
-
- /**
- * Gets the contact persons phones.
- *
- * Returns an empty array if nothing found.
- */
- phones: VisionBarcodePhone[];
-
- /**
- * Gets the contact persons emails.
- *
- * Returns an empty array if nothing found.
- */
- emails: VisionBarcodeEmail[];
-
- /**
- * Gets the contact persons name.
- */
- name: VisionBarcodePersonName;
-
- /**
- * Gets an array of the contact persons addresses.
- *
- * Returns an empty array if nothing found.
- */
- addresses: VisionBarcodeAddress[];
- }
-
- /**
- * A contacts address.
- */
- export interface VisionBarcodeAddress {
- /**
- * An array of address line strings of the formatted address.
- */
- lines: string[];
-
- /**
- * The address type, e.g. `VisionBarcodeAddressType.WORK`.
- */
- type: number;
- }
-
- /**
- * A persons name, both formatted version and their individual name components.
- */
- export interface VisionBarcodePersonName {
- /**
- * The persons first name.
- *
- * Returns `null` if not found.
- */
- first: string | null;
-
- /**
- * A properly formatted name.
- *
- * Returns `null` if no name components found.
- */
- formatted: string | null;
-
- /**
- * The persons last name.
- *
- * Returns `null` if not found.
- */
- last: string | null;
-
- /**
- * The persons middle name.
- *
- * Returns `null` if not found.
- */
- middle: string | null;
-
- /**
- * The prefix of the name.
- *
- * Returns `null` if not found.
- */
- prefix: string | null;
-
- /**
- * Designates a text string to be set as the kana name in the phonebook.
- */
- pronunciation: string | null;
-
- /**
- * The suffix of the persons name.
- *
- * Returns `null` if not found.
- */
- suffix: string | null;
- }
-
- /**
- * An email message from a 'MAILTO:' or similar QRCode type, or from a ContactInfo/VCARD.
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.EMAIL) {
- * console.log(barcode.email);
- * } else if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) {
- * console.log(barcode.contactInfo.emails[0]);
- * }
- * ```
- */
- export interface VisionBarcodeEmail {
- /**
- * The email address.
- *
- * Returns `null` if non detected for this `type`.
- */
- address: string | null;
-
- /**
- * The email body content.
- *
- * Returns `null` if no body detected.
- */
- body: string | null;
-
- /**
- * The email subject.
- *
- * Returns `null` if no subject was detected.
- */
- subject: string | null;
- }
-
- /**
- * A phone number and it's detected type, e.g. `VisionBarcodePhoneType.MOBILE`
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.PHONE) {
- * console.log(barcode.phone);
- * } else if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) {
- * console.log(barcode.contactInfo.phones[0]);
- * }
- * ```
- */
- export interface VisionBarcodePhone {
- /**
- * The detected phone number.
- *
- * Returns `null` if no number detected for this type.
- */
- number: string | null;
-
- /**
- * Gets type of the phone number, e.g. `VisionBarcodePhoneType.MOBILE`.
- *
- * See also `VisionBarcodePhoneType`.
- */
- type: number;
- }
-
- /**
- * Custom options for barcode detection.
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeFormat, VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath, {
- * barcodeFormats: [VisionBarcodeFormat.QR_CODE]
- * });
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) {
- * console.log(barcode.contactInfo);
- * }
- * ```
- */
- export interface VisionBarcodeDetectorOptions {
- /**
- * Set the barcode formats to detect.
- *
- * Defaults to `VisionBarcodeFormat.ALL_FORMATS`;
- *
- * @param formats Array of `VisionBarcodeFormat` types.
- */
- barcodeFormats?: VisionBarcodeFormat[];
- }
-
- /**
- * Barcode format constants - enumeration of supported barcode formats.
- *
- * Can be used to specify the known type of a barcode before processing; via `VisionBarcodeDetectorOptions.setBarcodeFormats()`
- */
- export enum VisionBarcodeFormat {
- /**
- * Barcode format constant representing the union of all supported formats.
- */
- ALL_FORMATS = 0,
-
- /**
- * Barcode format constant for AZTEC.
- */
- AZTEC = 4096,
-
- /**
- * Barcode format constant for Codabar.
- */
- CODABAR = 8,
-
- /**
- * Barcode format constant for Code 128.
- */
- CODE_128 = 1,
-
- /**
- * Barcode format constant for Code 39.
- */
- CODE_39 = 2,
-
- /**
- * Barcode format constant for Code 93.
- */
- CODE_93 = 4,
-
- /**
- * Barcode format constant for Data Matrix.
- */
- DATA_MATRIX = 16,
-
- /**
- * Barcode format constant for EAN-13.
- */
- EAN_13 = 32,
-
- /**
- * Barcode format constant for EAN-8.
- */
- EAN_8 = 64,
-
- /**
- * Barcode format constant for ITF (Interleaved Two-of-Five).
- */
- ITF = 128,
-
- /**
- * Barcode format constant for PDF-417.
- */
- PDF417 = 2048,
-
- /**
- * Barcode format constant for QR Code.
- */
- QR_CODE = 256,
-
- /**
- * Barcode format unknown to the current SDK, but understood by Google Play services.
- */
- UNKNOWN = -1,
-
- /**
- * Barcode format constant for UPC-A.
- */
- UPC_A = 512,
-
- /**
- * Barcode format constant for UPC-E.
- */
- UPC_E = 1024,
- }
-
- /**
- * Barcode value type constants - enumeration of supported barcode content value types.
- *
- * Can be used with `VisionBarcode.valueType` to determine the barcode content type of a detected barcode.
- *
- * #### Example
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * // check for a calendar event barcode value type
- * if (barcode && barcode.valueType === VisionBarcodeValueType.CALENDAR_EVENT) {
- * console.log(barcode.calendarEvent);
- * }
- * ```
- */
- export enum VisionBarcodeValueType {
- /**
- * Barcode value type constant for calendar events.
- */
- CALENDAR_EVENT = 11,
-
- /**
- * Barcode value type constant for contact information.
- */
- CONTACT_INFO = 1,
-
- /**
- * Barcode value type constant for driver's license data.
- */
- DRIVER_LICENSE = 12,
-
- /**
- * Barcode value type constant for email message details.
- */
- EMAIL = 2,
-
- /**
- * Barcode value type constant for geographic coordinates.
- */
- GEO = 10,
-
- /**
- * Barcode value type constant for ISBNs.
- */
- ISBN = 3,
-
- /**
- * Barcode value type constant for phone numbers.
- */
- PHONE = 4,
-
- /**
- * Barcode value type constant for product codes.
- */
- PRODUCT = 5,
-
- /**
- * Barcode value type constant for SMS details.
- */
- SMS = 6,
-
- /**
- * Barcode value type constant for plain text.
- */
- TEXT = 7,
-
- /**
- * Barcode value type unknown, which indicates the current version of SDK cannot recognize the structure of the barcode.
- */
- UNKNOWN = 0,
-
- /**
- * Barcode value type constant for URLs/bookmarks.
- */
- URL = 8,
-
- /**
- * Barcode value type constant for WiFi access point details.
- */
- WIFI = 9,
- }
-
- /**
- * The type of a address detected in a barcode.
- *
- * Use with `VisionBarcodeAddress.type`.
- */
- export enum VisionBarcodeAddressType {
- /**
- * Unknown type
- */
- UNKNOWN = 0,
-
- /**
- * Address is specified as a WORK address.
- */
- WORK = 1,
-
- /**
- * Address is specified as a HOME address.
- */
- HOME = 2,
- }
-
- /**
- * The type of an email detected in a barcode.
- *
- * Use with `VisionBarcodeEmail.type`.
- */
- export enum VisionBarcodeEmailType {
- /**
- * Unknown type
- */
- UNKNOWN = 0,
-
- /**
- * Email address is specified as a WORK email.
- */
- WORK = 1,
-
- /**
- * Email address is specified as a HOME / personal email.
- */
- HOME = 2,
- }
-
- /**
- * The type of a phone number detected in a barcode.
- *
- * Use with `VisionBarcodePhone.type`.
- */
- export enum VisionBarcodePhoneType {
- /**
- * Fax machine.
- */
- FAX = 3,
-
- /**
- * Home phone.
- */
- HOME = 2,
-
- /**
- * Mobile Phone.
- */
- MOBILE = 4,
-
- /**
- * Unknown type.
- */
- UNKNOWN = 0,
-
- /**
- * Work phone.
- */
- WORK = 1,
- }
-
- /**
- * The type of wifi encryption used for a `VisionBarcodeWifi` instance.
- *
- * Use with `VisionBarcodeWifi.encryptionType`.
- */
- export enum VisionBarcodeWifiEncryptionType {
- /**
- * Wifi has no encryption and is open.
- */
- OPEN = 1,
-
- /**
- * Wifi uses WPA encryption. This includes WPA2.
- */
- WPA = 2,
-
- /**
- * Wifi uses WEP encryption.
- */
- WEP = 3,
- }
-}
diff --git a/packages/ml-vision/lib/VisionBarcodeAddressType.js b/packages/ml-vision/lib/VisionBarcodeAddressType.js
deleted file mode 100644
index 5d4971f5aaa..00000000000
--- a/packages/ml-vision/lib/VisionBarcodeAddressType.js
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- UNKNOWN: 0,
- WORK: 1,
- HOME: 2,
-};
diff --git a/packages/ml-vision/lib/VisionBarcodeEmailType.js b/packages/ml-vision/lib/VisionBarcodeEmailType.js
deleted file mode 100644
index 5d4971f5aaa..00000000000
--- a/packages/ml-vision/lib/VisionBarcodeEmailType.js
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- UNKNOWN: 0,
- WORK: 1,
- HOME: 2,
-};
diff --git a/packages/ml-vision/lib/VisionBarcodeFormat.js b/packages/ml-vision/lib/VisionBarcodeFormat.js
deleted file mode 100644
index 70a08872520..00000000000
--- a/packages/ml-vision/lib/VisionBarcodeFormat.js
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- /**
- * Barcode format constant representing the union of all supported formats.
- */
- ALL_FORMATS: 0,
- /**
- * Barcode format constant for AZTEC.
- */
- AZTEC: 4096,
- /**
- * Barcode format constant for Codabar.
- */
- CODABAR: 8,
- /**
- * Barcode format constant for Code 128.
- */
- CODE_128: 1,
- /**
- * Barcode format constant for Code 39.
- */
- CODE_39: 2,
- /**
- * Barcode format constant for Code 93.
- */
- CODE_93: 4,
- /**
- * Barcode format constant for Data Matrix.
- */
- DATA_MATRIX: 16,
- /**
- * Barcode format constant for EAN-13.
- */
- EAN_13: 32,
- /**
- * Barcode format constant for EAN-8.
- */
- EAN_8: 64,
- /**
- * Barcode format constant for ITF (Interleaved Two-of-Five).
- */
- ITF: 128,
- /**
- * Barcode format constant for PDF-417.
- */
- PDF417: 2048,
- /**
- * Barcode format constant for QR Code.
- */
- QR_CODE: 256,
- /**
- * Barcode format unknown to the current SDK, but understood by Google Play services.
- */
- UNKNOWN: -1,
- /**
- * Barcode format constant for UPC-A.
- */
- UPC_A: 512,
- /**
- * Barcode format constant for UPC-E.
- */
- UPC_E: 1024,
-};
diff --git a/packages/ml-vision/lib/VisionBarcodePhoneType.js b/packages/ml-vision/lib/VisionBarcodePhoneType.js
deleted file mode 100644
index d63a55afc8b..00000000000
--- a/packages/ml-vision/lib/VisionBarcodePhoneType.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- FAX: 3,
- HOME: 2,
- MOBILE: 4,
- UNKNOWN: 0,
- WORK: 1,
-};
diff --git a/packages/ml-vision/lib/VisionBarcodeValueType.js b/packages/ml-vision/lib/VisionBarcodeValueType.js
deleted file mode 100644
index 4b81f202f80..00000000000
--- a/packages/ml-vision/lib/VisionBarcodeValueType.js
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- /**
- * Barcode value type constant for calendar events.
- */
- CALENDAR_EVENT: 11,
-
- /**
- * Barcode value type constant for contact information.
- */
- CONTACT_INFO: 1,
-
- /**
- * Barcode value type constant for driver's license data.
- */
- DRIVER_LICENSE: 12,
-
- /**
- * Barcode value type constant for email message details.
- */
- EMAIL: 2,
-
- /**
- * Barcode value type constant for geographic coordinates.
- */
- GEO: 10,
-
- /**
- * Barcode value type constant for ISBNs.
- */
- ISBN: 3,
-
- /**
- * Barcode value type constant for phone numbers.
- */
- PHONE: 4,
-
- /**
- * Barcode value type constant for product codes.
- */
- PRODUCT: 5,
-
- /**
- * Barcode value type constant for SMS details.
- */
- SMS: 6,
-
- /**
- * Barcode value type constant for plain text.
- */
- TEXT: 7,
-
- /**
- * Barcode value type unknown, which indicates the current version of SDK cannot recognize the structure of the barcode.
- */
- UNKNOWN: 0,
-
- /**
- * Barcode value type constant for URLs/bookmarks.
- */
- URL: 8,
-
- /**
- * Barcode value type constant for WiFi access point details.
- */
- WIFI: 9,
-};
diff --git a/packages/ml-vision/lib/VisionBarcodeWifiEncryptionType.js b/packages/ml-vision/lib/VisionBarcodeWifiEncryptionType.js
deleted file mode 100644
index a2312fa078f..00000000000
--- a/packages/ml-vision/lib/VisionBarcodeWifiEncryptionType.js
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- OPEN: 1,
- WPA: 2,
- WEP: 3,
-};
diff --git a/packages/ml-vision/lib/VisionFaceContourType.js b/packages/ml-vision/lib/VisionFaceContourType.js
deleted file mode 100644
index cdab469370d..00000000000
--- a/packages/ml-vision/lib/VisionFaceContourType.js
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- ALL_POINTS: 1,
- FACE: 2,
- LEFT_EYE: 7,
- LEFT_EYEBROW_BOTTOM: 4,
- LEFT_EYEBROW_TOP: 3,
- LOWER_LIP_BOTTOM: 12,
- LOWER_LIP_TOP: 11,
- NOSE_BOTTOM: 14,
- NOSE_BRIDGE: 13,
- RIGHT_EYE: 8,
- RIGHT_EYEBROW_BOTTOM: 6,
- RIGHT_EYEBROW_TOP: 5,
- UPPER_LIP_BOTTOM: 10,
- UPPER_LIP_TOP: 9,
-};
diff --git a/packages/ml-vision/lib/VisionFaceDetectorClassificationMode.js b/packages/ml-vision/lib/VisionFaceDetectorClassificationMode.js
deleted file mode 100644
index c4770ed9528..00000000000
--- a/packages/ml-vision/lib/VisionFaceDetectorClassificationMode.js
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- NO_CLASSIFICATIONS: 1,
- ALL_CLASSIFICATIONS: 2,
-};
diff --git a/packages/ml-vision/lib/VisionFaceDetectorContourMode.js b/packages/ml-vision/lib/VisionFaceDetectorContourMode.js
deleted file mode 100644
index 6f2ac438bfe..00000000000
--- a/packages/ml-vision/lib/VisionFaceDetectorContourMode.js
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- NO_CONTOURS: 1,
- ALL_CONTOURS: 2,
-};
diff --git a/packages/ml-vision/lib/VisionFaceDetectorLandmarkMode.js b/packages/ml-vision/lib/VisionFaceDetectorLandmarkMode.js
deleted file mode 100644
index 0bdc0bf212c..00000000000
--- a/packages/ml-vision/lib/VisionFaceDetectorLandmarkMode.js
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- NO_LANDMARKS: 1,
- ALL_LANDMARKS: 2,
-};
diff --git a/packages/ml-vision/lib/VisionFaceDetectorPerformanceMode.js b/packages/ml-vision/lib/VisionFaceDetectorPerformanceMode.js
deleted file mode 100644
index 0d2a1aa6ac0..00000000000
--- a/packages/ml-vision/lib/VisionFaceDetectorPerformanceMode.js
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- FAST: 1,
- ACCURATE: 2,
-};
diff --git a/packages/ml-vision/lib/VisionFaceLandmarkType.js b/packages/ml-vision/lib/VisionFaceLandmarkType.js
deleted file mode 100644
index b4b13dbc81f..00000000000
--- a/packages/ml-vision/lib/VisionFaceLandmarkType.js
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-export default {
- LEFT_CHEEK: 1,
- LEFT_EAR: 3,
- LEFT_EYE: 4,
- MOUTH_BOTTOM: 0,
- MOUTH_LEFT: 5,
- MOUTH_RIGHT: 11,
- NOSE_BASE: 6,
- RIGHT_CHEEK: 7,
- RIGHT_EAR: 9,
- RIGHT_EYE: 10,
-};
diff --git a/packages/ml-vision/lib/VisionPoint.js b/packages/ml-vision/lib/VisionPoint.js
deleted file mode 100644
index 1e55ef745e0..00000000000
--- a/packages/ml-vision/lib/VisionPoint.js
+++ /dev/null
@@ -1,83 +0,0 @@
-// TODO introduce in a later release if required
-// /* eslint-disable no-bitwise */
-//
-// /*
-// * Copyright (c) 2016-present Invertase Limited & Contributors
-// *
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this library except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// *
-// */
-//
-// export default class VisionPoint {
-// constructor(x, y) {
-// this._x = x || 0;
-// this._y = y || 0;
-// }
-//
-// /**
-// * Set the point's x and y coordinates
-// *
-// * @param x
-// * @param y
-// */
-// set(x, y) {
-// // todo arg validate number for all args
-// this._x = x;
-// this._y = y;
-// }
-//
-// /**
-// * Copy the coordinates from the source point into this point.
-// *
-// * @param otherPoint VisionPoint
-// */
-// setFromPoint(otherPoint) {
-// // todo arg instance of VisionPoint check
-// this.set(otherPoint.x, otherPoint.y);
-// }
-//
-// get x() {
-// return this._x;
-// }
-//
-// get y() {
-// return this._y;
-// }
-//
-// /**
-// * Returns true if this VisionPoint has the same coordinates as the specified VisionPoint.
-// *
-// * @param otherPoint
-// * @returns {boolean}
-// */
-// isEqual(otherPoint) {
-// // todo arg instance of VisionPoint check
-// return this.toString() === otherPoint.toString();
-// }
-//
-// /**
-// * Returns this point as an array of [x, y]
-// * @returns {*[]}
-// */
-// toArray() {
-// return [this.x, this.y];
-// }
-//
-// /**
-// * Returns this point as an string, e.g VisionPoint[x, y]
-// * @returns {string}
-// */
-// toString() {
-// return `Point[${this.x}, ${this.y}]`;
-// }
-// }
diff --git a/packages/ml-vision/lib/VisionRectangle.js b/packages/ml-vision/lib/VisionRectangle.js
deleted file mode 100644
index 8e781805bbb..00000000000
--- a/packages/ml-vision/lib/VisionRectangle.js
+++ /dev/null
@@ -1,206 +0,0 @@
-// TODO introduce in a later release if required
-// /* eslint-disable no-bitwise */
-//
-// /*
-// * Copyright (c) 2016-present Invertase Limited & Contributors
-// *
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this library except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// *
-// */
-//
-// export default class VisionRectangle {
-// /**
-// *
-// * @param left
-// * @param top
-// * @param right
-// * @param bottom
-// */
-// constructor(left, top, right, bottom) {
-// this._left = left || 0;
-// this._top = top || 0;
-// this._right = right || 0;
-// this._bottom = bottom || 0;
-// }
-//
-// /**
-// * Set the rectangle's coordinates to the specified values.
-// *
-// * @param left
-// * @param top
-// * @param right
-// * @param bottom
-// */
-// set(left, top, right, bottom) {
-// // todo arg validate number for all args
-// // todo arg validate left <= right
-// // todo arg validate top <= bottom
-// this._left = left;
-// this._top = top;
-// this._right = right;
-// this._bottom = bottom;
-// }
-//
-// /**
-// * Copy the coordinates from the source rectangle into this rectangle.
-// *
-// * @param otherRect VisionRectangle
-// */
-// setFromRectangle(otherRect) {
-// // todo arg instance of VisionRectangle check
-// this.set(otherRect.left, otherRect.top, otherRect.right, otherRect.bottom);
-// }
-//
-// get top() {
-// return this._top;
-// }
-//
-// get left() {
-// return this._left;
-// }
-//
-// get bottom() {
-// return this._bottom;
-// }
-//
-// get right() {
-// return this._right;
-// }
-//
-// get width() {
-// return this._right - this._left;
-// }
-//
-// get height() {
-// return this._bottom - this._top;
-// }
-//
-// /**
-// * Returns whether the first rectangle contains the second rectangle.
-// * @param otherRect VisionRectangle
-// * @returns {boolean}
-// */
-// containsRectangle(otherRect) {
-// // todo arg instance of VisionRectangle check
-// return (
-// !this.isEmpty() &&
-// this.left <= otherRect.left &&
-// this.top <= otherRect.top &&
-// this.right >= otherRect.right &&
-// this.bottom >= otherRect.bottom
-// );
-// }
-//
-// /**
-// * Returns whether a rectangle contains a specified point.
-// *
-// * @param x
-// * @param y
-// * @returns {boolean}
-// */
-// containsPoint(x, y) {
-// return !this.isEmpty() && x >= this.left && x < this.right && y >= this.top && y < this.bottom;
-// }
-//
-// /**
-// * Returns whether two rectangles intersect.
-// *
-// * @param otherRect VisionRectangle
-// * @returns {boolean}
-// */
-// intersectsRectangle(otherRect) {
-// // todo arg instance of VisionRectangle check
-// return (
-// this.left < otherRect.right &&
-// otherRect.left < this.right &&
-// this.top < otherRect.bottom &&
-// otherRect.top < this.bottom
-// );
-// }
-//
-// /**
-// * If the rectangle specified intersects this
-// * rectangle, return true and set this rectangle to that intersection,
-// * otherwise return false and do not change this rectangle. No check is
-// * performed to see if either rectangle is empty. Note: To just test for
-// * intersection, use {@link #intersectsRectangle(otherRect: VisionRectangle)}.
-// *
-// * @param otherRect
-// * @returns {boolean}
-// */
-// intersectRectangle(otherRect) {
-// // todo arg instance of VisionRectangle check
-// if (
-// this.left < otherRect.right &&
-// otherRect.left < this.right &&
-// this.top < otherRect.bottom &&
-// otherRect.top < this.bottom
-// ) {
-// if (this.left < otherRect.left) this._left = otherRect.left;
-// if (this.top < otherRect.top) this._top = otherRect.top;
-// if (this.right > otherRect.right) this._right = otherRect.right;
-// if (this.bottom > otherRect.bottom) this._bottom = otherRect.bottom;
-// return true;
-// }
-// return false;
-// }
-//
-// /**
-// * Returns the horizontal center of the rectangle.
-// */
-// centerX() {
-// return (this.left + this.right) >> 1;
-// }
-//
-// /**
-// * Returns the vertical center of the rectangle.
-// */
-// centerY() {
-// return (this.top + this.bottom) >> 1;
-// }
-//
-// /**
-// * Returns whether a rectangle has zero width or height
-// * @returns {boolean}
-// */
-// isEmpty() {
-// return this.left >= this.right || this.top >= this.bottom;
-// }
-//
-// /**
-// * Returns true if this VisionRectangle has the same bounding box as the specified VisionRectangle.
-// *
-// * @param otherRect
-// * @returns {boolean}
-// */
-// isEqual(otherRect) {
-// // todo arg instance of VisionPoint check
-// return this.toString() === otherRect.toString();
-// }
-//
-// /**
-// * Returns this rectangle as an array of [left, top, right, bottom]
-// * @returns {*[]}
-// */
-// toArray() {
-// return [this.left, this.top, this.right, this.bottom];
-// }
-//
-// /**
-// * Returns this rectangle as an string, e.g VisionRectangle[left, top, right, bottom]
-// * @returns {string}
-// */
-// toString() {
-// return `Rectangle[${this.left}, ${this.top}, ${this.right}, ${this.bottom}]`;
-// }
-// }
diff --git a/packages/ml-vision/lib/index.d.ts b/packages/ml-vision/lib/index.d.ts
deleted file mode 100644
index dd18c285783..00000000000
--- a/packages/ml-vision/lib/index.d.ts
+++ /dev/null
@@ -1,1236 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import { ReactNativeFirebase } from '@react-native-firebase/app';
-import { MLKitVision } from './BarcodeDetectorTypes';
-/**
- * Firebase ML Kit package for React Native.
- *
- * #### Example 1
- *
- * Access the firebase export from the `ml-vision` package:
- *
- * ```js
- * import { firebase } from '@react-native-firebase/ml-vision';
- *
- * // firebase.vision().X
- * ```
- *
- * #### Example 2
- *
- * Using the default export from the `ml-vision` package:
- *
- * ```js
- * import vision from '@react-native-firebase/ml-vision';
- *
- * // vision().X
- * ```
- *
- * #### Example 3
- *
- * Using the default export from the `app` package:
- *
- * ```js
- * import firebase from '@react-native-firebase/app';
- * import '@react-native-firebase/ml-vision';
- *
- * // firebase.vision().X
- * ```
- *
- * @firebase ml-vision
- */
-export namespace FirebaseVisionTypes {
- import FirebaseModule = ReactNativeFirebase.FirebaseModule;
-
- export interface Statics {
- VisionCloudTextRecognizerModelType: typeof VisionCloudTextRecognizerModelType;
- VisionFaceDetectorClassificationMode: typeof VisionFaceDetectorClassificationMode;
- VisionFaceDetectorContourMode: typeof VisionFaceDetectorContourMode;
- VisionFaceDetectorLandmarkMode: typeof VisionFaceDetectorLandmarkMode;
- VisionFaceDetectorPerformanceMode: typeof VisionFaceDetectorPerformanceMode;
- VisionFaceLandmarkType: typeof VisionFaceLandmarkType;
- VisionFaceContourType: typeof VisionFaceContourType;
- VisionCloudLandmarkRecognizerModelType: typeof VisionCloudLandmarkRecognizerModelType;
- VisionDocumentTextRecognizedBreakType: typeof VisionDocumentTextRecognizedBreakType;
- VisionBarcodeFormat: typeof MLKitVision.VisionBarcodeFormat;
- VisionBarcodeValueType: typeof MLKitVision.VisionBarcodeValueType;
- VisionBarcodeAddressType: typeof MLKitVision.VisionBarcodeAddressType;
- VisionBarcodeEmailType: typeof MLKitVision.VisionBarcodeEmailType;
- VisionBarcodePhoneType: typeof MLKitVision.VisionBarcodePhoneType;
- VisionBarcodeWifiEncryptionType: typeof MLKitVision.VisionBarcodeWifiEncryptionType;
- }
-
- /**
- * Options for vision face detector.
- */
- export interface VisionFaceDetectorOptions {
- /**
- * Indicates whether to run additional classifiers for characterizing attributes such as "smiling" and "eyes open".
- *
- * Defaults to `VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS`.
- *
- * #### Example
- *
- * ```js
- * const faces = await firebase.vision().faceDetectorProcessImage(filePath, {
- * classificationMode: VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS,
- * });
- * ```
- */
- classificationMode?:
- | VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS
- | VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS;
-
- /**
- * Sets whether to detect no contours or all contours. Processing time increases as the number of contours to search
- * for increases, so detecting all contours will increase the overall detection time. Note that it would return up
- * to 5 faces contours.
- *
- * Defaults to `VisionFaceDetectorContourMode.NO_CONTOURS`.
- *
- * #### Example
- *
- * ```js
- * const faces = await firebase.vision().faceDetectorProcessImage(filePath, {
- * contourMode: VisionFaceDetectorContourMode.ALL_CONTOURS,
- * });
- * ```
- */
- contourMode?:
- | VisionFaceDetectorContourMode.NO_CONTOURS
- | VisionFaceDetectorContourMode.ALL_CONTOURS;
-
- /**
- * Sets whether to detect no landmarks or all landmarks. Processing time increases as the number of landmarks to
- * search for increases, so detecting all landmarks will increase the overall detection time. Detecting landmarks
- * can improve pose estimation.
- *
- * Defaults to `VisionFaceDetectorLandmarkMode.NO_LANDMARKS`.
- *
- * #### Example
- *
- * ```js
- * const faces = await firebase.vision().faceDetectorProcessImage(filePath, {
- * landmarkMode: VisionFaceDetectorLandmarkMode.ALL_LANDMARKS,
- * });
- * ```
- */
- landmarkMode?:
- | VisionFaceDetectorLandmarkMode.NO_LANDMARKS
- | VisionFaceDetectorLandmarkMode.ALL_LANDMARKS;
-
- /**
- * Sets the smallest desired face size, expressed as a proportion of the width of the head to the image width. For
- * example, if a value of 0.1 is specified then the smallest face to search for is roughly 10% of the width of the
- * image being searched.
- *
- * Setting the min face size is a performance vs. accuracy trade-off: setting the face size smaller will enable the
- * detector to find smaller faces but detection will take longer; setting the face size larger will exclude smaller
- * faces but will run faster.
- *
- * This is not a hard limit on face size; the detector may find faces slightly smaller than specified.
- *
- * Defaults to 0.1.
- *
- * #### Example
- *
- * ```js
- * const faces = await firebase.vision().faceDetectorProcessImage(filePath, {
- * minFaceSize: 0.5,
- * });
- * ```
- */
- minFaceSize?: number;
-
- /**
- * Extended option for controlling additional accuracy / speed trade-offs in performing face detection. In general,
- * choosing the more accurate mode will generally result in longer runtime, whereas choosing the faster mode will
- * generally result in detecting fewer faces.
- *
- * Defaults to `VisionFaceDetectorPerformanceMode.FAST`.
- *
- * #### Example
- *
- * ```js
- * const faces = await firebase.vision().faceDetectorProcessImage(filePath, {
- * performanceMode: VisionFaceDetectorPerformanceMode.ACCURATE,
- * });
- * ```
- */
- performanceMode?:
- | VisionFaceDetectorPerformanceMode.FAST
- | VisionFaceDetectorPerformanceMode.ACCURATE;
- }
-
- /**
- * Options for on device image labeler. Confidence threshold could be provided for the label detection.
- *
-
- */
- export interface VisionImageLabelerOptions {
- /**
- * Sets confidence threshold of detected labels. Only labels detected with confidence higher than this threshold are returned.
- *
- * For example, if the confidence threshold is set to 0.7, only labels with confidence >= 0.7 would be returned.
- *
- * Defaults to 0.5.
- *
- * #### Example
- *
- * ```js
- * const labels = await firebase.vision().imageLabelerProcessImage(filePath, {
- * confidenceThreshold: 0.8,
- * });
- * ```
- */
- confidenceThreshold?: number;
- }
-
- /**
- * Options for cloud image labeler. Confidence threshold could be provided for the label detection.
- *
- * For example, if the confidence threshold is set to 0.7, only labels with confidence >= 0.7 would be returned. The default threshold is 0.5.
- *
- * Note: at most 20 labels will be returned for cloud image labeler.
- */
- export interface VisionCloudImageLabelerOptions {
- /**
- * Only allow registered application instances with matching certificate fingerprint to use Cloud Vision API.
- *
- * > Do not set this for debug build if you use simulators to test.
- *
- * #### Example
- *
- * ```js
- * await firebase.vision().cloudImageLabelerProcessImage(filePath, {
- * enforceCertFingerprintMatch: true,
- * });
- * ```
- */
- enforceCertFingerprintMatch?: boolean;
-
- /**
- * Sets confidence threshold in the range of [0.0 - 1.0] of detected labels. Only labels detected with confidence higher than this threshold are returned.
- *
- * Defaults to 0.5.
- *
- * #### Example
- *
- * ```js
- * await firebase.vision().cloudImageLabelerProcessImage(filePath, {
- * confidenceThreshold: 0.8,
- * });
- * ```
- */
- confidenceThreshold?: number;
-
- /**
- * API key to use for Cloud Vision API. If not set, the default API key from `firebase.app()` will be used.
- *
- * #### Example
- *
- * ```js
- * await firebase.vision().cloudImageLabelerProcessImage(filePath, {
- * apiKeyOverride: 'xyz123',
- * });
- * ```
- *
- * @ios
- */
- apiKeyOverride?: string;
- }
-
- /**
- * Detector for finding popular natural and man-made structures within an image.
- */
- export interface VisionCloudLandmarkRecognizerOptions {
- /**
- * Only allow registered application instances with matching certificate fingerprint to use Cloud Vision API.
- *
- * > Do not set this for debug build if you use simulators to test.
- */
- enforceCertFingerprintMatch?: boolean;
-
- /**
- * Sets the maximum number of results of this type.
- *
- * Defaults to 10.
- */
- maxResults?: number;
-
- /**
- * Sets model type for the detection.
- *
- * Defaults to `VisionCloudLandmarkRecognizerModelType.STABLE_MODEL`.
- */
- modelType?:
- | VisionCloudLandmarkRecognizerModelType.STABLE_MODEL
- | VisionCloudLandmarkRecognizerModelType.LATEST_MODEL;
-
- /**
- * API key to use for Cloud Vision API. If not set, the default API key from `firebase.app()` will be used.
- *
- * @ios
- */
- apiKeyOverride?: string;
- }
-
- /**
- * Model types for cloud landmark recognition.
- */
- export enum VisionCloudLandmarkRecognizerModelType {
- /**
- * Stable model would be used.
- */
- STABLE_MODEL = 1,
-
- /**
- * Latest model would be used.
- */
- LATEST_MODEL = 2,
- }
-
- /**
- * Options for cloud text recognizer.
- */
- export interface VisionCloudTextRecognizerOptions {
- /**
- * Only allow registered application instances with matching certificate fingerprint to use Cloud Vision API.
- *
- * > Do not set this for debug build if you use simulators to test.
- *
- * #### Example
- *
- * ```js
- * await firebase.vision().cloudTextRecognizerProcessImage(filePath, {
- * enforceCertFingerprintMatch: true,
- * });
- * ```
- */
- enforceCertFingerprintMatch?: boolean;
-
- /**
- * Sets model type for cloud text recognition. The two models SPARSE_MODEL and DENSE_MODEL handle different text densities in an image.
- *
- * See `VisionCloudTextRecognizerModelType` for types.
- *
- * Defaults to `VisionCloudTextRecognizerModelType.SPARSE_MODEL`.
- *
- * #### Example
- *
- * ```js
- * import {
- * firebase,
- * VisionCloudTextRecognizerModelType,
- * } from '@react-native-firebase/ml-vision';
- *
- * await firebase.vision().cloudTextRecognizerProcessImage(filePath, {
- * modelType: VisionCloudTextRecognizerModelType.DENSE_MODEL,
- * });
- * ```
- */
- modelType?:
- | VisionCloudTextRecognizerModelType.SPARSE_MODEL
- | VisionCloudTextRecognizerModelType.DENSE_MODEL;
-
- /**
- * Sets language hints. In most cases, not setting this yields the best results since it enables automatic language
- * detection. For languages based on the Latin alphabet, setting language hints is not needed. In rare cases, when
- * the language of the text in the image is known, setting a hint will help get better results (although it will be a
- * significant hindrance if the hint is wrong).
- *
- * Each language code must be a BCP-47 identifier. See [Google Cloud OCR Language Support](https://cloud.google.com/vision/docs/languages) for more information.
- *
- * #### Example
- *
- * ```js
- * await firebase.vision().cloudTextRecognizerProcessImage(filePath, {
- * languageHints: ['fr', 'de'],
- * });
- * ```
- */
- languageHints?: string[];
-
- /**
- * API key to use for Cloud Vision API. If not set, the default API key from `firebase.app()` will be used.
- *
- * #### Example
- *
- * ```js
- * await firebase.vision().cloudTextRecognizerProcessImage(filePath, {
- * apiKeyOverride: 'xyz123',
- * });
- * ```
- *
- * @ios
- */
- apiKeyOverride?: string;
- }
-
- /**
- * Options for the cloud document text recognizer.
- */
- export interface VisionCloudDocumentTextRecognizerOptions {
- /**
- * Only allow registered application instances with matching certificate fingerprint to use Cloud Vision API.
- *
- * > Do not set this for debug build if you use simulators to test.
- *
- * #### Example
- *
- * ```js
- * await firebase.vision().cloudTextRecognizerProcessImage(filePath, {
- * enforceCertFingerprintMatch: true,
- * });
- * ```
- */
- enforceCertFingerprintMatch?: boolean;
-
- /**
- * Sets language hints. In most cases, not setting this yields the best results since it enables automatic language
- * detection. For languages based on the Latin alphabet, setting language hints is not needed. In rare cases, when
- * the language of the text in the image is known, setting a hint will help get better results (although it will be a
- * significant hindrance if the hint is wrong).
- *
- * Each language code must be a BCP-47 identifier. See [Google Cloud OCR Language Support](https://cloud.google.com/vision/docs/languages) for more information.
- *
- * #### Example
- *
- * ```js
- * await firebase.vision().cloudTextRecognizerProcessImage(filePath, {
- * languageHints: ['fr', 'de'],
- * });
- * ```
- */
- languageHints?: string[];
-
- /**
- * API key to use for Cloud Vision API. If not set, the default API key from `firebase.app()` will be used.
- *
- * #### Example
- *
- * ```js
- * await firebase.vision().cloudTextRecognizerProcessImage(filePath, {
- * apiKeyOverride: 'xyz123',
- * });
- * ```
- *
- * @ios
- */
- apiKeyOverride?: string;
- }
-
- /**
- * The cloud model type used for in VisionCloudTextRecognizerOptions & VisionCloudDocumentTextRecognizerOptions
- *
- * Defaults to `SPARSE_MODEL`
- */
- export enum VisionCloudTextRecognizerModelType {
- /**
- * Dense model type. It is more suitable for well-formatted dense text.
- */
- SPARSE_MODEL = 1,
- /**
- * Sparse model type. It is more suitable for sparse text.
- */
- DENSE_MODEL = 2,
- }
-
- /**
- * Indicates whether to run additional classifiers for characterizing attributes such as "smiling" and "eyes open".
- */
- export enum VisionFaceDetectorClassificationMode {
- /**
- * Disables collection of classifier information.
- */
- NO_CLASSIFICATIONS = 1,
-
- /**
- * Enables collection of classifier information.
- */
- ALL_CLASSIFICATIONS = 2,
- }
-
- /**
- * Sets whether to detect contours or not. Processing time increases as the number of contours to search for increases,
- * so detecting all contours will increase the overall detection time.
- */
- export enum VisionFaceDetectorContourMode {
- /**
- * Disables collection of contour information.
- */
- NO_CONTOURS = 1,
-
- /**
- * Enables collection of contour information.
- */
- ALL_CONTOURS = 2,
- }
-
- /**
- * Sets whether to detect no landmarks or all landmarks. Processing time increases as the number of landmarks to
- * search for increases, so detecting all landmarks will increase the overall detection time. Detecting
- * landmarks can improve pose estimation.
- */
- export enum VisionFaceDetectorLandmarkMode {
- /**
- * Disables collection of landmark information.
- */
- NO_LANDMARKS = 1,
-
- /**
- * Enables collection of landmark information.
- */
- ALL_LANDMARKS = 2,
- }
-
- /**
- * Extended option for controlling additional accuracy / speed trade-offs in performing face detection. In general,
- * choosing the more accurate mode will generally result in longer runtime, whereas choosing the faster
- * mode will generally result in detecting fewer faces.
- */
- export enum VisionFaceDetectorPerformanceMode {
- /**
- * Indicates a preference for speed in extended settings that may make an accuracy vs. speed trade-off. This will
- * tend to detect fewer faces and may be less precise in determining values such as position, but will run faster.
- */
- FAST = 1,
-
- /**
- * Indicates a preference for accuracy in extended settings that may make an accuracy vs. speed trade-off.
- * This will tend to detect more faces and may be more precise in determining values such as position, at the cost
- * of speed.
- */
- ACCURATE = 2,
- }
-
- /**
- * A Rectangle holds four number coordinates relative to the processed image.
- * Rectangle are represented as [left, top, right, bottom].
- *
- * Used by Vision Text Recognizer, Face Detector & Landmark Recognition APIs.
- */
- export type VisionRectangle = [number, number, number, number];
-
- /**
- * A point holds two number coordinates relative to the processed image.
- * Points are represented as [x, y].
- *
- * Used by Vision Text Recognizer, Face Detector & Landmark Recognition APIs.
- */
- export type VisionPoint = [number, number];
-
- /**
- * A hierarchical representation of texts recognized in an image.
- */
- export interface VisionText {
- /**
- * Retrieve the recognized text as a string.
- */
- text: string;
-
- /**
- * Gets an array `VisionTextBlock`, which is a block of text that can be further decomposed to an array of `VisionTextLine`.
- */
- blocks: VisionTextBlock[];
- }
-
- /**
- * Represents a block of text.
- */
- export interface VisionDocumentTextBlock extends VisionDocumentTextBase {
- /**
- * Gets an Array of `VisionDocumentTextParagraph`s that make up this block.
- */
- paragraphs: VisionDocumentTextParagraph[];
- }
-
- /**
- * A structural unit of text representing a number of words in certain order.
- */
- export interface VisionDocumentTextParagraph extends VisionDocumentTextBase {
- /**
- * Gets an Array of `VisionDocumentTextWord`s that make up this paragraph.
- *
- * Returns an empty list if no Word is found.
- */
- words: VisionDocumentTextWord[];
- }
-
- /**
- * A single word representation.
- */
- export interface VisionDocumentTextWord extends VisionDocumentTextBase {
- /**
- * Gets an Array of `VisionDocumentTextSymbol`s that make up this word.
- * The order of the symbols follows the natural reading order.
- */
- symbols: VisionDocumentTextSymbol[];
- }
-
- /**
- * A single symbol representation.
- */
- export type VisionDocumentTextSymbol = VisionDocumentTextBase;
-
- /**
- * Enum representing the detected break type.
- */
- export enum VisionDocumentTextRecognizedBreakType {
- /**
- * Line-wrapping break.
- */
- EOL_SURE_SPACE = 3,
-
- /**
- * End-line hyphen that is not present in text; does not co-occur with `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
- */
- HYPHEN = 4,
-
- /**
- * Line break that ends a paragraph.
- */
- LINE_BREAK = 5,
-
- /**
- * Regular space.
- */
- SPACE = 1,
-
- /**
- * Sure space (very wide).
- */
- SURE_SPACE = 2,
-
- /**
- * Unknown break label type.
- */
- UNKNOWN = 0,
- }
-
- /**
- * A recognized break is the detected start or end of a structural component.
- */
- export interface VisionDocumentTextRecognizedBreak {
- /**
- * Gets detected break type.
- */
- breakType: VisionDocumentTextRecognizedBreakType;
-
- /**
- * Returns true if break prepends an element.
- */
- isPrefix: boolean;
- }
- /**
- * A shared type that all VisionDocumentText components inherit from
- */
- export interface VisionDocumentTextBase {
- /**
- * Gets the recognized text as a string. Returned in reading order for the language. For Latin, this is top to bottom within a `VisionTextBlock`, and left-to-right within a `VisionTextLine`.
- */
- text: string;
-
- /**
- * The confidence of the recognized text. It only return valid result from cloud recognizers. For on-device text recognition, the confidence is always null.
- */
- confidence: null | number;
-
- /**
- * Gets a list of recognized languages. (Cloud API only. On-Device returns empty array)
- *
- * A language is the BCP-47 language code, such as "en-US" or "sr-Latn".
- */
- recognizedLanguages: string[];
-
- /**
- * Returns the bounding rectangle of the detected text.
- */
- boundingBox: VisionRectangle;
-
- /**
- * Gets the recognized break - the detected start or end of a structural component.
- */
- recognizedBreak: VisionDocumentTextRecognizedBreak;
- }
-
- /**
- * A hierarchical representation of document text recognized in an image.
- */
- export interface VisionDocumentText {
- /**
- * Retrieve the recognized text as a string.
- */
- text: string;
-
- /**
- * Gets an array `VisionTextBlock`, which is a block of text that can be further decomposed to an array of `VisionDocumentTextParagraph`.
- */
- blocks: VisionDocumentTextBlock[];
- }
-
- /**
- * A shared type that all Vision Text components inherit from
- */
- export interface VisionTextBase {
- /**
- * Gets the recognized text as a string. Returned in reading order for the language. For Latin, this is top to bottom within a `VisionTextBlock`, and left-to-right within a `VisionTextLine`.
- */
- text: string;
-
- /**
- * The confidence of the recognized text. It only return valid result from cloud recognizers. For on-device text recognition, the confidence is always null.
- */
- confidence: null | number;
-
- /**
- * Gets a list of recognized languages. (Cloud API only. On-Device returns empty array)
- *
- * A language is the BCP-47 language code, such as "en-US" or "sr-Latn".
- */
- recognizedLanguages: string[];
-
- /**
- * Returns the bounding rectangle of the detected text.
- */
- boundingBox: VisionRectangle;
-
- /**
- * Gets the four corner points in clockwise direction starting with top-left. Due to the possible perspective distortions, this is not necessarily a rectangle. Parts of the region could be outside of the image.
- */
- cornerPoints: VisionPoint[];
- }
-
- /**
- * Represents a block of text (similar to a paragraph).
- */
- export interface VisionTextBlock extends VisionTextBase {
- /**
- * Gets an Array of VisionTextLine's that make up this text block.
- */
- lines: VisionTextLine[];
- }
-
- /**
- * Represents a line of text.
- */
- export interface VisionTextLine extends VisionTextBase {
- /**
- * Gets an Array of VisionTextElement's that make up this text block.
- *
- * An element is roughly equivalent to a space-separated "word" in most Latin languages, or a character in others. For instance, if a word is split between two lines by a hyphen, each part is encoded as a separate Element.
- */
- elements: VisionTextElement[];
- }
-
- /**
- * Roughly equivalent to a space-separated "word" in most Latin languages, or a character in others. For instance, if a word is split between two lines by a hyphen, each part is encoded as a separate Element.
- */
- export type VisionTextElement = VisionTextBase;
-
- /**
- * Represents an image label return from `imageLabelerProcessImage()` and `cloudImageLabelerProcessImage()`.
- */
- export interface VisionImageLabel {
- /**
- * Returns a detected label from the given image. The label returned here is in English only.
- *
- * Use `entityId` to retrieve a unique id.
- */
- text: string;
-
- /**
- * Returns an opaque entity ID. IDs are available in [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/).
- */
- entityId: string;
-
- /**
- * Gets overall confidence of the result.
- *
- * Range between 0 (low confidence) and 1 (high confidence).
- */
- confidence: number;
- }
-
- /**
- * Represents a face returned from `faceDetectorProcessImage()`.
- */
- export interface VisionFace {
- /**
- * Returns the axis-aligned bounding rectangle of the detected face.
- */
- boundingBox: VisionRectangle;
-
- /**
- * Represent a face contour. A contour is a list of points on a detected face, such as the mouth.
- *
- * When 'left' and 'right' are used, they are relative to the subject in the image. For example, the `LEFT_EYE`
- * landmark is the subject's left eye, not the eye that is on the left when viewing the image.
- */
- faceContours: VisionFaceContour[];
-
- /**
- * Returns the rotation of the face about the vertical axis of the image. Positive euler y is when the face turns
- * toward the right side of the of the image that is being processed.
- */
- headEulerAngleY: number;
-
- /**
- * Returns the rotation of the face about the axis pointing out of the image. Positive euler z is a
- * counter-clockwise rotation within the image plane.
- */
- headEulerAngleZ: number;
-
- /**
- * Returns an array of `VisionFaceLandmark`.
- *
- * Returns an empty array if the landmark mode has not been enabled via `setLandmarkMode()`.
- */
- landmarks: VisionFaceLandmark[];
-
- /**
- * Returns a value between 0.0 and 1.0 giving a probability that the face's left eye is open.
- *
- * Returns -1 if the classification mode has not been enabled via `setClassificationMode()`.
- */
- leftEyeOpenProbability: number;
-
- /**
- * Returns a value between 0.0 and 1.0 giving a probability that the face's right eye is open.
- *
- * Returns -1 if the classification mode has not been enabled via `setClassificationMode()`.
- */
- rightEyeOpenProbability: number;
-
- /**
- * Returns a value between 0.0 and 1.0 giving a probability that the face is smiling.
- *
- * Returns -1 if the classification mode has not been enabled via `setClassificationMode()`.
- */
- smilingProbability: number;
- }
-
- /**
- * Represent a face landmark. A landmark is a point on a detected face, such as an eye, nose, or mouth.
- *
- * When 'left' and 'right' are used, they are relative to the subject in the image. For example, the `LEFT_EYE` landmark
- * is the subject's left eye, not the eye that is on the left when viewing the image.
- */
- export interface VisionFaceLandmark {
- /**
- * Returns the landmark type.
- */
- type: VisionFaceLandmarkType;
-
- /**
- * Gets a 2D point for landmark position, where (0, 0) is the upper-left corner of the image.
- */
- position: VisionPoint[];
- }
-
- /**
- * Landmark types for a face.
- */
- export enum VisionFaceLandmarkType {
- /**
- * The midpoint between the subject's left mouth corner and the outer corner of the subject's left eye.
- */
- LEFT_CHEEK = 1,
-
- /**
- * The midpoint of the subject's left ear tip and left ear lobe.
- */
- LEFT_EAR = 3,
-
- /**
- * The center of the subject's left eye cavity.
- */
- LEFT_EYE = 4,
-
- /**
- * The center of the subject's bottom lip.
- */
- MOUTH_BOTTOM = 0,
-
- /**
- * The subject's left mouth corner where the lips meet.
- */
- MOUTH_LEFT = 5,
-
- /**
- * The subject's right mouth corner where the lips meet.
- */
- MOUTH_RIGHT = 11,
-
- /**
- * The midpoint between the subject's nostrils where the nose meets the face.
- */
- NOSE_BASE = 6,
-
- /**
- * The midpoint between the subject's right mouth corner and the outer corner of the subject's right eye.
- */
- RIGHT_CHEEK = 7,
-
- /**
- * The midpoint of the subject's right ear tip and right ear lobe.
- */
- RIGHT_EAR = 9,
-
- /**
- * The center of the subject's right eye cavity.
- */
- RIGHT_EYE = 10,
- }
-
- /**
- * Represent a face contour. A contour is a list of points on a detected face, such as the mouth.
- * When 'left' and 'right' are used, they are relative to the subject in the image. For example, the `LEFT_EYE` landmark
- * is the subject's left eye, not the eye that is on the left when viewing the image.
- */
- export interface VisionFaceContour {
- /**
- * Returns the contour type.
- */
- type: VisionFaceContourType;
-
- /**
- * Gets a list of 2D points for this face contour, where (0, 0) is the upper-left corner of the image. The point is
- * guaranteed to be within the bounds of the image.
- */
- points: VisionPoint[];
- }
-
- /**
- * Countour type for a face.
- */
- export enum VisionFaceContourType {
- /**
- * All points of a face contour.
- */
- ALL_POINTS = 1,
-
- /**
- * The outline of the subject's face.
- */
- FACE = 2,
-
- /**
- * The outline of the subject's left eye cavity.
- */
- LEFT_EYE = 7,
-
- /**
- * The bottom outline of the subject's left eyebrow.
- */
- LEFT_EYEBROW_BOTTOM = 4,
-
- /**
- * The top outline of the subject's left eyebrow.
- */
- LEFT_EYEBROW_TOP = 3,
-
- /**
- * The bottom outline of the subject's lower lip.
- */
- LOWER_LIP_BOTTOM = 12,
-
- /**
- * The top outline of the subject's lower lip.
- */
- LOWER_LIP_TOP = 11,
-
- /**
- * The outline of the subject's nose bridge.
- */
- NOSE_BOTTOM = 14,
-
- /**
- * The outline of the subject's nose bridge.
- */
- NOSE_BRIDGE = 13,
-
- /**
- * The outline of the subject's right eye cavity.
- */
- RIGHT_EYE = 8,
-
- /**
- * The bottom outline of the subject's right eyebrow.
- */
- RIGHT_EYEBROW_BOTTOM = 6,
-
- /**
- * The top outline of the subject's right eyebrow.
- */
- RIGHT_EYEBROW_TOP = 5,
-
- /**
- * The bottom outline of the subject's upper lip.
- */
- UPPER_LIP_BOTTOM = 10,
-
- /**
- * The top outline of the subject's upper lip.
- */
- UPPER_LIP_TOP = 9,
- }
-
- /**
- * Represents a detected landmark returned from `cloudLandmarkRecognizerProcessImage()`.
- */
- export interface VisionLandmark {
- /**
- * Gets image region of the detected landmark. Returns null if nothing was detected
- */
- boundingBox: VisionRectangle | null;
-
- /**
- * Gets overall confidence of the result. Ranging between 0 & 1.
- */
- confidence: number;
-
- /**
- * Gets opaque entity ID. Some IDs may be available in [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/).
- */
- entityId: string;
-
- /**
- * Gets the detected landmark.
- */
- landmark: string;
-
- /**
- * Gets the location information for the detected entity.
- *
- * Multiple VisionGeoPoint elements can be present because one location may indicate the location of the scene
- * in the image, and another location may indicate the location of the place where the image was taken.
- * Location information is usually present for landmarks.
- */
- locations: VisionGeoPoint[];
- }
-
- /**
- * A representation of a latitude/longitude pair.
- *
- * This is expressed as an array of numbers representing degrees latitude and degrees longitude, in the form `[lat, lng]`.
- */
- export type VisionGeoPoint = [number, number];
-
- /**
- * The Firebase ML Kit service interface.
- *
- * > This module is available for the default app only.
- *
- * #### Example
- *
- * Get the ML Kit service for the default app:
- *
- * ```js
- * const defaultAppMLKit = firebase.vision();
- * ```
- */
- export class Module extends FirebaseModule {
- /**
- * Detects faces from a local image file.
- *
- * @param imageFilePath A local path to an image on the device.
- * @param faceDetectorOptions An optional instance of `VisionFaceDetectorOptions`.
- */
- faceDetectorProcessImage(
- imageFilePath: string,
- faceDetectorOptions?: VisionFaceDetectorOptions,
- ): Promise;
-
- /**
- * Detect text from a local image file using the on-device model.
- *
- * @param imageFilePath A local path to an image on the device.
- */
- textRecognizerProcessImage(imageFilePath: string): Promise;
-
- /**
- * Detect text from a local image file using the cloud (Firebase) model.
- *
- * @param imageFilePath A local path to an image on the device.
- * @param cloudTextRecognizerOptions An instance of `VisionCloudTextRecognizerOptions`.
- */
- cloudTextRecognizerProcessImage(
- imageFilePath: string,
- cloudTextRecognizerOptions?: VisionCloudTextRecognizerOptions,
- ): Promise;
-
- /**
- * Detect text within a document using a local image file from the cloud (Firebase) model.
- *
- * @param imageFilePath A local path to an image on the device.
- * @param cloudDocumentTextRecognizerOptions An instance of `VisionCloudDocumentTextRecognizerOptions`.
- */
- cloudDocumentTextRecognizerProcessImage(
- imageFilePath: string,
- cloudDocumentTextRecognizerOptions?: VisionCloudDocumentTextRecognizerOptions,
- ): Promise;
-
- /**
- * Returns an array of landmarks (as `VisionLandmark`) of a given local image file path. Landmark detection
- * is done on cloud (Firebase).
- *
- * @param imageFilePath A local image file path.
- * @param cloudLandmarkRecognizerOptions An optional instance of `VisionCloudLandmarkRecognizerOptions`.
- */
- cloudLandmarkRecognizerProcessImage(
- imageFilePath: string,
- cloudLandmarkRecognizerOptions?: VisionCloudLandmarkRecognizerOptions,
- ): Promise;
-
- /**
- * Returns an array of labels (as `VisionImageLabel`) of a given local image file path. Label detection is done
- * on device, resulting in faster results but less descriptive.
- *
- * #### Example
- *
- * ```js
- * const labels = await firebase.vision().imageLabelerProcessImage(filePath, {
- * confidenceThreshold: 0.8,
- * });
- * ```
- *
- * @param imageFilePath A local image file path.
- * @param imageLabelerOptions An optional instance of `VisionImageLabelerOptions`.
- */
- imageLabelerProcessImage(
- imageFilePath: string,
- imageLabelerOptions?: VisionImageLabelerOptions,
- ): Promise;
-
- /**
- * Returns an array of labels (as `VisionImageLabel`) of a given local image file path. Label detection is done
- * on cloud (Firebase), resulting in slower results but more descriptive.
- *
- * #### Example
- *
- * ```js
- * const labels = await firebase.vision().cloudImageLabelerProcessImage(filePath, {
- * confidenceThreshold: 0.8,
- * });
- * ```
- *
- * @param imageFilePath A local image file path.
- * @param cloudImageLabelerOptions An optional instance of `VisionCloudImageLabelerOptions`.
- */
- cloudImageLabelerProcessImage(
- imageFilePath: string,
- cloudImageLabelerOptions?: VisionCloudImageLabelerOptions,
- ): Promise;
-
- /**
- * Returns an array of barcodes (as `VisionBarcode`) detected for a local image file path.
- *
- * Barcode detection is done locally on device.
- *
- * #### Example 1
- *
- * ```js
- * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath);
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) {
- * console.log(barcode.contactInfo);
- * }
- * ```
- *
- * #### Example 2
- *
- * Process image with custom `VisionBarcodeDetectorOptions`.
- *
- * ```js
- * import vision, { VisionBarcodeFormat, VisionBarcodeValueType } from '@react-native-firebase/ml-vision';
- *
- * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath, {
- * barcodeFormats: [VisionBarcodeFormat.QR_CODE]
- * });
- *
- * if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) {
- * console.log(barcode.contactInfo);
- * }
- * ```
- *
- * @param imageFilePath A local image file path.
- * @param barcodeDetectorOptions Optional instance of `VisionBarcodeDetectorOptions`.
- */
- barcodeDetectorProcessImage(
- imageFilePath: string,
- barcodeDetectorOptions?: MLKitVision.VisionBarcodeDetectorOptions,
- ): Promise;
- }
-}
-
-declare const defaultExport: ReactNativeFirebase.FirebaseModuleWithStaticsAndApp<
- FirebaseVisionTypes.Module,
- FirebaseVisionTypes.Statics
->;
-
-export const firebase: ReactNativeFirebase.Module & {
- analytics: typeof defaultExport;
- app(name?: string): ReactNativeFirebase.FirebaseApp & { vision(): FirebaseVisionTypes.Module };
-};
-
-export const VisionBarcodeFormat: FirebaseVisionTypes.Statics['VisionBarcodeFormat'];
-export const VisionFaceContourType: FirebaseVisionTypes.Statics['VisionFaceContourType'];
-export const VisionFaceLandmarkType: FirebaseVisionTypes.Statics['VisionFaceLandmarkType'];
-export const VisionBarcodeValueType: FirebaseVisionTypes.Statics['VisionBarcodeValueType'];
-export const VisionBarcodeEmailType: FirebaseVisionTypes.Statics['VisionBarcodeEmailType'];
-export const VisionBarcodePhoneType: FirebaseVisionTypes.Statics['VisionBarcodePhoneType'];
-export const VisionBarcodeAddressType: FirebaseVisionTypes.Statics['VisionBarcodeAddressType'];
-export const VisionFaceDetectorContourMode: FirebaseVisionTypes.Statics['VisionFaceDetectorContourMode'];
-export const VisionFaceDetectorLandmarkMode: FirebaseVisionTypes.Statics['VisionFaceDetectorLandmarkMode'];
-export const VisionBarcodeWifiEncryptionType: FirebaseVisionTypes.Statics['VisionBarcodeWifiEncryptionType'];
-export const VisionFaceDetectorPerformanceMode: FirebaseVisionTypes.Statics['VisionFaceDetectorPerformanceMode'];
-export const VisionCloudTextRecognizerModelType: FirebaseVisionTypes.Statics['VisionCloudTextRecognizerModelType'];
-export const VisionFaceDetectorClassificationMode: FirebaseVisionTypes.Statics['VisionFaceDetectorClassificationMode'];
-export const VisionDocumentTextRecognizedBreakType: FirebaseVisionTypes.Statics['VisionDocumentTextRecognizedBreakType'];
-export const VisionCloudLandmarkRecognizerModelType: FirebaseVisionTypes.Statics['VisionCloudLandmarkRecognizerModelType'];
-
-export default defaultExport;
-
-/**
- * Attach namespace to `firebase.` and `FirebaseApp.`.
- */
-declare module '@react-native-firebase/app' {
- namespace ReactNativeFirebase {
- import FirebaseModuleWithStaticsAndApp = ReactNativeFirebase.FirebaseModuleWithStaticsAndApp;
- interface Module {
- vision: FirebaseModuleWithStaticsAndApp<
- FirebaseVisionTypes.Module,
- FirebaseVisionTypes.Statics
- >;
- }
-
- interface FirebaseApp {
- vision(): FirebaseVisionTypes.Module;
- }
-
- interface FirebaseJsonConfig {
- ml_vision_face_model: boolean;
- ml_vision_ocr_model: boolean;
- ml_vision_barcode_model: boolean;
- ml_vision_label_model: boolean;
- ml_vision_image_label_model: boolean;
- }
- }
-}
diff --git a/packages/ml-vision/lib/index.js b/packages/ml-vision/lib/index.js
deleted file mode 100644
index 325db1ebb3b..00000000000
--- a/packages/ml-vision/lib/index.js
+++ /dev/null
@@ -1,273 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import {
- isString,
- toFilePath,
- validateOptionalNativeDependencyExists,
-} from '@react-native-firebase/app/lib/common';
-import {
- createModuleNamespace,
- FirebaseModule,
- getFirebaseRoot,
-} from '@react-native-firebase/app/lib/internal';
-import version from './version';
-import VisionBarcodeAddressType from './VisionBarcodeAddressType';
-import visionBarcodeDetectorOptions from './visionBarcodeDetectorOptions';
-import VisionBarcodeEmailType from './VisionBarcodeEmailType';
-import VisionBarcodeFormat from './VisionBarcodeFormat';
-import VisionBarcodePhoneType from './VisionBarcodePhoneType';
-import VisionBarcodeValueType from './VisionBarcodeValueType';
-import VisionBarcodeWifiEncryptionType from './VisionBarcodeWifiEncryptionType';
-import visionCloudDocumentTextRecognizerOptions from './visionCloudDocumentTextRecognizerOptions';
-import visionCloudImageLabelerOptions from './visionCloudImageLabelerOptions';
-import VisionCloudLandmarkRecognizerModelType from './VisionCloudLandmarkRecognizerModelType';
-import visionCloudLandmarkRecognizerOptions from './visionCloudLandmarkRecognizerOptions';
-import VisionCloudTextRecognizerModelType from './VisionCloudTextRecognizerModelType';
-import visionCloudTextRecognizerOptions from './visionCloudTextRecognizerOptions';
-import VisionDocumentTextRecognizedBreakType from './VisionDocumentTextRecognizedBreakType';
-import VisionFaceContourType from './VisionFaceContourType';
-import VisionFaceDetectorClassificationMode from './VisionFaceDetectorClassificationMode';
-import VisionFaceDetectorContourMode from './VisionFaceDetectorContourMode';
-import VisionFaceDetectorLandmarkMode from './VisionFaceDetectorLandmarkMode';
-import visionFaceDetectorOptions from './visionFaceDetectorOptions';
-import VisionFaceDetectorPerformanceMode from './VisionFaceDetectorPerformanceMode';
-import VisionFaceLandmarkType from './VisionFaceLandmarkType';
-import visionImageLabelerOptions from './visionImageLabelerOptions';
-
-const statics = {
- VisionCloudTextRecognizerModelType,
- VisionFaceDetectorClassificationMode,
- VisionFaceDetectorContourMode,
- VisionFaceDetectorLandmarkMode,
- VisionFaceDetectorPerformanceMode,
- VisionFaceLandmarkType,
- VisionFaceContourType,
- VisionCloudLandmarkRecognizerModelType,
- VisionDocumentTextRecognizedBreakType,
- VisionBarcodeFormat,
- VisionBarcodeValueType,
- VisionBarcodeAddressType,
- VisionBarcodeEmailType,
- VisionBarcodePhoneType,
- VisionBarcodeWifiEncryptionType,
-};
-
-const namespace = 'vision';
-const nativeModuleName = [
- 'RNFBMLVisionFaceDetectorModule',
- 'RNFBMLVisionImageLabelerModule',
- 'RNFBMLVisionTextRecognizerModule',
- 'RNFBMLVisionBarcodeDetectorModule',
- 'RNFBMLVisionLandmarkRecognizerModule',
- 'RNFBMLVisionDocumentTextRecognizerModule',
-];
-
-class FirebaseMlKitVisionModule extends FirebaseModule {
- faceDetectorProcessImage(localImageFilePath, faceDetectorOptions) {
- validateOptionalNativeDependencyExists(
- 'ml_vision_face_model',
- 'ML Kit Vision Face Detector',
- !!this.native.faceDetectorProcessImage,
- );
-
- if (!isString(localImageFilePath)) {
- throw new Error(
- "firebase.vision().faceDetectorProcessImage(*) 'localImageFilePath' expected a string local file path.",
- );
- }
-
- let options;
- try {
- options = visionFaceDetectorOptions(faceDetectorOptions);
- } catch (e) {
- throw new Error(
- `firebase.vision().faceDetectorProcessImage(_, *) 'faceDetectorOptions' ${e.message}.`,
- );
- }
-
- return this.native.faceDetectorProcessImage(toFilePath(localImageFilePath), options);
- }
-
- textRecognizerProcessImage(localImageFilePath) {
- if (!isString(localImageFilePath)) {
- throw new Error(
- "firebase.vision().textRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.",
- );
- }
-
- return this.native.textRecognizerProcessImage(toFilePath(localImageFilePath));
- }
-
- cloudTextRecognizerProcessImage(localImageFilePath, cloudTextRecognizerOptions) {
- if (!isString(localImageFilePath)) {
- throw new Error(
- "firebase.vision().cloudTextRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.",
- );
- }
-
- let options;
- try {
- options = visionCloudTextRecognizerOptions(cloudTextRecognizerOptions);
- } catch (e) {
- throw new Error(`firebase.vision().cloudTextRecognizerProcessImage(_, *) ${e.message}`);
- }
-
- return this.native.cloudTextRecognizerProcessImage(toFilePath(localImageFilePath), options);
- }
-
- cloudDocumentTextRecognizerProcessImage(localImageFilePath, cloudDocumentTextRecognizerOptions) {
- if (!isString(localImageFilePath)) {
- throw new Error(
- "firebase.vision().cloudDocumentTextRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.",
- );
- }
-
- let options;
- try {
- options = visionCloudDocumentTextRecognizerOptions(cloudDocumentTextRecognizerOptions);
- } catch (e) {
- throw new Error(
- `firebase.vision().cloudDocumentTextRecognizerProcessImage(_, *) ${e.message}.`,
- );
- }
-
- return this.native.cloudDocumentTextRecognizerProcessImage(
- toFilePath(localImageFilePath),
- options,
- );
- }
-
- cloudLandmarkRecognizerProcessImage(localImageFilePath, cloudLandmarkRecognizerOptions) {
- if (!isString(localImageFilePath)) {
- throw new Error(
- "firebase.vision().cloudLandmarkRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.",
- );
- }
-
- let options;
- try {
- options = visionCloudLandmarkRecognizerOptions(cloudLandmarkRecognizerOptions);
- } catch (e) {
- throw new Error(`firebase.vision().cloudLandmarkRecognizerProcessImage(_, *) ${e.message}.`);
- }
-
- return this.native.cloudLandmarkRecognizerProcessImage(toFilePath(localImageFilePath), options);
- }
-
- imageLabelerProcessImage(localImageFilePath, imageLabelerOptions) {
- validateOptionalNativeDependencyExists(
- 'ml_vision_image_label_model',
- 'ML Kit Vision Image Labeler',
- !!this.native.imageLabelerProcessImage,
- );
-
- if (!isString(localImageFilePath)) {
- throw new Error(
- "firebase.vision().imageLabelerProcessImage(*) 'localImageFilePath' expected a string local file path.",
- );
- }
-
- let options;
- try {
- options = visionImageLabelerOptions(imageLabelerOptions);
- } catch (e) {
- throw new Error(`firebase.vision().imageLabelerProcessImage(_, *) ${e.message}.`);
- }
-
- return this.native.imageLabelerProcessImage(toFilePath(localImageFilePath), options);
- }
-
- cloudImageLabelerProcessImage(localImageFilePath, cloudImageLabelerOptions) {
- validateOptionalNativeDependencyExists(
- 'ml_vision_image_label_model',
- 'ML Kit Vision Image Labeler',
- !!this.native.imageLabelerProcessImage,
- );
-
- if (!isString(localImageFilePath)) {
- throw new Error(
- "firebase.vision().cloudImageLabelerProcessImage(*) 'localImageFilePath' expected a string local file path.",
- );
- }
-
- let options;
- try {
- options = visionCloudImageLabelerOptions(cloudImageLabelerOptions);
- } catch (e) {
- throw new Error(`firebase.vision().cloudImageLabelerProcessImage(_, *) ${e.message}.`);
- }
-
- return this.native.cloudImageLabelerProcessImage(toFilePath(localImageFilePath), options);
- }
-
- barcodeDetectorProcessImage(localImageFilePath, barcodeDetectorOptions) {
- if (!isString(localImageFilePath)) {
- throw new Error(
- "firebase.vision().barcodeDetectorProcessImage(*) 'localImageFilePath' expected a string local file path.",
- );
- }
-
- let options;
- try {
- options = visionBarcodeDetectorOptions(barcodeDetectorOptions);
- } catch (e) {
- throw new Error(`firebase.vision().barcodeDetectorProcessImage(_, *) ${e.message}`);
- }
-
- return this.native.barcodeDetectorProcessImage(toFilePath(localImageFilePath), options);
- }
-}
-
-// import { SDK_VERSION } from '@react-native-firebase/ml-vision';
-export const SDK_VERSION = version;
-
-// import vision from '@react-native-firebase/ml-vision';
-// vision().X(...);
-export default createModuleNamespace({
- statics,
- version,
- namespace,
- nativeModuleName,
- nativeEvents: false,
- hasMultiAppSupport: true,
- hasCustomUrlOrRegionSupport: false,
- ModuleClass: FirebaseMlKitVisionModule,
-});
-
-// import vision, { firebase } from '@react-native-firebase/ml-vision';
-// vision().X(...);
-// firebase.vision().X(...);
-export const firebase = getFirebaseRoot();
-
-// e.g.
-// // import { VisionCloudTextRecognizerModelType } from '@react-native-firebase/ml-vision';
-export { default as VisionBarcodeFormat } from './VisionBarcodeFormat';
-export { default as VisionFaceContourType } from './VisionFaceContourType';
-export { default as VisionFaceLandmarkType } from './VisionFaceLandmarkType';
-export { default as VisionBarcodeValueType } from './VisionBarcodeValueType';
-export { default as VisionBarcodeEmailType } from './VisionBarcodeEmailType';
-export { default as VisionBarcodePhoneType } from './VisionBarcodePhoneType';
-export { default as VisionBarcodeAddressType } from './VisionBarcodeAddressType';
-export { default as VisionFaceDetectorContourMode } from './VisionFaceDetectorContourMode';
-export { default as VisionFaceDetectorLandmarkMode } from './VisionFaceDetectorLandmarkMode';
-export { default as VisionBarcodeWifiEncryptionType } from './VisionBarcodeWifiEncryptionType';
-export { default as VisionFaceDetectorPerformanceMode } from './VisionFaceDetectorPerformanceMode';
-export { default as VisionCloudTextRecognizerModelType } from './VisionCloudTextRecognizerModelType';
-export { default as VisionFaceDetectorClassificationMode } from './VisionFaceDetectorClassificationMode';
-export { default as VisionDocumentTextRecognizedBreakType } from './VisionDocumentTextRecognizedBreakType';
-export { default as VisionCloudLandmarkRecognizerModelType } from './VisionCloudLandmarkRecognizerModelType';
diff --git a/packages/ml-vision/lib/visionBarcodeDetectorOptions.js b/packages/ml-vision/lib/visionBarcodeDetectorOptions.js
deleted file mode 100644
index 0321f5fa59e..00000000000
--- a/packages/ml-vision/lib/visionBarcodeDetectorOptions.js
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import { isArray, isObject, isUndefined } from '@react-native-firebase/app/lib/common';
-import VisionBarcodeFormat from './VisionBarcodeFormat';
-
-export default function visionBarcodeDetectorOptions(barcodeDetectorOptions) {
- const out = {
- barcodeFormats: [VisionBarcodeFormat.ALL_FORMATS],
- };
-
- if (isUndefined(barcodeDetectorOptions)) {
- return out;
- }
-
- if (!isObject(barcodeDetectorOptions)) {
- throw new Error("'barcodeDetectorOptions' expected an object value.");
- }
-
- if (barcodeDetectorOptions.barcodeFormats) {
- if (!isArray(barcodeDetectorOptions.barcodeFormats)) {
- throw new Error(
- "'barcodeDetectorOptions.barcodeFormats' must be an array of VisionBarcodeFormat types.",
- );
- }
-
- const validFormats = Object.values(VisionBarcodeFormat);
-
- for (let i = 0; i < barcodeDetectorOptions.barcodeFormats.length; i++) {
- if (!validFormats.includes(barcodeDetectorOptions.barcodeFormats[i])) {
- throw new Error(
- `'barcodeDetectorOptions.barcodeFormats' type at index ${i} is invalid. Expected a VisionBarcodeFormat type.`,
- );
- }
- }
-
- out.barcodeFormats = barcodeDetectorOptions.barcodeFormats;
- }
-
- return out;
-}
diff --git a/packages/ml-vision/lib/visionFaceDetectorOptions.js b/packages/ml-vision/lib/visionFaceDetectorOptions.js
deleted file mode 100644
index 2b5e9ebe8ac..00000000000
--- a/packages/ml-vision/lib/visionFaceDetectorOptions.js
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import {
- hasOwnProperty,
- isNumber,
- isObject,
- isUndefined,
-} from '@react-native-firebase/app/lib/common';
-import VisionFaceDetectorClassificationMode from './VisionFaceDetectorClassificationMode';
-import VisionFaceDetectorContourMode from './VisionFaceDetectorContourMode';
-import VisionFaceDetectorLandmarkMode from './VisionFaceDetectorLandmarkMode';
-import VisionFaceDetectorPerformanceMode from './VisionFaceDetectorPerformanceMode';
-
-export default function visionFaceDetectorOptions(faceDetectorOptions) {
- const out = {
- classificationMode: VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS,
- contourMode: VisionFaceDetectorContourMode.NO_CONTOURS,
- landmarkMode: VisionFaceDetectorLandmarkMode.NO_LANDMARKS,
- minFaceSize: 0.1,
- performanceMode: VisionFaceDetectorPerformanceMode.FAST,
- };
-
- if (isUndefined(faceDetectorOptions)) {
- return out;
- }
-
- if (!isObject(faceDetectorOptions)) {
- throw new Error("'faceDetectorOptions' expected an object value.");
- }
-
- if (faceDetectorOptions.classificationMode) {
- if (
- faceDetectorOptions.classificationMode !==
- VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS &&
- faceDetectorOptions.classificationMode !==
- VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS
- ) {
- throw new Error(
- "'faceDetectorOptions.classificationMode' invalid classification mode. Expected VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS or VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS.",
- );
- }
-
- out.classificationMode = faceDetectorOptions.classificationMode;
- }
-
- if (faceDetectorOptions.contourMode) {
- if (
- faceDetectorOptions.contourMode !== VisionFaceDetectorContourMode.NO_CONTOURS &&
- faceDetectorOptions.contourMode !== VisionFaceDetectorContourMode.ALL_CONTOURS
- ) {
- throw new Error(
- "'faceDetectorOptions.contourMode' invalid contour mode. Expected VisionFaceDetectorContourMode.NO_CONTOURS or VisionFaceDetectorContourMode.ALL_CONTOURS.",
- );
- }
-
- out.contourMode = faceDetectorOptions.contourMode;
- }
-
- if (faceDetectorOptions.landmarkMode) {
- if (
- faceDetectorOptions.landmarkMode !== VisionFaceDetectorLandmarkMode.NO_LANDMARKS &&
- faceDetectorOptions.landmarkMode !== VisionFaceDetectorLandmarkMode.ALL_LANDMARKS
- ) {
- throw new Error(
- "'faceDetectorOptions.landmarkMode' invalid landmark mode. Expected VisionFaceDetectorLandmarkMode.NO_LANDMARKS or VisionFaceDetectorLandmarkMode.ALL_LANDMARKS.",
- );
- }
-
- out.landmarkMode = faceDetectorOptions.landmarkMode;
- }
-
- if (hasOwnProperty(faceDetectorOptions, 'minFaceSize')) {
- if (!isNumber(faceDetectorOptions.minFaceSize)) {
- throw new Error("'faceDetectorOptions.minFaceSize' expected a number value between 0 & 1.");
- }
-
- if (faceDetectorOptions.minFaceSize < 0 || faceDetectorOptions.minFaceSize > 1) {
- throw new Error("'faceDetectorOptions.minFaceSize' expected value to be between 0 & 1.");
- }
-
- out.minFaceSize = faceDetectorOptions.minFaceSize;
- }
-
- if (faceDetectorOptions.performanceMode) {
- if (
- faceDetectorOptions.performanceMode !== VisionFaceDetectorPerformanceMode.FAST &&
- faceDetectorOptions.performanceMode !== VisionFaceDetectorPerformanceMode.ACCURATE
- ) {
- throw new Error(
- "'faceDetectorOptions.performanceMode' invalid performance mode. Expected VisionFaceDetectorPerformanceMode.FAST or VisionFaceDetectorPerformanceMode.ACCURATE.",
- );
- }
-
- out.performanceMode = faceDetectorOptions.performanceMode;
- }
-
- return out;
-}
diff --git a/packages/ml-vision/lib/visionImageLabelerOptions.js b/packages/ml-vision/lib/visionImageLabelerOptions.js
deleted file mode 100644
index 27f5104eed9..00000000000
--- a/packages/ml-vision/lib/visionImageLabelerOptions.js
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-import {
- hasOwnProperty,
- isNumber,
- isObject,
- isUndefined,
-} from '@react-native-firebase/app/lib/common';
-
-export default function visionImageLabelerOptions(imageLabelerOptions) {
- const out = {
- confidenceThreshold: 0.5,
- };
-
- if (isUndefined(imageLabelerOptions)) {
- return out;
- }
-
- if (!isObject(imageLabelerOptions)) {
- throw new Error("'imageLabelerOptions' expected an object value.");
- }
-
- if (hasOwnProperty(imageLabelerOptions, 'confidenceThreshold')) {
- if (!isNumber(imageLabelerOptions.confidenceThreshold)) {
- throw new Error(
- "'imageLabelerOptions.confidenceThreshold' expected a number value between 0 & 1.",
- );
- }
-
- if (
- imageLabelerOptions.confidenceThreshold < 0 ||
- imageLabelerOptions.confidenceThreshold > 1
- ) {
- throw new Error(
- "'imageLabelerOptions.confidenceThreshold' expected a number value between 0 & 1.",
- );
- }
-
- out.confidenceThreshold = imageLabelerOptions.confidenceThreshold;
- }
-
- return out;
-}
diff --git a/packages/ml-vision/type-test.ts b/packages/ml-vision/type-test.ts
deleted file mode 100644
index c1c480303a6..00000000000
--- a/packages/ml-vision/type-test.ts
+++ /dev/null
@@ -1,64 +0,0 @@
-import firebase from '@react-native-firebase/app';
-import * as vision from '@react-native-firebase/ml-vision';
-
-console.log(vision.default().app);
-
-// checks module exists at root
-console.log(firebase.vision().app.name);
-
-// checks module exists at app level
-console.log(firebase.app().vision().app.name);
-
-// checks statics exist
-console.log(firebase.vision.SDK_VERSION);
-
-// checks statics exist on defaultExport
-console.log(firebase.SDK_VERSION);
-
-// checks root exists
-console.log(firebase.SDK_VERSION);
-
-// checks firebase named export exists on module
-console.log(vision.firebase.SDK_VERSION);
-
-// checks multi-app support exists
-console.log(firebase.vision(firebase.app()).app.name);
-
-// checks default export supports app arg
-console.log(firebase.vision(firebase.app('foo')).app.name);
-
-console.log(firebase.vision.VisionBarcodeFormat.ALL_FORMATS);
-console.log(vision.VisionBarcodeFormat);
-
-console.log(firebase.vision.VisionFaceContourType.ALL_POINTS);
-console.log(vision.VisionFaceContourType.ALL_POINTS);
-
-console.log(firebase.vision.VisionFaceLandmarkType.LEFT_CHEEK);
-console.log(vision.VisionFaceLandmarkType.LEFT_EAR);
-
-console.log(firebase.vision.VisionBarcodeValueType.CALENDAR_EVENT);
-// console.log(vision.VisionBarcodeValueType.);
-
-console.log(firebase.vision.VisionFaceDetectorContourMode.ALL_CONTOURS);
-console.log(vision.VisionFaceDetectorContourMode.ALL_CONTOURS);
-
-console.log(firebase.vision.VisionFaceDetectorLandmarkMode.ALL_LANDMARKS);
-console.log(vision.VisionFaceDetectorLandmarkMode.ALL_LANDMARKS);
-
-console.log(firebase.vision.VisionBarcodeWifiEncryptionType.WEP);
-// console.log(vision.VisionBarcodeWifiEncryptionType.WEP);
-
-console.log(firebase.vision.VisionFaceDetectorPerformanceMode.ACCURATE);
-console.log(vision.VisionFaceDetectorPerformanceMode.FAST);
-
-console.log(firebase.vision.VisionCloudTextRecognizerModelType.DENSE_MODEL);
-console.log(vision.VisionCloudTextRecognizerModelType.SPARSE_MODEL);
-
-console.log(firebase.vision.VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS);
-console.log(vision.VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS);
-
-console.log(firebase.vision.VisionDocumentTextRecognizedBreakType.EOL_SURE_SPACE);
-console.log(vision.VisionDocumentTextRecognizedBreakType.HYPHEN);
-
-console.log(firebase.vision.VisionCloudLandmarkRecognizerModelType.LATEST_MODEL);
-console.log(vision.VisionCloudLandmarkRecognizerModelType.STABLE_MODEL);
diff --git a/packages/ml-natural-language/.npmignore b/packages/ml/.npmignore
similarity index 100%
rename from packages/ml-natural-language/.npmignore
rename to packages/ml/.npmignore
diff --git a/packages/ml-vision/CHANGELOG.md b/packages/ml/CHANGELOG.md
similarity index 100%
rename from packages/ml-vision/CHANGELOG.md
rename to packages/ml/CHANGELOG.md
diff --git a/packages/ml-natural-language/LICENSE b/packages/ml/LICENSE
similarity index 100%
rename from packages/ml-natural-language/LICENSE
rename to packages/ml/LICENSE
diff --git a/packages/ml/README.md b/packages/ml/README.md
new file mode 100644
index 00000000000..a33e17c7c56
--- /dev/null
+++ b/packages/ml/README.md
@@ -0,0 +1,102 @@
+
+
+
+
+
React Native Firebase - ML
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+---
+
+Firebase Machine Learning is a mobile SDK that brings Google's machine learning expertise to Android and iOS apps in a powerful yet easy-to-use package. Whether you're new or experienced in machine learning, you can implement the functionality you need in just a few lines of code. There's no need to have deep knowledge of neural networks or model optimization to get started. On the other hand, if you are an experienced ML developer, Firebase ML provides convenient APIs that help you use your custom TensorFlow Lite models in your mobile apps.
+
+## Cloud vs. on-device
+
+Firebase ML has APIs that work either in the in the cloud or on the device. When we describe an ML API as being a cloud API or on-device API, we are describing which machine performs inference: that is, which machine uses the ML model to discover insights about the data you provide it. In Firebase ML, this happens either on Google Cloud, or on your users' mobile devices.
+
+The text recognition, image labeling, and landmark recognition APIs perform inference in the cloud. These models have more computational power and memory available to them than a comparable on-device model, and as a result, can perform inference with greater accuracy and precision than an on-device model. On the other hand, every request to these APIs requires a network round-trip, which makes them unsuitable for real-time and low-latency applications such as video processing.
+
+The custom model APIs and AutoML Vision Edge deal with ML models that run on the device. The models used and produced by these features are TensorFlow Lite models, which are optimized to run on mobile devices. The biggest advantage to these models is that they don't require a network connection and can run very quickly—fast enough, for example, to process frames of video in real time.
+
+Firebase ML provides two key capabilities around on-device custom models:
+
+- Custom model deployment: Deploy custom models to your users' devices by uploading them to our servers. Your Firebase-enabled app will download the model to the device on demand. This allows you to keep your app's initial install size small, and you can swap the ML model without having to republish your app.
+
+- AutoML Vision Edge: This service helps you create your own on-device custom image classification models with an easy-to-use web interface. Then, you can seamlessly host the models you create with the service mentioned above.
+
+## ML Kit: Ready-to-use on-device models
+
+> On June 3, 2020, Google started offering ML Kit's on-device APIs through a [new standalone SDK](https://developers.google.com/ml-kit). Cloud APIs, AutoML Vision Edge, and custom model deployment will continue to be available through Firebase Machine Learning.
+
+If you're looking for pre-trained models that run on the device, check out [the new standalone ML Kit](https://developers.google.com/ml-kit). Use the new [react-native-mlkit modules](https://www.npmjs.com/org/react-native-mlkit)) for most on-device use cases:
+
+- Text recognition
+- Image labeling
+- Object detection and tracking
+- Face detection and contour tracing
+- Barcode scanning
+- Language identification
+- Translation
+- Smart Reply
+
+---
+
+This react-native-firebase module currently supports the following Firebase ML APIs:
+
+| API | Supported |
+| -------------------------------------------------------------------------------- | --------- |
+| [Text Recognition](https://firebase.google.com/docs/ml/recognize-text) | ✅ |
+| [Document Text Recognition](https://firebase.google.com/docs/ml/recognize-text)) | ✅ |
+| [Image Labeling](https://firebase.google.com/docs/ml/label-images) | ✅ |
+| [Landmark Recognition](https://firebase.google.com/docs/ml/recognize-landmarks) | ✅ |
+| [AutoML Vision Edge](https://firebase.google.com/docs/ml/automl-image-labeling) | ❌ |
+| [Custom Models](https://firebase.google.com/docs/ml/use-custom-models) | ❌ |
+
+[> Learn More](https://firebase.google.com/docs/ml)
+
+## Installation
+
+Requires `@react-native-firebase/app` to be installed.
+
+```bash
+yarn add @react-native-firebase/ml
+```
+
+## Documentation
+
+- [Quick Start](https://rnfirebase.io/ml/usage)
+- [Reference](https://rnfirebase.io/reference/ml)
+
+### Additional Topics
+
+- [Text Recognition](https://rnfirebase.io/ml/text-recognition)
+- [Landmark Recognition](https://rnfirebase.io/ml/landmark-recognition)
+- [Image Labeling](https://rnfirebase.io/ml/image-labeling)
+
+## License
+
+- See [LICENSE](/LICENSE)
+
+---
+
+
+
+
+ Built and maintained with 💛 by Invertase .
+
+
+
+---
diff --git a/packages/ml-vision/RNFBMLVision.podspec b/packages/ml/RNFBML.podspec
similarity index 73%
rename from packages/ml-vision/RNFBMLVision.podspec
rename to packages/ml/RNFBML.podspec
index 860c5f391eb..693cb7715d1 100644
--- a/packages/ml-vision/RNFBMLVision.podspec
+++ b/packages/ml/RNFBML.podspec
@@ -11,7 +11,7 @@ if coreVersionDetected != coreVersionRequired
end
Pod::Spec.new do |s|
- s.name = "RNFBMLVision"
+ s.name = "RNFBML"
s.version = package["version"]
s.description = package["description"]
s.summary = <<-DESC
@@ -36,18 +36,6 @@ Pod::Spec.new do |s|
# Firebase dependencies
s.dependency 'Firebase/MLVision', firebase_sdk_version
- if FirebaseJSON::Config.get_value_or_default('ml_vision_face_model', false)
- s.dependency 'Firebase/MLVisionFaceModel', firebase_sdk_version
- end
- if FirebaseJSON::Config.get_value_or_default('ml_vision_ocr_model', false)
- s.dependency 'Firebase/MLVisionTextModel', firebase_sdk_version
- end
- if FirebaseJSON::Config.get_value_or_default('ml_vision_barcode_model', false)
- s.dependency 'Firebase/MLVisionBarcodeModel', firebase_sdk_version
- end
- if FirebaseJSON::Config.get_value_or_default('ml_vision_image_label_model', false)
- s.dependency 'Firebase/MLVisionLabelModel', firebase_sdk_version
- end
if defined?($RNFirebaseAsStaticFramework)
Pod::UI.puts "#{s.name}: Using overridden static_framework value of '#{$RNFirebaseAsStaticFramework}'"
diff --git a/packages/ml-natural-language/android/.editorconfig b/packages/ml/android/.editorconfig
similarity index 100%
rename from packages/ml-natural-language/android/.editorconfig
rename to packages/ml/android/.editorconfig
diff --git a/packages/ml-vision/android/build.gradle b/packages/ml/android/build.gradle
similarity index 91%
rename from packages/ml-vision/android/build.gradle
rename to packages/ml/android/build.gradle
index 99ae046b8ea..2db025b20e2 100644
--- a/packages/ml-vision/android/build.gradle
+++ b/packages/ml/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
@@ -92,11 +92,8 @@ dependencies {
implementation platform("com.google.firebase:firebase-bom:${ReactNative.ext.getVersion("firebase", "bom")}")
implementation "com.google.firebase:firebase-ml-vision"
- // This is necessary to fix known dependency issues in the SDK
- // https://firebase.google.com/support/release-notes/android#bom_v25-8-0
implementation 'com.google.android.gms:play-services-vision:20.1.1'
implementation 'com.google.android.gms:play-services-vision-common:19.1.1'
- implementation 'com.google.firebase:firebase-ml-vision-image-label-model:20.0.2'
implementation 'com.google.android.gms:play-services-vision-face-contour-internal:16.0.3'
implementation 'com.google.android.gms:play-services-vision-image-labeling-internal:16.0.5'
implementation 'com.google.android.gms:play-services-vision-image-label:18.0.5'
@@ -104,8 +101,6 @@ dependencies {
implementation 'com.google.firebase:firebase-ml-model-interpreter:22.0.4'
}
-apply from: file("./ml-models.gradle")
-
ReactNative.shared.applyPackageVersion()
ReactNative.shared.applyDefaultExcludes()
ReactNative.module.applyAndroidVersions()
diff --git a/packages/ml-natural-language/android/lint.xml b/packages/ml/android/lint.xml
similarity index 100%
rename from packages/ml-natural-language/android/lint.xml
rename to packages/ml/android/lint.xml
diff --git a/packages/ml/android/settings.gradle b/packages/ml/android/settings.gradle
new file mode 100644
index 00000000000..21d0e6de756
--- /dev/null
+++ b/packages/ml/android/settings.gradle
@@ -0,0 +1 @@
+rootProject.name = '@react-native-firebase_ml'
diff --git a/packages/ml/android/src/main/AndroidManifest.xml b/packages/ml/android/src/main/AndroidManifest.xml
new file mode 100644
index 00000000000..b7e0bbc3790
--- /dev/null
+++ b/packages/ml/android/src/main/AndroidManifest.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionCommon.java b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLCommon.java
similarity index 98%
rename from packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionCommon.java
rename to packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLCommon.java
index f3be9b1690a..dbc2475dbe8 100644
--- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionCommon.java
+++ b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLCommon.java
@@ -1,10 +1,10 @@
-package io.invertase.firebase.ml.vision;
+package io.invertase.firebase.ml;
import com.google.firebase.ml.common.FirebaseMLException;
import javax.annotation.Nullable;
-class UniversalFirebaseMLVisionCommon {
+class UniversalFirebaseMLCommon {
static final String KEY_BOUNDING_BOX = "boundingBox";
static final String KEY_TEXT = "text";
static final String KEY_CONFIDENCE = "confidence";
diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionDocumentTextRecognizerModule.java b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLDocumentTextRecognizerModule.java
similarity index 96%
rename from packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionDocumentTextRecognizerModule.java
rename to packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLDocumentTextRecognizerModule.java
index 4139664db78..2ff78b56b55 100644
--- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionDocumentTextRecognizerModule.java
+++ b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLDocumentTextRecognizerModule.java
@@ -1,4 +1,4 @@
-package io.invertase.firebase.ml.vision;
+package io.invertase.firebase.ml;
/*
* Copyright (c) 2016-present Invertase Limited & Contributors
@@ -35,10 +35,10 @@
import javax.annotation.Nullable;
import java.util.*;
-import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*;
+import static io.invertase.firebase.ml.UniversalFirebaseMLCommon.*;
-class UniversalFirebaseMLVisionDocumentTextRecognizerModule extends UniversalFirebaseModule {
- UniversalFirebaseMLVisionDocumentTextRecognizerModule(Context context, String serviceName) {
+class UniversalFirebaseMLDocumentTextRecognizerModule extends UniversalFirebaseModule {
+ UniversalFirebaseMLDocumentTextRecognizerModule(Context context, String serviceName) {
super(context, serviceName);
}
diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionImageLabelerModule.java b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLImageLabelerModule.java
similarity index 79%
rename from packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionImageLabelerModule.java
rename to packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLImageLabelerModule.java
index 786ba9cc6c9..7d41db3a4f7 100644
--- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionImageLabelerModule.java
+++ b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLImageLabelerModule.java
@@ -1,4 +1,4 @@
-package io.invertase.firebase.ml.vision;
+package io.invertase.firebase.ml;
/*
* Copyright (c) 2016-present Invertase Limited & Contributors
@@ -37,30 +37,13 @@
import java.util.List;
import java.util.Map;
-import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*;
+import static io.invertase.firebase.ml.UniversalFirebaseMLCommon.*;
-class UniversalFirebaseMLVisionImageLabelerModule extends UniversalFirebaseModule {
- UniversalFirebaseMLVisionImageLabelerModule(Context context, String serviceName) {
+class UniversalFirebaseMLImageLabelerModule extends UniversalFirebaseModule {
+ UniversalFirebaseMLImageLabelerModule(Context context, String serviceName) {
super(context, serviceName);
}
- Task>> imageLabelerProcessImage(String appName, String stringUri, Bundle imageLabelerOptions) {
- return Tasks.call(getExecutor(), () -> {
- FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
- FirebaseVisionOnDeviceImageLabelerOptions options = getOnDeviceImageLabelerOptions(imageLabelerOptions);
- FirebaseVisionImageLabeler visionImageLabeler = FirebaseVision.getInstance(firebaseApp)
- .getOnDeviceImageLabeler(options);
- FirebaseVisionImage image = FirebaseVisionImage.fromFilePath(
- getContext(),
- SharedUtils.getUri(stringUri)
- );
-
- return processLabelerList(
- Tasks.await(visionImageLabeler.processImage(image))
- );
- });
- }
-
Task>> cloudImageLabelerProcessImage(String appName, String stringUri, Bundle cloudImageLabelerOptions) {
return Tasks.call(getExecutor(), () -> {
FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionLandmarkRecognizerModule.java b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLLandmarkRecognizerModule.java
similarity index 94%
rename from packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionLandmarkRecognizerModule.java
rename to packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLLandmarkRecognizerModule.java
index 6ec562271b8..20fc33626b8 100644
--- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionLandmarkRecognizerModule.java
+++ b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLLandmarkRecognizerModule.java
@@ -1,4 +1,4 @@
-package io.invertase.firebase.ml.vision;
+package io.invertase.firebase.ml;
/*
* Copyright (c) 2016-present Invertase Limited & Contributors
@@ -37,10 +37,10 @@
import java.util.List;
import java.util.Map;
-import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*;
+import static io.invertase.firebase.ml.UniversalFirebaseMLCommon.*;
-class UniversalFirebaseMLVisionLandmarkRecognizerModule extends UniversalFirebaseModule {
- UniversalFirebaseMLVisionLandmarkRecognizerModule(Context context, String serviceName) {
+class UniversalFirebaseMLLandmarkRecognizerModule extends UniversalFirebaseModule {
+ UniversalFirebaseMLLandmarkRecognizerModule(Context context, String serviceName) {
super(context, serviceName);
}
diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionTextRecognizerModule.java b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLTextRecognizerModule.java
similarity index 89%
rename from packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionTextRecognizerModule.java
rename to packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLTextRecognizerModule.java
index de16c5f5508..8a84efe8f41 100644
--- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionTextRecognizerModule.java
+++ b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLTextRecognizerModule.java
@@ -1,4 +1,4 @@
-package io.invertase.firebase.ml.vision;
+package io.invertase.firebase.ml;
/*
* Copyright (c) 2016-present Invertase Limited & Contributors
@@ -34,32 +34,13 @@
import java.util.*;
-import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*;
+import static io.invertase.firebase.ml.UniversalFirebaseMLCommon.*;
-class UniversalFirebaseMLVisionTextRecognizerModule extends UniversalFirebaseModule {
- UniversalFirebaseMLVisionTextRecognizerModule(Context context, String serviceName) {
+class UniversalFirebaseMLTextRecognizerModule extends UniversalFirebaseModule {
+ UniversalFirebaseMLTextRecognizerModule(Context context, String serviceName) {
super(context, serviceName);
}
- Task> textRecognizerProcessImage(
- String appName,
- String stringUri
- ) {
- return Tasks.call(getExecutor(), () -> {
- FirebaseApp firebaseApp = FirebaseApp.getInstance(appName);
- FirebaseVisionTextRecognizer detector = FirebaseVision.getInstance(firebaseApp)
- .getOnDeviceTextRecognizer();
-
- FirebaseVisionImage image = FirebaseVisionImage.fromFilePath(
- getContext(),
- SharedUtils.getUri(stringUri)
- );
-
- FirebaseVisionText result = Tasks.await(detector.processImage(image));
- return getFirebaseVisionTextMap(result);
- });
- }
-
Task> cloudTextRecognizerProcessImage(
String appName,
String stringUri,
diff --git a/packages/ml/android/src/reactnative/AndroidManifest.xml b/packages/ml/android/src/reactnative/AndroidManifest.xml
new file mode 100644
index 00000000000..d55b471c2c7
--- /dev/null
+++ b/packages/ml/android/src/reactnative/AndroidManifest.xml
@@ -0,0 +1,2 @@
+
+
diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionDocumentTextRecognizerModule.java b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLDocumentTextRecognizerModule.java
similarity index 71%
rename from packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionDocumentTextRecognizerModule.java
rename to packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLDocumentTextRecognizerModule.java
index b69ee5adbb2..2bbfb583d07 100644
--- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionDocumentTextRecognizerModule.java
+++ b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLDocumentTextRecognizerModule.java
@@ -1,4 +1,4 @@
-package io.invertase.firebase.ml.vision;
+package io.invertase.firebase.ml;
/*
* Copyright (c) 2016-present Invertase Limited & Contributors
@@ -20,13 +20,13 @@
import com.facebook.react.bridge.*;
import io.invertase.firebase.common.ReactNativeFirebaseModule;
-public class RNFirebaseMLVisionDocumentTextRecognizerModule extends ReactNativeFirebaseModule {
- private static final String SERVICE_NAME = "MLVisionDocumentTextRecognizer";
- private final UniversalFirebaseMLVisionDocumentTextRecognizerModule module;
+public class RNFirebaseMLDocumentTextRecognizerModule extends ReactNativeFirebaseModule {
+ private static final String SERVICE_NAME = "MLDocumentTextRecognizer";
+ private final UniversalFirebaseMLDocumentTextRecognizerModule module;
- RNFirebaseMLVisionDocumentTextRecognizerModule(ReactApplicationContext reactContext) {
+ RNFirebaseMLDocumentTextRecognizerModule(ReactApplicationContext reactContext) {
super(reactContext, SERVICE_NAME);
- this.module = new UniversalFirebaseMLVisionDocumentTextRecognizerModule(reactContext, SERVICE_NAME);
+ this.module = new UniversalFirebaseMLDocumentTextRecognizerModule(reactContext, SERVICE_NAME);
}
@ReactMethod
@@ -41,7 +41,7 @@ public void cloudDocumentTextRecognizerProcessImage(
if (task.isSuccessful()) {
promise.resolve(Arguments.makeNativeMap(task.getResult()));
} else {
- String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException(
+ String[] errorCodeAndMessage = UniversalFirebaseMLCommon.getErrorCodeAndMessageFromException(
task.getException());
rejectPromiseWithCodeAndMessage(
promise,
diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionBarcodeDetectorModule.java b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLImageLabelerModule.java
similarity index 54%
rename from packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionBarcodeDetectorModule.java
rename to packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLImageLabelerModule.java
index cacba5626a2..09733e56e72 100644
--- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionBarcodeDetectorModule.java
+++ b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLImageLabelerModule.java
@@ -1,4 +1,4 @@
-package io.invertase.firebase.ml.vision;
+package io.invertase.firebase.ml;
/*
* Copyright (c) 2016-present Invertase Limited & Contributors
@@ -20,23 +20,25 @@
import com.facebook.react.bridge.*;
import io.invertase.firebase.common.ReactNativeFirebaseModule;
-public class RNFirebaseMLVisionBarcodeDetectorModule extends ReactNativeFirebaseModule {
- private static final String SERVICE_NAME = "MLVisionBarcodeDetector";
- private final UniversalFirebaseMLVisionBarcodeDetectorModule module;
+public class RNFirebaseMLImageLabelerModule extends ReactNativeFirebaseModule {
+ private static final String SERVICE_NAME = "MLImageLabeler";
+ private final UniversalFirebaseMLImageLabelerModule module;
- RNFirebaseMLVisionBarcodeDetectorModule(ReactApplicationContext reactContext) {
+ RNFirebaseMLImageLabelerModule(ReactApplicationContext reactContext) {
super(reactContext, SERVICE_NAME);
- this.module = new UniversalFirebaseMLVisionBarcodeDetectorModule(reactContext, SERVICE_NAME);
+ this.module = new UniversalFirebaseMLImageLabelerModule(reactContext, SERVICE_NAME);
}
@ReactMethod
- public void barcodeDetectorProcessImage(String appName, String stringUri, ReadableMap barcodeDetectorOptions, Promise promise) {
- module.barcodeDetectorProcessImage(appName, stringUri, Arguments.toBundle(barcodeDetectorOptions))
- .addOnCompleteListener(getExecutor(), task -> {
+ public void cloudImageLabelerProcessImage(String appName, String stringUri, ReadableMap cloudImageLabelerOptions, Promise promise) {
+ this.module.cloudImageLabelerProcessImage(appName, stringUri, Arguments.toBundle(cloudImageLabelerOptions))
+ .addOnCompleteListener(task -> {
if (task.isSuccessful()) {
- promise.resolve(Arguments.makeNativeArray(task.getResult()));
+ promise.resolve(
+ Arguments.makeNativeArray(task.getResult())
+ );
} else {
- String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException(
+ String[] errorCodeAndMessage = UniversalFirebaseMLCommon.getErrorCodeAndMessageFromException(
task.getException());
rejectPromiseWithCodeAndMessage(
promise,
@@ -47,5 +49,4 @@ public void barcodeDetectorProcessImage(String appName, String stringUri, Readab
}
});
}
-
}
diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionLandmarkRecognizerModule.java b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLLandmarkRecognizerModule.java
similarity index 72%
rename from packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionLandmarkRecognizerModule.java
rename to packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLLandmarkRecognizerModule.java
index 71e731c7db5..37730a87883 100644
--- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionLandmarkRecognizerModule.java
+++ b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLLandmarkRecognizerModule.java
@@ -1,4 +1,4 @@
-package io.invertase.firebase.ml.vision;
+package io.invertase.firebase.ml;
/*
* Copyright (c) 2016-present Invertase Limited & Contributors
@@ -20,13 +20,13 @@
import com.facebook.react.bridge.*;
import io.invertase.firebase.common.ReactNativeFirebaseModule;
-public class RNFirebaseMLVisionLandmarkRecognizerModule extends ReactNativeFirebaseModule {
- private static final String SERVICE_NAME = "MLVisionLandmarkRecognizer";
- private UniversalFirebaseMLVisionLandmarkRecognizerModule module;
+public class RNFirebaseMLLandmarkRecognizerModule extends ReactNativeFirebaseModule {
+ private static final String SERVICE_NAME = "MLLandmarkRecognizer";
+ private UniversalFirebaseMLLandmarkRecognizerModule module;
- RNFirebaseMLVisionLandmarkRecognizerModule(ReactApplicationContext reactContext) {
+ RNFirebaseMLLandmarkRecognizerModule(ReactApplicationContext reactContext) {
super(reactContext, SERVICE_NAME);
- this.module = new UniversalFirebaseMLVisionLandmarkRecognizerModule(reactContext, SERVICE_NAME);
+ this.module = new UniversalFirebaseMLLandmarkRecognizerModule(reactContext, SERVICE_NAME);
}
@ReactMethod
@@ -38,7 +38,7 @@ public void cloudLandmarkRecognizerProcessImage(String appName, String stringUri
Arguments.makeNativeArray(task.getResult())
);
} else {
- String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException(
+ String[] errorCodeAndMessage = UniversalFirebaseMLCommon.getErrorCodeAndMessageFromException(
task.getException());
rejectPromiseWithCodeAndMessage(
promise,
diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionFaceDetectorModule.java b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLTextRecognizerModule.java
similarity index 59%
rename from packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionFaceDetectorModule.java
rename to packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLTextRecognizerModule.java
index e2bafafa8d1..11231a32dc3 100644
--- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionFaceDetectorModule.java
+++ b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLTextRecognizerModule.java
@@ -1,4 +1,4 @@
-package io.invertase.firebase.ml.vision;
+package io.invertase.firebase.ml;
/*
* Copyright (c) 2016-present Invertase Limited & Contributors
@@ -20,30 +20,28 @@
import com.facebook.react.bridge.*;
import io.invertase.firebase.common.ReactNativeFirebaseModule;
-public class RNFirebaseMLVisionFaceDetectorModule extends ReactNativeFirebaseModule {
- private static final String SERVICE_NAME = "MLVisionFaceDetector";
- private final UniversalFirebaseMLVisionFaceDetectorModule module;
+public class RNFirebaseMLTextRecognizerModule extends ReactNativeFirebaseModule {
+ private static final String SERVICE_NAME = "MLTextRecognizer";
+ private final UniversalFirebaseMLTextRecognizerModule module;
- RNFirebaseMLVisionFaceDetectorModule(ReactApplicationContext reactContext) {
+ RNFirebaseMLTextRecognizerModule(ReactApplicationContext reactContext) {
super(reactContext, SERVICE_NAME);
- this.module = new UniversalFirebaseMLVisionFaceDetectorModule(reactContext, SERVICE_NAME);
+ this.module = new UniversalFirebaseMLTextRecognizerModule(reactContext, SERVICE_NAME);
}
@ReactMethod
- public void faceDetectorProcessImage(
+ public void cloudTextRecognizerProcessImage(
String appName,
String stringUri,
- ReadableMap faceDetectorOptionsMap,
+ ReadableMap cloudTextRecognizerOptions,
Promise promise
) {
- module.faceDetectorProcessImage(appName, stringUri, Arguments.toBundle(faceDetectorOptionsMap))
+ module.cloudTextRecognizerProcessImage(appName, stringUri, Arguments.toBundle(cloudTextRecognizerOptions))
.addOnCompleteListener(getExecutor(), task -> {
if (task.isSuccessful()) {
- promise.resolve(
- Arguments.makeNativeArray(task.getResult())
- );
+ promise.resolve(Arguments.makeNativeMap(task.getResult()));
} else {
- String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException(
+ String[] errorCodeAndMessage = UniversalFirebaseMLCommon.getErrorCodeAndMessageFromException(
task.getException());
rejectPromiseWithCodeAndMessage(
promise,
@@ -54,5 +52,4 @@ public void faceDetectorProcessImage(
}
});
}
-
}
diff --git a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/ReactNativeFirebaseMLNaturalLanguagePackage.java b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/ReactNativeFirebaseMLPackage.java
similarity index 63%
rename from packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/ReactNativeFirebaseMLNaturalLanguagePackage.java
rename to packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/ReactNativeFirebaseMLPackage.java
index 4b2359d60a5..268b8618c10 100644
--- a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/ReactNativeFirebaseMLNaturalLanguagePackage.java
+++ b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/ReactNativeFirebaseMLPackage.java
@@ -1,4 +1,4 @@
-package io.invertase.firebase.ml.naturallanguage;
+package io.invertase.firebase.ml;
/*
* Copyright (c) 2016-present Invertase Limited & Contributors
@@ -21,40 +21,23 @@
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.uimanager.ViewManager;
+import io.invertase.firebase.common.ReactNativeFirebaseJSON;
+import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import javax.annotation.Nonnull;
-
-import io.invertase.firebase.common.ReactNativeFirebaseJSON;
-
@SuppressWarnings("unused")
-public class ReactNativeFirebaseMLNaturalLanguagePackage implements ReactPackage {
+public class ReactNativeFirebaseMLPackage implements ReactPackage {
@Nonnull
@Override
public List createNativeModules(@Nonnull ReactApplicationContext reactContext) {
List modules = new ArrayList<>();
-
- if (ReactNativeFirebaseJSON
- .getSharedInstance()
- .getBooleanValue("ml_natural_language_language_id_model", false)) {
- modules.add(new RNFirebaseMLNaturalLanguageIdModule(reactContext));
- }
-
- if (ReactNativeFirebaseJSON
- .getSharedInstance()
- .getBooleanValue("ml_natural_language_translate_model", false)) {
- modules.add(new RNFirebaseMLNaturalLanguageTranslateModule(reactContext));
- }
-
- if (ReactNativeFirebaseJSON
- .getSharedInstance()
- .getBooleanValue("ml_natural_language_smart_reply_model", false)) {
- modules.add(new RNFirebaseMLNaturalLanguageSmartReplyModule(reactContext));
- }
-
+ modules.add(new RNFirebaseMLTextRecognizerModule(reactContext));
+ modules.add(new RNFirebaseMLLandmarkRecognizerModule(reactContext));
+ modules.add(new RNFirebaseMLDocumentTextRecognizerModule(reactContext));
+ modules.add(new RNFirebaseMLImageLabelerModule(reactContext));
return modules;
}
diff --git a/packages/ml-vision/e2e/documentText.e2e.js b/packages/ml/e2e/documentText.e2e.js
similarity index 83%
rename from packages/ml-vision/e2e/documentText.e2e.js
rename to packages/ml/e2e/documentText.e2e.js
index 4be8f13122f..be1bc4d28d6 100644
--- a/packages/ml-vision/e2e/documentText.e2e.js
+++ b/packages/ml/e2e/documentText.e2e.js
@@ -42,7 +42,7 @@ function documentTextBaseElementValidate(documentTextBase) {
let testImageFile;
-describe('mlkit.vision.document.text', () => {
+describe('ml.document.text', () => {
before(async () => {
testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/text.png`;
await firebase
@@ -51,10 +51,10 @@ describe('mlkit.vision.document.text', () => {
.writeToFile(testImageFile);
});
- describe('VisionCloudDocumentTextRecognizerOptions', () => {
+ describe('MLCloudDocumentTextRecognizerOptions', () => {
it('throws if not an object', async () => {
try {
- await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, 'foo');
+ await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, 'foo');
return Promise.reject(new Error('Did not throw Error.'));
} catch (e) {
e.message.should.containEql(
@@ -66,7 +66,7 @@ describe('mlkit.vision.document.text', () => {
it('throws if enforceCertFingerprintMatch is not a boolean', async () => {
try {
- await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, {
enforceCertFingerprintMatch: 'true',
});
return Promise.reject(new Error('Did not throw Error.'));
@@ -79,14 +79,14 @@ describe('mlkit.vision.document.text', () => {
});
it('sets enforceCertFingerprintMatch', async () => {
- await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, {
enforceCertFingerprintMatch: false,
});
});
it('throws if apiKeyOverride is not a string', async () => {
try {
- await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, {
apiKeyOverride: true,
});
return Promise.reject(new Error('Did not throw Error.'));
@@ -100,7 +100,7 @@ describe('mlkit.vision.document.text', () => {
it('throws if languageHints is not an array', async () => {
try {
- await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, {
languageHints: 'en',
});
return Promise.reject(new Error('Did not throw Error.'));
@@ -114,7 +114,7 @@ describe('mlkit.vision.document.text', () => {
it('throws if languageHints is empty array', async () => {
try {
- await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, {
languageHints: [],
});
return Promise.reject(new Error('Did not throw Error.'));
@@ -128,7 +128,7 @@ describe('mlkit.vision.document.text', () => {
it('throws if languageHints contains non-string', async () => {
try {
- await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, {
languageHints: [123],
});
return Promise.reject(new Error('Did not throw Error.'));
@@ -141,7 +141,7 @@ describe('mlkit.vision.document.text', () => {
});
it('sets hinted languages', async () => {
- await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, {
languageHints: ['fr'],
});
});
@@ -150,7 +150,7 @@ describe('mlkit.vision.document.text', () => {
describe('cloudDocumentTextRecognizerProcessImage()', () => {
it('should throw if image path is not a string', () => {
try {
- firebase.vision().cloudDocumentTextRecognizerProcessImage(123);
+ firebase.ml().cloudDocumentTextRecognizerProcessImage(123);
return Promise.reject(new Error('Did not throw an Error.'));
} catch (error) {
error.message.should.containEql("'localImageFilePath' expected a string local file path");
@@ -158,8 +158,8 @@ describe('mlkit.vision.document.text', () => {
}
});
- it('should return a VisionDocumentText representation for an image', async () => {
- const res = await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile);
+ it('should return a MLDocumentText representation for an image', async () => {
+ const res = await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile);
res.text.should.be.a.String();
res.blocks.should.be.an.Array();
diff --git a/packages/ml-vision/e2e/label.e2e.js b/packages/ml/e2e/label.e2e.js
similarity index 51%
rename from packages/ml-vision/e2e/label.e2e.js
rename to packages/ml/e2e/label.e2e.js
index d305c41f1ff..7b7c39ccb91 100644
--- a/packages/ml-vision/e2e/label.e2e.js
+++ b/packages/ml/e2e/label.e2e.js
@@ -17,7 +17,7 @@
let testImageFile;
-describe('mlkit.vision.label', () => {
+describe('ml.label', () => {
before(async () => {
testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/crab.jpg`;
await firebase
@@ -26,35 +26,10 @@ describe('mlkit.vision.label', () => {
.writeToFile(testImageFile);
});
- describe('imageLabelerProcessImage()', () => {
- it('should throw if image path is not a string', () => {
- try {
- firebase.vision().imageLabelerProcessImage(123);
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql("'localImageFilePath' expected a string local file path");
- return Promise.resolve();
- }
- });
-
- it('should return a local label array', async () => {
- const res = await firebase.vision().imageLabelerProcessImage(testImageFile);
-
- res.should.be.Array();
- res.length.should.be.greaterThan(0);
-
- res.forEach(i => {
- i.text.should.be.String();
- i.entityId.should.be.String();
- i.confidence.should.be.Number();
- });
- });
- });
-
describe('cloudImageLabelerProcessImage()', () => {
it('should throw if image path is not a string', () => {
try {
- firebase.vision().cloudImageLabelerProcessImage(123);
+ firebase.ml().cloudImageLabelerProcessImage(123);
return Promise.reject(new Error('Did not throw an Error.'));
} catch (error) {
error.message.should.containEql("'localImageFilePath' expected a string local file path");
@@ -62,65 +37,8 @@ describe('mlkit.vision.label', () => {
}
});
- it('should return a cloud label array', async () => {
- const res = await firebase.vision().cloudImageLabelerProcessImage(testImageFile);
-
- res.should.be.Array();
- res.length.should.be.greaterThan(0);
-
- res.forEach(i => {
- i.text.should.be.String();
- i.entityId.should.be.String();
- i.confidence.should.be.Number();
- });
- });
- });
-
- describe('VisionImageLabelerOptions', () => {
- it('throws if not an object', async () => {
- try {
- await firebase.vision().imageLabelerProcessImage(testImageFile, '123');
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql("'imageLabelerOptions' expected an object value");
- return Promise.resolve();
- }
- });
-
- describe('confidenceThreshold', () => {
- it('should throw if confidence threshold is not a number', async () => {
- try {
- await firebase.vision().imageLabelerProcessImage(testImageFile, {
- confidenceThreshold: '0.5',
- });
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql(
- "'imageLabelerOptions.confidenceThreshold' expected a number value between 0 & 1",
- );
- return Promise.resolve();
- }
- });
- });
-
- it('should throw if confidence threshold is not between 0 & 1', async () => {
- try {
- await firebase.vision().imageLabelerProcessImage(testImageFile, {
- confidenceThreshold: -0.2,
- });
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql(
- "'imageLabelerOptions.confidenceThreshold' expected a number value between 0 & 1",
- );
- return Promise.resolve();
- }
- });
-
- it('should accept options and return local labels', async () => {
- const res = await firebase.vision().imageLabelerProcessImage(testImageFile, {
- confidenceThreshold: 0.8,
- });
+ xit('should return a cloud label array', async () => {
+ const res = await firebase.ml().cloudImageLabelerProcessImage(testImageFile);
res.should.be.Array();
res.length.should.be.greaterThan(0);
@@ -133,10 +51,10 @@ describe('mlkit.vision.label', () => {
});
});
- describe('VisionCloudImageLabelerOptions', () => {
+ describe('MLCloudImageLabelerOptions', () => {
it('throws if not an object', async () => {
try {
- await firebase.vision().cloudImageLabelerProcessImage(testImageFile, '123');
+ await firebase.ml().cloudImageLabelerProcessImage(testImageFile, '123');
return Promise.reject(new Error('Did not throw an Error.'));
} catch (error) {
error.message.should.containEql("'cloudImageLabelerOptions' expected an object value");
@@ -147,7 +65,7 @@ describe('mlkit.vision.label', () => {
describe('confidenceThreshold', () => {
it('should throw if confidence threshold is not a number', async () => {
try {
- await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+ await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
confidenceThreshold: '0.2',
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -161,7 +79,7 @@ describe('mlkit.vision.label', () => {
it('should throw if confidence threshold is not between 0 & 1', async () => {
try {
- await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+ await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
confidenceThreshold: 1.1,
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -173,8 +91,8 @@ describe('mlkit.vision.label', () => {
}
});
- it('should accept options and return cloud labels', async () => {
- const res = await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+ xit('should accept options and return cloud labels', async () => {
+ const res = await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
confidenceThreshold: 0.8,
});
@@ -192,7 +110,7 @@ describe('mlkit.vision.label', () => {
describe('enforceCertFingerprintMatch', () => {
it('throws if not a boolean', async () => {
try {
- await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+ await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
enforceCertFingerprintMatch: 'true',
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -204,17 +122,17 @@ describe('mlkit.vision.label', () => {
}
});
- it('sets enforceCertFingerprintMatch', async () => {
- await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+ xit('sets enforceCertFingerprintMatch', async () => {
+ await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
enforceCertFingerprintMatch: false,
});
});
});
- describe('apiKeyOverride', () => {
+ xdescribe('apiKeyOverride', () => {
it('throws if apiKeyOverride is not a string', async () => {
try {
- await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+ await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
apiKeyOverride: true,
});
return Promise.reject(new Error('Did not throw Error.'));
diff --git a/packages/ml-vision/e2e/landmark.e2e.js b/packages/ml/e2e/landmark.e2e.js
similarity index 71%
rename from packages/ml-vision/e2e/landmark.e2e.js
rename to packages/ml/e2e/landmark.e2e.js
index fbe6b94301d..1d053c7f60a 100644
--- a/packages/ml-vision/e2e/landmark.e2e.js
+++ b/packages/ml/e2e/landmark.e2e.js
@@ -16,7 +16,7 @@
*/
let testImageFile;
-describe('mlkit.vision.landmark', () => {
+describe('ml.landmark', () => {
before(async () => {
testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/landmark.jpg`;
await firebase
@@ -28,7 +28,7 @@ describe('mlkit.vision.landmark', () => {
describe('cloudLandmarkRecognizerProcessImage()', () => {
it('should throw if image path is not a string', () => {
try {
- firebase.vision().cloudLandmarkRecognizerProcessImage(123);
+ firebase.ml().cloudLandmarkRecognizerProcessImage(123);
return Promise.reject(new Error('Did not throw an Error.'));
} catch (error) {
error.message.should.containEql("'localImageFilePath' expected a string local file path");
@@ -36,8 +36,8 @@ describe('mlkit.vision.landmark', () => {
}
});
- it('should return an array of landmark information', async () => {
- const res = await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile);
+ xit('should return an array of landmark information', async () => {
+ const res = await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile);
res.should.be.Array();
res.length.should.be.greaterThan(0);
@@ -59,10 +59,10 @@ describe('mlkit.vision.landmark', () => {
});
});
- describe('VisionCloudLandmarkRecognizerOptions', () => {
+ describe('MLCloudLandmarkRecognizerOptions', () => {
it('throws if not an object', async () => {
try {
- await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, '123');
+ await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, '123');
return Promise.reject(new Error('Did not throw an Error.'));
} catch (error) {
error.message.should.containEql(
@@ -75,7 +75,7 @@ describe('mlkit.vision.landmark', () => {
describe('cloudLandmarkRecognizerOptions', () => {
it('throws if not a boolean', async () => {
try {
- await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
enforceCertFingerprintMatch: 'false',
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -87,15 +87,15 @@ describe('mlkit.vision.landmark', () => {
}
});
- it('sets cloudLandmarkRecognizerOptions', async () => {
- await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+ xit('sets cloudLandmarkRecognizerOptions', async () => {
+ await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
enforceCertFingerprintMatch: false,
});
});
it('throws if apiKeyOverride is not a string', async () => {
try {
- await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
apiKeyOverride: true,
});
return Promise.reject(new Error('Did not throw Error.'));
@@ -108,10 +108,10 @@ describe('mlkit.vision.landmark', () => {
});
});
// TODO temporarily disable test suite - is flakey on CI - needs investigating
- xdescribe('maxResults', () => {
+ describe('maxResults', () => {
it('throws if maxResults is not a number', async () => {
try {
- await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
maxResults: '2',
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -123,8 +123,8 @@ describe('mlkit.vision.landmark', () => {
}
});
- it('limits the maximum results', async () => {
- const res = await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+ xit('limits the maximum results', async () => {
+ const res = await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
maxResults: 3,
});
@@ -138,7 +138,7 @@ describe('mlkit.vision.landmark', () => {
describe('modelType', () => {
it('throws if model is invalid', async () => {
try {
- await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
modelType: 3,
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -150,19 +150,19 @@ describe('mlkit.vision.landmark', () => {
}
});
- it('sets modelType', async () => {
- await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
- modelType: firebase.vision.VisionCloudLandmarkRecognizerModelType.STABLE_MODEL,
+ xit('sets modelType', async () => {
+ await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
+ modelType: firebase.ml.MLCloudLandmarkRecognizerModelType.STABLE_MODEL,
});
- await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
- modelType: firebase.vision.VisionCloudLandmarkRecognizerModelType.LATEST_MODEL,
+ await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
+ modelType: firebase.ml.MLCloudLandmarkRecognizerModelType.LATEST_MODEL,
});
});
- it('uses a latest model', async () => {
- const res = await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
- modelType: firebase.vision.VisionCloudLandmarkRecognizerModelType.LATEST_MODEL,
+ xit('uses a latest model', async () => {
+ const res = await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
+ modelType: firebase.ml.MLCloudLandmarkRecognizerModelType.LATEST_MODEL,
});
res.should.be.Array();
});
diff --git a/packages/ml-vision/e2e/mlKitVision.e2e.js b/packages/ml/e2e/ml.e2e.js
similarity index 75%
rename from packages/ml-vision/e2e/mlKitVision.e2e.js
rename to packages/ml/e2e/ml.e2e.js
index ca938d39356..5f19eee1a6a 100644
--- a/packages/ml-vision/e2e/mlKitVision.e2e.js
+++ b/packages/ml/e2e/ml.e2e.js
@@ -15,24 +15,22 @@
*
*/
-describe('vision()', () => {
+describe('ml()', () => {
describe('namespace', () => {
it('accessible from firebase.app()', () => {
const app = firebase.app();
- should.exist(app.vision);
- app.vision().app.should.equal(app);
+ should.exist(app.ml);
+ app.ml().app.should.equal(app);
});
it('supports multiple apps', async () => {
- firebase.vision().app.name.should.equal('[DEFAULT]');
+ firebase.ml().app.name.should.equal('[DEFAULT]');
- firebase
- .vision(firebase.app('secondaryFromNative'))
- .app.name.should.equal('secondaryFromNative');
+ firebase.ml(firebase.app('secondaryFromNative')).app.name.should.equal('secondaryFromNative');
firebase
.app('secondaryFromNative')
- .vision()
+ .ml()
.app.name.should.equal('secondaryFromNative');
});
});
diff --git a/packages/ml-vision/e2e/text.e2e.js b/packages/ml/e2e/text.e2e.js
similarity index 71%
rename from packages/ml-vision/e2e/text.e2e.js
rename to packages/ml/e2e/text.e2e.js
index 13c9da73ffa..73ddd028649 100644
--- a/packages/ml-vision/e2e/text.e2e.js
+++ b/packages/ml/e2e/text.e2e.js
@@ -69,8 +69,7 @@ function textBaseElementValidate(textBase, cloud = false) {
let testImageFile;
-// TODO allow android testing once ML model download manager support implemented
-ios.describe('mlkit.vision.text', () => {
+describe('ml.text', () => {
before(async () => {
testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/text.png`;
await firebase
@@ -79,43 +78,10 @@ ios.describe('mlkit.vision.text', () => {
.writeToFile(testImageFile);
});
- describe('textRecognizerProcessImage()', () => {
- it('should throw if image path is not a string', () => {
- try {
- firebase.vision().textRecognizerProcessImage(123);
- return Promise.reject(new Error('Did not throw an Error.'));
- } catch (error) {
- error.message.should.containEql("'localImageFilePath' expected a string local file path");
- return Promise.resolve();
- }
- });
-
- it('should return a VisionText representation for an image', async () => {
- const res = await firebase.vision().textRecognizerProcessImage(testImageFile);
- res.text.should.be.a.String();
- res.blocks.should.be.an.Array();
- res.blocks.length.should.be.greaterThan(0);
-
- res.blocks.forEach(textBlock => {
- textBaseElementValidate(textBlock);
- textBlock.lines.should.be.an.Array();
- textBlock.lines.length.should.be.greaterThan(0);
- textBlock.lines.forEach(line => {
- textBaseElementValidate(line);
- line.elements.should.be.an.Array();
- line.elements.length.should.be.greaterThan(0);
- line.elements.forEach(element => {
- textBaseElementValidate(element);
- });
- });
- });
- });
- });
-
- describe('VisionCloudTextRecognizerOptions', () => {
+ describe('MLCloudTextRecognizerOptions', () => {
it('throws if not an object', async () => {
try {
- await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, '123');
+ await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, '123');
return Promise.reject(new Error('Did not throw an Error.'));
} catch (error) {
error.message.should.containEql("'cloudTextRecognizerOptions' expected an object value");
@@ -126,7 +92,7 @@ ios.describe('mlkit.vision.text', () => {
describe('enforceCertFingerprintMatch', () => {
it('throws if not a boolean', async () => {
try {
- await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
enforceCertFingerprintMatch: 'false',
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -139,7 +105,7 @@ ios.describe('mlkit.vision.text', () => {
});
it('sets a value', async () => {
- await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
enforceCertFingerprintMatch: false,
});
});
@@ -148,7 +114,7 @@ ios.describe('mlkit.vision.text', () => {
describe('apiKeyOverride', () => {
it('throws if apiKeyOverride is not a string', async () => {
try {
- await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
apiKeyOverride: true,
});
return Promise.reject(new Error('Did not throw Error.'));
@@ -164,7 +130,7 @@ ios.describe('mlkit.vision.text', () => {
describe('languageHints', () => {
it('throws if not array', async () => {
try {
- await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
languageHints: 'en',
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -178,7 +144,7 @@ ios.describe('mlkit.vision.text', () => {
it('throws if empty array', async () => {
try {
- await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
languageHints: [],
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -192,7 +158,7 @@ ios.describe('mlkit.vision.text', () => {
it('throws if array contains non-string values', async () => {
try {
- await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
languageHints: [123],
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -205,7 +171,7 @@ ios.describe('mlkit.vision.text', () => {
});
it('sets hintedLanguages', async () => {
- await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
languageHints: ['fr'],
});
});
@@ -214,7 +180,7 @@ ios.describe('mlkit.vision.text', () => {
describe('modelType', () => {
it('throws if invalid type', async () => {
try {
- await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+ await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
modelType: 7,
});
return Promise.reject(new Error('Did not throw an Error.'));
@@ -225,12 +191,8 @@ ios.describe('mlkit.vision.text', () => {
});
it('sets modelType', async () => {
- await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
- modelType: firebase.vision.VisionCloudTextRecognizerModelType.SPARSE_MODEL,
- });
-
- await firebase.vision().textRecognizerProcessImage(testImageFile, {
- modelType: firebase.vision.VisionCloudTextRecognizerModelType.DENSE_MODEL,
+ await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
+ modelType: firebase.ml.MLCloudTextRecognizerModelType.SPARSE_MODEL,
});
});
});
@@ -239,7 +201,7 @@ ios.describe('mlkit.vision.text', () => {
describe('cloudTextRecognizerProcessImage()', () => {
it('should throw if image path is not a string', () => {
try {
- firebase.vision().cloudTextRecognizerProcessImage(123);
+ firebase.ml().cloudTextRecognizerProcessImage(123);
return Promise.reject(new Error('Did not throw an Error.'));
} catch (error) {
error.message.should.containEql("'localImageFilePath' expected a string local file path");
@@ -248,7 +210,7 @@ ios.describe('mlkit.vision.text', () => {
});
it('should return a VisionText representation for an image', async () => {
- const res = await firebase.vision().cloudTextRecognizerProcessImage(testImageFile);
+ const res = await firebase.ml().cloudTextRecognizerProcessImage(testImageFile);
res.text.should.be.a.String();
res.blocks.should.be.an.Array();
res.blocks.length.should.be.greaterThan(0);
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.pbxproj b/packages/ml/ios/RNFBML.xcodeproj/project.pbxproj
similarity index 66%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.pbxproj
rename to packages/ml/ios/RNFBML.xcodeproj/project.pbxproj
index f99e91f0d00..ab76410fa21 100644
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.pbxproj
+++ b/packages/ml/ios/RNFBML.xcodeproj/project.pbxproj
@@ -7,11 +7,11 @@
objects = {
/* Begin PBXBuildFile section */
- 27038A8322A16C43001E082B /* RCTConvert+FIRLanguageIdentificationOptions.m in Sources */ = {isa = PBXBuildFile; fileRef = 27038A8222A16C43001E082B /* RCTConvert+FIRLanguageIdentificationOptions.m */; };
- 2744B98621F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 2744B98521F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.m */; };
- 27760EA1229ED5D000F5F127 /* RNFBMLNaturalLanguageTranslateModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 27760EA0229ED5D000F5F127 /* RNFBMLNaturalLanguageTranslateModule.m */; };
- 27760EA4229ED74E00F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 27760EA3229ED74E00F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.m */; };
- 27760EA722A0064100F5F127 /* RCTConvert+FIRTextMessageArray.m in Sources */ = {isa = PBXBuildFile; fileRef = 27760EA622A0064100F5F127 /* RCTConvert+FIRTextMessageArray.m */; };
+ 8B06D3F322F84F7200A5B542 /* RNFBMLLandmarkRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D3F222F84F7200A5B542 /* RNFBMLLandmarkRecognizerModule.m */; };
+ 8B06D3FC22F863AE00A5B542 /* RNFBMLCommon.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D3FB22F863AE00A5B542 /* RNFBMLCommon.m */; };
+ 8B06D40622F97B4900A5B542 /* RNFBMLDocumentTextRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40522F97B4900A5B542 /* RNFBMLDocumentTextRecognizerModule.m */; };
+ 8B06D40A22F989EF00A5B542 /* RNFBMLTextRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40922F989EF00A5B542 /* RNFBMLTextRecognizerModule.m */; };
+ 8B06D40E22F99DF900A5B542 /* RNFBMLImageLabelerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40D22F99DF900A5B542 /* RNFBMLImageLabelerModule.m */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
@@ -27,17 +27,17 @@
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
- 27038A8122A16C31001E082B /* RCTConvert+FIRLanguageIdentificationOptions.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "RCTConvert+FIRLanguageIdentificationOptions.h"; sourceTree = ""; };
- 27038A8222A16C43001E082B /* RCTConvert+FIRLanguageIdentificationOptions.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = "RCTConvert+FIRLanguageIdentificationOptions.m"; sourceTree = ""; };
- 2744B98221F45429004F8E3F /* libRNFBMLNaturalLanguage.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRNFBMLNaturalLanguage.a; sourceTree = BUILT_PRODUCTS_DIR; };
- 2744B98421F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = RNFBMLNaturalLanguageIdModule.h; path = RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.h; sourceTree = SOURCE_ROOT; };
- 2744B98521F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; name = RNFBMLNaturalLanguageIdModule.m; path = RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.m; sourceTree = SOURCE_ROOT; };
- 27760E9F229ED5B400F5F127 /* RNFBMLNaturalLanguageTranslateModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLNaturalLanguageTranslateModule.h; sourceTree = ""; };
- 27760EA0229ED5D000F5F127 /* RNFBMLNaturalLanguageTranslateModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLNaturalLanguageTranslateModule.m; sourceTree = ""; };
- 27760EA2229ED5F600F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLNaturalLanguageSmartReplyModule.h; sourceTree = ""; };
- 27760EA3229ED74E00F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLNaturalLanguageSmartReplyModule.m; sourceTree = ""; };
- 27760EA522A0064100F5F127 /* RCTConvert+FIRTextMessageArray.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "RCTConvert+FIRTextMessageArray.h"; path = "RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.h"; sourceTree = SOURCE_ROOT; };
- 27760EA622A0064100F5F127 /* RCTConvert+FIRTextMessageArray.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "RCTConvert+FIRTextMessageArray.m"; sourceTree = ""; };
+ 2744B98221F45429004F8E3F /* libRNFBML.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRNFBML.a; sourceTree = BUILT_PRODUCTS_DIR; };
+ 8B06D3F122F84F6500A5B542 /* RNFBMLLandmarkRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLLandmarkRecognizerModule.h; sourceTree = ""; };
+ 8B06D3F222F84F7200A5B542 /* RNFBMLLandmarkRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLLandmarkRecognizerModule.m; sourceTree = ""; };
+ 8B06D3FA22F863A400A5B542 /* RNFBMLCommon.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLCommon.h; sourceTree = ""; };
+ 8B06D3FB22F863AE00A5B542 /* RNFBMLCommon.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLCommon.m; sourceTree = ""; };
+ 8B06D40422F97B3600A5B542 /* RNFBMLDocumentTextRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLDocumentTextRecognizerModule.h; sourceTree = ""; };
+ 8B06D40522F97B4900A5B542 /* RNFBMLDocumentTextRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLDocumentTextRecognizerModule.m; sourceTree = ""; };
+ 8B06D40822F989E400A5B542 /* RNFBMLTextRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLTextRecognizerModule.h; sourceTree = ""; };
+ 8B06D40922F989EF00A5B542 /* RNFBMLTextRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLTextRecognizerModule.m; sourceTree = ""; };
+ 8B06D40C22F99DEF00A5B542 /* RNFBMLImageLabelerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLImageLabelerModule.h; sourceTree = ""; };
+ 8B06D40D22F99DF900A5B542 /* RNFBMLImageLabelerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLImageLabelerModule.m; sourceTree = ""; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -54,32 +54,32 @@
2744B97521F452B8004F8E3F /* Products */ = {
isa = PBXGroup;
children = (
- 2744B98221F45429004F8E3F /* libRNFBMLNaturalLanguage.a */,
+ 2744B98221F45429004F8E3F /* libRNFBML.a */,
);
name = Products;
sourceTree = "";
};
- 2744B98321F45429004F8E3F /* RNFBMLNaturalLanguage */ = {
+ 2744B98321F45429004F8E3F /* RNFBML */ = {
isa = PBXGroup;
children = (
- 27760EA522A0064100F5F127 /* RCTConvert+FIRTextMessageArray.h */,
- 27760EA622A0064100F5F127 /* RCTConvert+FIRTextMessageArray.m */,
- 2744B98421F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.h */,
- 2744B98521F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.m */,
- 27760E9F229ED5B400F5F127 /* RNFBMLNaturalLanguageTranslateModule.h */,
- 27760EA0229ED5D000F5F127 /* RNFBMLNaturalLanguageTranslateModule.m */,
- 27760EA2229ED5F600F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.h */,
- 27760EA3229ED74E00F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.m */,
- 27038A8122A16C31001E082B /* RCTConvert+FIRLanguageIdentificationOptions.h */,
- 27038A8222A16C43001E082B /* RCTConvert+FIRLanguageIdentificationOptions.m */,
+ 8B06D3F122F84F6500A5B542 /* RNFBMLLandmarkRecognizerModule.h */,
+ 8B06D3F222F84F7200A5B542 /* RNFBMLLandmarkRecognizerModule.m */,
+ 8B06D3FA22F863A400A5B542 /* RNFBMLCommon.h */,
+ 8B06D3FB22F863AE00A5B542 /* RNFBMLCommon.m */,
+ 8B06D40422F97B3600A5B542 /* RNFBMLDocumentTextRecognizerModule.h */,
+ 8B06D40522F97B4900A5B542 /* RNFBMLDocumentTextRecognizerModule.m */,
+ 8B06D40822F989E400A5B542 /* RNFBMLTextRecognizerModule.h */,
+ 8B06D40922F989EF00A5B542 /* RNFBMLTextRecognizerModule.m */,
+ 8B06D40C22F99DEF00A5B542 /* RNFBMLImageLabelerModule.h */,
+ 8B06D40D22F99DF900A5B542 /* RNFBMLImageLabelerModule.m */,
);
- path = RNFBMLNaturalLanguage;
+ path = RNFBML;
sourceTree = "";
};
3323F52AAFE26B7384BE4DE3 = {
isa = PBXGroup;
children = (
- 2744B98321F45429004F8E3F /* RNFBMLNaturalLanguage */,
+ 2744B98321F45429004F8E3F /* RNFBML */,
2744B97521F452B8004F8E3F /* Products */,
);
sourceTree = "";
@@ -87,9 +87,9 @@
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
- 2744B98121F45429004F8E3F /* RNFBMLNaturalLanguage */ = {
+ 2744B98121F45429004F8E3F /* RNFBML */ = {
isa = PBXNativeTarget;
- buildConfigurationList = 2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBMLNaturalLanguage" */;
+ buildConfigurationList = 2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBML" */;
buildPhases = (
2744B97E21F45429004F8E3F /* Sources */,
2744B97F21F45429004F8E3F /* Frameworks */,
@@ -99,9 +99,9 @@
);
dependencies = (
);
- name = RNFBMLNaturalLanguage;
- productName = RNFBMLNaturalLanguage;
- productReference = 2744B98221F45429004F8E3F /* libRNFBMLNaturalLanguage.a */;
+ name = RNFBML;
+ productName = RNFBML;
+ productReference = 2744B98221F45429004F8E3F /* libRNFBML.a */;
productType = "com.apple.product-type.library.static";
};
/* End PBXNativeTarget section */
@@ -110,7 +110,7 @@
3323F95273A95DB34F55C6D7 /* Project object */ = {
isa = PBXProject;
attributes = {
- CLASSPREFIX = RNFBMLNaturalLanguage;
+ CLASSPREFIX = RNFBML;
LastUpgradeCheck = 1010;
ORGANIZATIONNAME = Invertase;
TargetAttributes = {
@@ -120,11 +120,12 @@
};
};
};
- buildConfigurationList = 3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBMLNaturalLanguage" */;
+ buildConfigurationList = 3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBML" */;
compatibilityVersion = "Xcode 8.0";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
+ English,
en,
);
mainGroup = 3323F52AAFE26B7384BE4DE3;
@@ -132,7 +133,7 @@
projectDirPath = "";
projectRoot = "";
targets = (
- 2744B98121F45429004F8E3F /* RNFBMLNaturalLanguage */,
+ 2744B98121F45429004F8E3F /* RNFBML */,
);
};
/* End PBXProject section */
@@ -142,11 +143,11 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
- 27760EA4229ED74E00F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.m in Sources */,
- 2744B98621F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.m in Sources */,
- 27038A8322A16C43001E082B /* RCTConvert+FIRLanguageIdentificationOptions.m in Sources */,
- 27760EA722A0064100F5F127 /* RCTConvert+FIRTextMessageArray.m in Sources */,
- 27760EA1229ED5D000F5F127 /* RNFBMLNaturalLanguageTranslateModule.m in Sources */,
+ 8B06D40E22F99DF900A5B542 /* RNFBMLImageLabelerModule.m in Sources */,
+ 8B06D40622F97B4900A5B542 /* RNFBMLDocumentTextRecognizerModule.m in Sources */,
+ 8B06D40A22F989EF00A5B542 /* RNFBMLTextRecognizerModule.m in Sources */,
+ 8B06D3F322F84F7200A5B542 /* RNFBMLLandmarkRecognizerModule.m in Sources */,
+ 8B06D3FC22F863AE00A5B542 /* RNFBMLCommon.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -347,7 +348,7 @@
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
- 2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBMLNaturalLanguage" */ = {
+ 2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBML" */ = {
isa = XCConfigurationList;
buildConfigurations = (
2744B98921F45429004F8E3F /* Debug */,
@@ -356,7 +357,7 @@
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
- 3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBMLNaturalLanguage" */ = {
+ 3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBML" */ = {
isa = XCConfigurationList;
buildConfigurations = (
3323F7E33E1559A2B9826720 /* Debug */,
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/contents.xcworkspacedata
similarity index 100%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/contents.xcworkspacedata
rename to packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/contents.xcworkspacedata
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
similarity index 100%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
rename to packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings b/packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
similarity index 100%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
rename to packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/xcshareddata/IDETemplateMacros.plist b/packages/ml/ios/RNFBML.xcodeproj/xcshareddata/IDETemplateMacros.plist
similarity index 100%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/xcshareddata/IDETemplateMacros.plist
rename to packages/ml/ios/RNFBML.xcodeproj/xcshareddata/IDETemplateMacros.plist
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.h b/packages/ml/ios/RNFBML/RNFBMLCommon.h
similarity index 75%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.h
rename to packages/ml/ios/RNFBML/RNFBMLCommon.h
index e55df485d95..afbe07d173b 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.h
+++ b/packages/ml/ios/RNFBML/RNFBMLCommon.h
@@ -15,15 +15,11 @@
*
*/
-@interface RNFBMLVisionCommon : NSObject
+@interface RNFBMLCommon : NSObject
+ (NSArray *)rectToIntArray:(CGRect)rect;
-+ (NSDictionary *)contourToDict:(FIRVisionFaceContour *)visionFaceContour;
-
-+ (NSDictionary *)landmarkToDict:(FIRVisionFaceLandmark *)visionFaceLandmark;
-
-+ (NSArray *)visionPointsToArray:(NSArray *_Nullable)points;
++ (NSArray *)pointsToArray:(NSArray *_Nullable)points;
+ (void)UIImageForFilePath:(NSString *)localFilePath completion:(void (^)(
NSArray *errorCodeMessageArray,
diff --git a/packages/ml/ios/RNFBML/RNFBMLCommon.m b/packages/ml/ios/RNFBML/RNFBMLCommon.m
new file mode 100644
index 00000000000..a24faea9847
--- /dev/null
+++ b/packages/ml/ios/RNFBML/RNFBMLCommon.m
@@ -0,0 +1,65 @@
+/**
+ * Copyright (c) 2016-present Invertase Limited & Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this library except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+#import
+#import
+#import "RNFBMLCommon.h"
+
+@implementation RNFBMLCommon
+
++ (NSArray *)rectToIntArray:(CGRect)rect {
+ CGSize size = rect.size;
+ CGPoint point = rect.origin;
+ return @[@(point.x), @(point.y), @(point.x + size.width), @(point.y + size.height)];
+}
+
++ (NSArray *)pointsToArray:(NSArray *_Nullable)points {
+ if (points == nil) {
+ return @[];
+ }
+
+ NSMutableArray *pointsArray = [[NSMutableArray alloc] init];
+ for (NSValue *point in points) {
+ [pointsArray addObject:[self arrayForCGPoint:point.CGPointValue]];
+ }
+
+ return pointsArray;
+}
+
++ (NSArray *)arrayForCGPoint:(CGPoint)point {
+ return @[@(point.x), @(point.y)];
+}
+
++ (NSArray *)arrayForFIRVisionPoint:(FIRVisionPoint *)point {
+ return @[point.x, point.y];
+}
+
++ (void)UIImageForFilePath:(NSString *)localFilePath completion:(void (^)(
+ NSArray *errorCodeMessageArray,
+ UIImage *image
+))completion {
+ if (![[NSFileManager defaultManager] fileExistsAtPath:localFilePath]) {
+ completion(@[@"file-not-found", @"The local file specified does not exist on the device."], nil);
+ } else {
+ dispatch_async(dispatch_get_main_queue(), ^{
+ completion(nil, [RCTConvert UIImage:localFilePath]);
+ });
+ }
+}
+
+@end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.h b/packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.h
similarity index 90%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.h
rename to packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.h
index 4298dfe84c1..eb9a79bb87e 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.h
+++ b/packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.h
@@ -19,6 +19,6 @@
#import
#import
-@interface RNFBMLVisionLandmarkRecognizerModule : NSObject
+@interface RNFBMLDocumentTextRecognizerModule : NSObject
@end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.m b/packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.m
similarity index 88%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.m
rename to packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.m
index 82f4f7ccf10..23d47587049 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.m
+++ b/packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.m
@@ -18,17 +18,17 @@
#import
#import
-#import "RNFBMLVisionDocumentTextRecognizerModule.h"
-#import "RNFBMLVisionCommon.h"
+#import "RNFBMLDocumentTextRecognizerModule.h"
+#import "RNFBMLCommon.h"
-@implementation RNFBMLVisionDocumentTextRecognizerModule
+@implementation RNFBMLDocumentTextRecognizerModule
#pragma mark -
#pragma mark Module Setup
RCT_EXPORT_MODULE();
#pragma mark -
-#pragma mark Firebase ML Kit Vision Methods
+#pragma mark Firebase ML Methods
RCT_EXPORT_METHOD(cloudDocumentTextRecognizerProcessImage:
(FIRApp *) firebaseApp
@@ -37,7 +37,7 @@ @implementation RNFBMLVisionDocumentTextRecognizerModule
: (RCTPromiseResolveBlock)resolve
: (RCTPromiseRejectBlock)reject
) {
- [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
+ [RNFBMLCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
if (errorCodeMessageArray != nil) {
[RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
@"code": errorCodeMessageArray[0],
@@ -79,19 +79,19 @@ @implementation RNFBMLVisionDocumentTextRecognizerModule
resolve(@{
@"text": result.text,
- @"blocks": [self getVisionDocumentTextBlocksList:result.blocks],
+ @"blocks": [self getMLDocumentTextBlocksList:result.blocks],
});
}];
}];
}
-- (NSArray *)getVisionDocumentTextBlocksList:(NSArray *)blocks {
+- (NSArray *)getMLDocumentTextBlocksList:(NSArray *)blocks {
NSMutableArray *documentTextBlocksFormattedList = [[NSMutableArray alloc] init];
for (FIRVisionDocumentTextBlock *block in blocks) {
NSMutableDictionary *documentTextBlockFormatted = [[NSMutableDictionary alloc] init];
- documentTextBlockFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:block.frame];
+ documentTextBlockFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:block.frame];
documentTextBlockFormatted[@"text"] = block.text;
documentTextBlockFormatted[@"confidence"] = block.confidence;
documentTextBlockFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:block.recognizedLanguages];
@@ -130,7 +130,7 @@ - (NSArray *)getParagraphsList:(NSArray *)par
for (FIRVisionDocumentTextParagraph *paragraph in paragraphs) {
NSMutableDictionary *paragraphFormatted = [[NSMutableDictionary alloc] init];
- paragraphFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:paragraph.frame];
+ paragraphFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:paragraph.frame];
paragraphFormatted[@"text"] = paragraph.text;
paragraphFormatted[@"confidence"] = paragraph.confidence;
paragraphFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:paragraph.recognizedLanguages];
@@ -149,7 +149,7 @@ - (NSArray *)getWordsList:(NSArray *)words {
for (FIRVisionDocumentTextWord *word in words) {
NSMutableDictionary *wordFormatted = [[NSMutableDictionary alloc] init];
- wordFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:word.frame];
+ wordFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:word.frame];
wordFormatted[@"text"] = word.text;
wordFormatted[@"confidence"] = word.confidence;
wordFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:word.recognizedLanguages];
@@ -168,7 +168,7 @@ - (NSArray *)getSymbolList:(NSArray *)symbols {
for (FIRVisionDocumentTextSymbol *symbol in symbols) {
NSMutableDictionary *symbolFormatted = [[NSMutableDictionary alloc] init];
- symbolFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:symbol.frame];
+ symbolFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:symbol.frame];
symbolFormatted[@"text"] = symbol.text;
symbolFormatted[@"confidence"] = symbol.confidence;
symbolFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:symbol.recognizedLanguages];
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.h b/packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.h
similarity index 91%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.h
rename to packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.h
index 905bdad5528..6a85d990844 100644
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.h
+++ b/packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.h
@@ -19,5 +19,6 @@
#import
#import
-@interface RNFBMLNaturalLanguageIdModule : NSObject
+@interface RNFBMLImageLabelerModule : NSObject
+
@end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.m b/packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.m
similarity index 60%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.m
rename to packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.m
index 8d95fba2573..663fc5c36a8 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.m
+++ b/packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.m
@@ -17,62 +17,17 @@
#import
#import
-#import "RNFBMLVisionImageLabelerModule.h"
-#import "RNFBMLVisionCommon.h"
+#import "RNFBMLImageLabelerModule.h"
+#import "RNFBMLCommon.h"
-@implementation RNFBMLVisionImageLabelerModule
+@implementation RNFBMLImageLabelerModule
#pragma mark -
#pragma mark Module Setup
RCT_EXPORT_MODULE();
#pragma mark -
-#pragma mark Firebase ML Kit Vision Methods
-
-RCT_EXPORT_METHOD(imageLabelerProcessImage:
- (FIRApp *) firebaseApp
- : (NSString *)filePath
- : (NSDictionary *)imageLabelerOptions
- : (RCTPromiseResolveBlock)resolve
- : (RCTPromiseRejectBlock)reject
-) {
- [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
- if (errorCodeMessageArray != nil) {
- [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
- @"code": errorCodeMessageArray[0],
- @"message": errorCodeMessageArray[1],
- }];
- return;
- }
-
- FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithImage:image];
- FIRVision *vision = [FIRVision visionForApp:firebaseApp];
-
- FIRVisionOnDeviceImageLabelerOptions *options = [[FIRVisionOnDeviceImageLabelerOptions alloc] init];
-
- if (imageLabelerOptions[@"confidenceThreshold"]) {
- options.confidenceThreshold = [imageLabelerOptions[@"confidenceThreshold"] floatValue];
- }
-
- FIRVisionImageLabeler *labeler = [vision onDeviceImageLabelerWithOptions:options];
- [labeler processImage:visionImage completion:^(NSArray *_Nullable labels, NSError *error) {
- if (error != nil) {
- [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
- @"code": @"unknown",
- @"message": [error localizedDescription],
- }];
- return;
- }
-
- if (labels == nil) {
- resolve(@[]);
- return;
- }
-
- resolve([self getLabelList:labels]);
- }];
- }];
-}
+#pragma mark Firebase ML Methods
RCT_EXPORT_METHOD(cloudImageLabelerProcessImage:
(FIRApp *) firebaseApp
@@ -81,7 +36,7 @@ @implementation RNFBMLVisionImageLabelerModule
: (RCTPromiseResolveBlock)resolve
: (RCTPromiseRejectBlock)reject
) {
- [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
+ [RNFBMLCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
if (errorCodeMessageArray != nil) {
[RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
@"code": errorCodeMessageArray[0],
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.h b/packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.h
similarity index 92%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.h
rename to packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.h
index b7624d78298..cb84682e9d9 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.h
+++ b/packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.h
@@ -19,6 +19,6 @@
#import
#import
-@interface RNFBMLVisionFaceDetectorModule : NSObject
+@interface RNFBMLLandmarkRecognizerModule : NSObject
@end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.m b/packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.m
similarity index 90%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.m
rename to packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.m
index f086ae9c0ed..e2ea37a8298 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.m
+++ b/packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.m
@@ -17,17 +17,17 @@
#import
#import
-#import "RNFBMLVisionLandmarkRecognizerModule.h"
-#import "RNFBMLVisionCommon.h"
+#import "RNFBMLLandmarkRecognizerModule.h"
+#import "RNFBMLCommon.h"
-@implementation RNFBMLVisionLandmarkRecognizerModule
+@implementation RNFBMLLandmarkRecognizerModule
#pragma mark -
#pragma mark Module Setup
RCT_EXPORT_MODULE();
#pragma mark -
-#pragma mark Firebase ML Kit Vision Methods
+#pragma mark Firebase ML Methods
RCT_EXPORT_METHOD(cloudLandmarkRecognizerProcessImage:
(FIRApp *) firebaseApp
@@ -36,7 +36,7 @@ @implementation RNFBMLVisionLandmarkRecognizerModule
: (RCTPromiseResolveBlock)resolve
: (RCTPromiseRejectBlock)reject
) {
- [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
+ [RNFBMLCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
if (errorCodeMessageArray != nil) {
[RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
@"code": errorCodeMessageArray[0],
@@ -81,7 +81,7 @@ @implementation RNFBMLVisionLandmarkRecognizerModule
visionLandmark[@"confidence"] = landmark.confidence;
visionLandmark[@"entityId"] = landmark.entityId;
visionLandmark[@"landmark"] = landmark.landmark;
- visionLandmark[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:landmark.frame];
+ visionLandmark[@"boundingBox"] = [RNFBMLCommon rectToIntArray:landmark.frame];
NSMutableArray *locations = [[NSMutableArray alloc] init];
for (FIRVisionLatitudeLongitude *location in landmark.locations) {
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.h b/packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.h
similarity index 90%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.h
rename to packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.h
index b7ed0366b0b..251401cacaf 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.h
+++ b/packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.h
@@ -19,6 +19,6 @@
#import
#import
-@interface RNFBMLVisionImageLabelerModule : NSObject
+@interface RNFBMLTextRecognizerModule : NSObject
-@end
\ No newline at end of file
+@end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.m b/packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.m
similarity index 65%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.m
rename to packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.m
index ae79f74bdbc..53f4cf1938d 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.m
+++ b/packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.m
@@ -17,51 +17,17 @@
#import
#import
-#import "RNFBMLVisionTextRecognizerModule.h"
-#import "RNFBMLVisionCommon.h"
+#import "RNFBMLTextRecognizerModule.h"
+#import "RNFBMLCommon.h"
-@implementation RNFBMLVisionTextRecognizerModule
+@implementation RNFBMLTextRecognizerModule
#pragma mark -
#pragma mark Module Setup
RCT_EXPORT_MODULE();
#pragma mark -
-#pragma mark Firebase ML Kit Vision Methods
-
-RCT_EXPORT_METHOD(textRecognizerProcessImage:
- (FIRApp *) firebaseApp
- : (NSString *)filePath
- : (RCTPromiseResolveBlock)resolve
- : (RCTPromiseRejectBlock)reject
-) {
- [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
- if (errorCodeMessageArray != nil) {
- [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
- @"code": errorCodeMessageArray[0],
- @"message": errorCodeMessageArray[1],
- }];
- return;
- }
-
- FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithImage:image];
- FIRVision *vision = [FIRVision visionForApp:firebaseApp];
-
- FIRVisionTextRecognizer *textRecognizer = [vision onDeviceTextRecognizer];
-
- [textRecognizer processImage:visionImage completion:^(FIRVisionText *text, NSError *error) {
- if (error != nil) {
- [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
- @"code": @"unknown",
- @"message": [error localizedDescription],
- }];
- return;
- }
-
- resolve([self getFirebaseVisionTextMap:text]);
- }];
- }];
-}
+#pragma mark Firebase ML Methods
RCT_EXPORT_METHOD(cloudTextRecognizerProcessImage:
(FIRApp *) firebaseApp
@@ -70,7 +36,7 @@ @implementation RNFBMLVisionTextRecognizerModule
: (RCTPromiseResolveBlock)resolve
: (RCTPromiseRejectBlock)reject
) {
- [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
+ [RNFBMLCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
if (errorCodeMessageArray != nil) {
[RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
@"code": errorCodeMessageArray[0],
@@ -110,21 +76,21 @@ @implementation RNFBMLVisionTextRecognizerModule
return;
}
- resolve([self getFirebaseVisionTextMap:text]);
+ resolve([self getFirebaseMLTextMap:text]);
}];
}];
}
-- (NSDictionary *)getFirebaseVisionTextMap:(FIRVisionText *)text {
- NSMutableDictionary *firebaseVisionTextMap = [[NSMutableDictionary alloc] init];
+- (NSDictionary *)getFirebaseMLTextMap:(FIRVisionText *)text {
+ NSMutableDictionary *firebaseMLTextMap = [[NSMutableDictionary alloc] init];
- firebaseVisionTextMap[@"text"] = text.text;
- firebaseVisionTextMap[@"blocks"] = [self getVisionTextBlocksList:text.blocks];
+ firebaseMLTextMap[@"text"] = text.text;
+ firebaseMLTextMap[@"blocks"] = [self getMLTextBlocksList:text.blocks];
- return firebaseVisionTextMap;
+ return firebaseMLTextMap;
}
-- (NSArray *)getVisionTextBlocksList:(NSArray *)blocks {
+- (NSArray *)getMLTextBlocksList:(NSArray *)blocks {
NSMutableArray *blockListFormatted = [[NSMutableArray alloc] init];
for (FIRVisionTextBlock *block in blocks) {
@@ -132,9 +98,9 @@ - (NSArray *)getVisionTextBlocksList:(NSArray *)blocks {
textBlockFormatted[@"text"] = block.text;
textBlockFormatted[@"confidence"] = block.confidence;
- textBlockFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:block.frame];
+ textBlockFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:block.frame];
textBlockFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:block.recognizedLanguages];
- textBlockFormatted[@"cornerPoints"] = [RNFBMLVisionCommon visionPointsToArray:block.cornerPoints];
+ textBlockFormatted[@"cornerPoints"] = [RNFBMLCommon pointsToArray:block.cornerPoints];
textBlockFormatted[@"lines"] = [self getLinesList:block.lines];
[blockListFormatted addObject:textBlockFormatted];
@@ -149,11 +115,11 @@ - (NSArray *)getLinesList:(NSArray *)lines {
for (FIRVisionTextLine *line in lines) {
NSMutableDictionary *lineFormatted = [[NSMutableDictionary alloc] init];
- lineFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:line.frame];
+ lineFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:line.frame];
lineFormatted[@"text"] = line.text;
lineFormatted[@"confidence"] = line.confidence;
lineFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:line.recognizedLanguages];
- lineFormatted[@"cornerPoints"] = [RNFBMLVisionCommon visionPointsToArray:line.cornerPoints];
+ lineFormatted[@"cornerPoints"] = [RNFBMLCommon pointsToArray:line.cornerPoints];
lineFormatted[@"elements"] = [self getElementsList:line.elements];
[lineListFormatted addObject:lineFormatted];
@@ -168,11 +134,11 @@ - (NSArray *)getElementsList:(NSArray *)elements {
for (FIRVisionTextElement *element in elements) {
NSMutableDictionary *elementFormatted = [[NSMutableDictionary alloc] init];
- elementFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:element.frame];
+ elementFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:element.frame];
elementFormatted[@"text"] = element.text;
elementFormatted[@"confidence"] = element.confidence;
elementFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:element.recognizedLanguages];
- elementFormatted[@"cornerPoints"] = [RNFBMLVisionCommon visionPointsToArray:element.cornerPoints];
+ elementFormatted[@"cornerPoints"] = [RNFBMLCommon pointsToArray:element.cornerPoints];
[elementsListFormatted addObject:elementFormatted];
}
diff --git a/packages/ml-vision/lib/visionCloudDocumentTextRecognizerOptions.js b/packages/ml/lib/MLCloudDocumentTextRecognizerOptions.js
similarity index 95%
rename from packages/ml-vision/lib/visionCloudDocumentTextRecognizerOptions.js
rename to packages/ml/lib/MLCloudDocumentTextRecognizerOptions.js
index 73a1a20ed4a..7263a1e38ac 100644
--- a/packages/ml-vision/lib/visionCloudDocumentTextRecognizerOptions.js
+++ b/packages/ml/lib/MLCloudDocumentTextRecognizerOptions.js
@@ -24,9 +24,7 @@ import {
isUndefined,
} from '@react-native-firebase/app/lib/common';
-export default function visionCloudDocumentTextRecognizerOptions(
- cloudDocumentTextRecognizerOptions,
-) {
+export default function MLCloudDocumentTextRecognizerOptions(cloudDocumentTextRecognizerOptions) {
const out = {
enforceCertFingerprintMatch: false,
};
diff --git a/packages/ml-vision/lib/visionCloudImageLabelerOptions.js b/packages/ml/lib/MLCloudImageLabelerOptions.js
similarity index 96%
rename from packages/ml-vision/lib/visionCloudImageLabelerOptions.js
rename to packages/ml/lib/MLCloudImageLabelerOptions.js
index 6900225242f..63399684543 100644
--- a/packages/ml-vision/lib/visionCloudImageLabelerOptions.js
+++ b/packages/ml/lib/MLCloudImageLabelerOptions.js
@@ -24,7 +24,7 @@ import {
isUndefined,
} from '@react-native-firebase/app/lib/common';
-export default function visionCloudImageLabelerOptions(cloudImageLabelerOptions) {
+export default function MLCloudImageLabelerOptions(cloudImageLabelerOptions) {
const out = {
enforceCertFingerprintMatch: false,
confidenceThreshold: 0.5,
diff --git a/packages/ml-vision/lib/VisionCloudLandmarkRecognizerModelType.js b/packages/ml/lib/MLCloudLandmarkRecognizerModelType.js
similarity index 100%
rename from packages/ml-vision/lib/VisionCloudLandmarkRecognizerModelType.js
rename to packages/ml/lib/MLCloudLandmarkRecognizerModelType.js
diff --git a/packages/ml-vision/lib/visionCloudLandmarkRecognizerOptions.js b/packages/ml/lib/MLCloudLandmarkRecognizerOptions.js
similarity index 81%
rename from packages/ml-vision/lib/visionCloudLandmarkRecognizerOptions.js
rename to packages/ml/lib/MLCloudLandmarkRecognizerOptions.js
index 0f44db69dcf..4b1997309c2 100644
--- a/packages/ml-vision/lib/visionCloudLandmarkRecognizerOptions.js
+++ b/packages/ml/lib/MLCloudLandmarkRecognizerOptions.js
@@ -23,13 +23,13 @@ import {
isString,
isUndefined,
} from '@react-native-firebase/app/lib/common';
-import VisionCloudLandmarkRecognizerModelType from './VisionCloudLandmarkRecognizerModelType';
+import MLCloudLandmarkRecognizerModelType from './MLCloudLandmarkRecognizerModelType';
-export default function visionCloudLandmarkRecognizerOptions(cloudLandmarkRecognizerOptions) {
+export default function MLCloudLandmarkRecognizerOptions(cloudLandmarkRecognizerOptions) {
const out = {
enforceCertFingerprintMatch: false,
maxResults: 10,
- model: VisionCloudLandmarkRecognizerModelType.STABLE_MODEL,
+ model: MLCloudLandmarkRecognizerModelType.STABLE_MODEL,
};
if (isUndefined(cloudLandmarkRecognizerOptions)) {
@@ -69,12 +69,11 @@ export default function visionCloudLandmarkRecognizerOptions(cloudLandmarkRecogn
if (cloudLandmarkRecognizerOptions.modelType) {
if (
cloudLandmarkRecognizerOptions.modelType !==
- VisionCloudLandmarkRecognizerModelType.STABLE_MODEL &&
- cloudLandmarkRecognizerOptions.modelType !==
- VisionCloudLandmarkRecognizerModelType.LATEST_MODEL
+ MLCloudLandmarkRecognizerModelType.STABLE_MODEL &&
+ cloudLandmarkRecognizerOptions.modelType !== MLCloudLandmarkRecognizerModelType.LATEST_MODEL
) {
throw new Error(
- "'cloudLandmarkRecognizerOptions.modelType' invalid model. Expected VisionCloudLandmarkRecognizerModelType.STABLE_MODEL or VisionCloudLandmarkRecognizerModelType.LATEST_MODEL.",
+ "'cloudLandmarkRecognizerOptions.modelType' invalid model. Expected MLCloudLandmarkRecognizerModelType.STABLE_MODEL or MLCloudLandmarkRecognizerModelType.LATEST_MODEL.",
);
}
diff --git a/packages/ml-vision/lib/VisionCloudTextRecognizerModelType.js b/packages/ml/lib/MLCloudTextRecognizerModelType.js
similarity index 100%
rename from packages/ml-vision/lib/VisionCloudTextRecognizerModelType.js
rename to packages/ml/lib/MLCloudTextRecognizerModelType.js
diff --git a/packages/ml-vision/lib/visionCloudTextRecognizerOptions.js b/packages/ml/lib/MLCloudTextRecognizerOptions.js
similarity index 85%
rename from packages/ml-vision/lib/visionCloudTextRecognizerOptions.js
rename to packages/ml/lib/MLCloudTextRecognizerOptions.js
index 2d013f0b035..8dcc45eee23 100644
--- a/packages/ml-vision/lib/visionCloudTextRecognizerOptions.js
+++ b/packages/ml/lib/MLCloudTextRecognizerOptions.js
@@ -23,12 +23,12 @@ import {
isString,
isUndefined,
} from '@react-native-firebase/app/lib/common';
-import VisionCloudTextRecognizerModelType from './VisionCloudTextRecognizerModelType';
+import MLCloudTextRecognizerModelType from './MLCloudTextRecognizerModelType';
-export default function visionCloudTextRecognizerOptions(cloudTextRecognizerOptions) {
+export default function MLCloudTextRecognizerOptions(cloudTextRecognizerOptions) {
const out = {
enforceCertFingerprintMatch: false,
- modelType: VisionCloudTextRecognizerModelType.SPARSE_MODEL,
+ modelType: MLCloudTextRecognizerModelType.SPARSE_MODEL,
};
if (isUndefined(cloudTextRecognizerOptions)) {
@@ -59,8 +59,8 @@ export default function visionCloudTextRecognizerOptions(cloudTextRecognizerOpti
if (cloudTextRecognizerOptions.modelType) {
if (
- cloudTextRecognizerOptions.modelType !== VisionCloudTextRecognizerModelType.DENSE_MODEL &&
- cloudTextRecognizerOptions.modelType !== VisionCloudTextRecognizerModelType.SPARSE_MODEL
+ cloudTextRecognizerOptions.modelType !== MLCloudTextRecognizerModelType.DENSE_MODEL &&
+ cloudTextRecognizerOptions.modelType !== MLCloudTextRecognizerModelType.SPARSE_MODEL
) {
throw new Error(
"'cloudTextRecognizerOptions.modelType' invalid model. Expected VisionCloudTextRecognizerModelType.DENSE_MODEL or VisionCloudTextRecognizerModelType.SPARSE_MODEL.",
diff --git a/packages/ml-vision/lib/VisionDocumentTextRecognizedBreakType.js b/packages/ml/lib/MLDocumentTextRecognizedBreakType.js
similarity index 100%
rename from packages/ml-vision/lib/VisionDocumentTextRecognizedBreakType.js
rename to packages/ml/lib/MLDocumentTextRecognizedBreakType.js
diff --git a/packages/ml/lib/index.d.ts b/packages/ml/lib/index.d.ts
new file mode 100644
index 00000000000..9012b33ca67
--- /dev/null
+++ b/packages/ml/lib/index.d.ts
@@ -0,0 +1,701 @@
+/*
+ * Copyright (c) 2016-present Invertase Limited & Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this library except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+import { ReactNativeFirebase } from '@react-native-firebase/app';
+/**
+ * Firebase ML package for React Native.
+ *
+ * #### Example 1
+ *
+ * Access the firebase export from the `ml` package:
+ *
+ * ```js
+ * import { firebase } from '@react-native-firebase/ml';
+ *
+ * // firebase.ml().X
+ * ```
+ *
+ * #### Example 2
+ *
+ * Using the default export from the `ml` package:
+ *
+ * ```js
+ * import ml from '@react-native-firebase/ml';
+ *
+ * // ml().X
+ * ```
+ *
+ * #### Example 3
+ *
+ * Using the default export from the `app` package:
+ *
+ * ```js
+ * import firebase from '@react-native-firebase/app';
+ * import '@react-native-firebase/ml';
+ *
+ * // firebase.ml().X
+ * ```
+ *
+ * @firebase ml
+ */
+export namespace FirebaseMLTypes {
+ import FirebaseModule = ReactNativeFirebase.FirebaseModule;
+
+ export interface Statics {
+ MLCloudTextRecognizerModelType: typeof MLCloudTextRecognizerModelType;
+ MLCloudLandmarkRecognizerModelType: typeof MLCloudLandmarkRecognizerModelType;
+ MLDocumentTextRecognizedBreakType: typeof MLDocumentTextRecognizedBreakType;
+ }
+
+ /**
+ * Options for cloud image labeler. Confidence threshold could be provided for the label detection.
+ *
+ * For example, if the confidence threshold is set to 0.7, only labels with confidence >= 0.7 would be returned. The default threshold is 0.5.
+ *
+ * Note: at most 20 labels will be returned for cloud image labeler.
+ */
+ export interface MLCloudImageLabelerOptions {
+ /**
+ * Only allow registered application instances with matching certificate fingerprint to use ML API.
+ *
+ * > Do not set this for debug build if you use simulators to test.
+ *
+ * #### Example
+ *
+ * ```js
+ * await firebase.ml().cloudImageLabelerProcessImage(filePath, {
+ * enforceCertFingerprintMatch: true,
+ * });
+ * ```
+ */
+ enforceCertFingerprintMatch?: boolean;
+
+ /**
+ * Sets confidence threshold in the range of [0.0 - 1.0] of detected labels. Only labels detected with confidence higher than this threshold are returned.
+ *
+ * Defaults to 0.5.
+ *
+ * #### Example
+ *
+ * ```js
+ * await firebase.ml().cloudImageLabelerProcessImage(filePath, {
+ * confidenceThreshold: 0.8,
+ * });
+ * ```
+ */
+ confidenceThreshold?: number;
+
+ /**
+ * API key to use for ML API. If not set, the default API key from `firebase.app()` will be used.
+ *
+ * #### Example
+ *
+ * ```js
+ * await firebase.ml().cloudImageLabelerProcessImage(filePath, {
+ * apiKeyOverride: 'xyz123',
+ * });
+ * ```
+ *
+ * @ios
+ */
+ apiKeyOverride?: string;
+ }
+
+ /**
+ * Detector for finding popular natural and man-made structures within an image.
+ */
+ export interface MLCloudLandmarkRecognizerOptions {
+ /**
+ * Only allow registered application instances with matching certificate fingerprint to use ML API.
+ *
+ * > Do not set this for debug build if you use simulators to test.
+ */
+ enforceCertFingerprintMatch?: boolean;
+
+ /**
+ * Sets the maximum number of results of this type.
+ *
+ * Defaults to 10.
+ */
+ maxResults?: number;
+
+ /**
+ * Sets model type for the detection.
+ *
+ * Defaults to `MLCloudLandmarkRecognizerModelType.STABLE_MODEL`.
+ */
+ modelType?:
+ | MLCloudLandmarkRecognizerModelType.STABLE_MODEL
+ | MLCloudLandmarkRecognizerModelType.LATEST_MODEL;
+
+ /**
+ * API key to use for ML API. If not set, the default API key from `firebase.app()` will be used.
+ *
+ * @ios
+ */
+ apiKeyOverride?: string;
+ }
+
+ /**
+ * Model types for cloud landmark recognition.
+ */
+ export enum MLCloudLandmarkRecognizerModelType {
+ /**
+ * Stable model would be used.
+ */
+ STABLE_MODEL = 1,
+
+ /**
+ * Latest model would be used.
+ */
+ LATEST_MODEL = 2,
+ }
+
+ /**
+ * Options for cloud text recognizer.
+ */
+ export interface MLCloudTextRecognizerOptions {
+ /**
+ * Only allow registered application instances with matching certificate fingerprint to use ML API.
+ *
+ * > Do not set this for debug build if you use simulators to test.
+ *
+ * #### Example
+ *
+ * ```js
+ * await firebase.ml().cloudTextRecognizerProcessImage(filePath, {
+ * enforceCertFingerprintMatch: true,
+ * });
+ * ```
+ */
+ enforceCertFingerprintMatch?: boolean;
+
+ /**
+ * Sets model type for cloud text recognition. The two models SPARSE_MODEL and DENSE_MODEL handle different text densities in an image.
+ *
+ * See `MLCloudTextRecognizerModelType` for types.
+ *
+ * Defaults to `MLCloudTextRecognizerModelType.SPARSE_MODEL`.
+ *
+ * #### Example
+ *
+ * ```js
+ * import {
+ * firebase,
+ * MLCloudTextRecognizerModelType,
+ * } from '@react-native-firebase/ml';
+ *
+ * await firebase.ml().cloudTextRecognizerProcessImage(filePath, {
+ * modelType: MLCloudTextRecognizerModelType.DENSE_MODEL,
+ * });
+ * ```
+ */
+ modelType?:
+ | MLCloudTextRecognizerModelType.SPARSE_MODEL
+ | MLCloudTextRecognizerModelType.DENSE_MODEL;
+
+ /**
+ * Sets language hints. In most cases, not setting this yields the best results since it enables automatic language
+ * detection. For languages based on the Latin alphabet, setting language hints is not needed. In rare cases, when
+ * the language of the text in the image is known, setting a hint will help get better results (although it will be a
+ * significant hindrance if the hint is wrong).
+ *
+ * Each language code must be a BCP-47 identifier. See [Google Cloud OCR Language Support](https://cloud.google.com/vision/docs/languages) for more information.
+ *
+ * #### Example
+ *
+ * ```js
+ * await firebase.ml().cloudTextRecognizerProcessImage(filePath, {
+ * languageHints: ['fr', 'de'],
+ * });
+ * ```
+ */
+ languageHints?: string[];
+
+ /**
+ * API key to use for Cloud ML API. If not set, the default API key from `firebase.app()` will be used.
+ *
+ * #### Example
+ *
+ * ```js
+ * await firebase.ml().cloudTextRecognizerProcessImage(filePath, {
+ * apiKeyOverride: 'xyz123',
+ * });
+ * ```
+ *
+ * @ios
+ */
+ apiKeyOverride?: string;
+ }
+
+ /**
+ * Options for the cloud document text recognizer.
+ */
+ export interface MLCloudDocumentTextRecognizerOptions {
+ /**
+ * Only allow registered application instances with matching certificate fingerprint to use ML API.
+ *
+ * > Do not set this for debug build if you use simulators to test.
+ *
+ * #### Example
+ *
+ * ```js
+ * await firebase.ml().cloudTextRecognizerProcessImage(filePath, {
+ * enforceCertFingerprintMatch: true,
+ * });
+ * ```
+ */
+ enforceCertFingerprintMatch?: boolean;
+
+ /**
+ * Sets language hints. In most cases, not setting this yields the best results since it enables automatic language
+ * detection. For languages based on the Latin alphabet, setting language hints is not needed. In rare cases, when
+ * the language of the text in the image is known, setting a hint will help get better results (although it will be a
+ * significant hindrance if the hint is wrong).
+ *
+ * Each language code must be a BCP-47 identifier. See [Google Cloud OCR Language Support](https://cloud.google.com/vision/docs/languages) for more information.
+ *
+ * #### Example
+ *
+ * ```js
+ * await firebase.ml().cloudTextRecognizerProcessImage(filePath, {
+ * languageHints: ['fr', 'de'],
+ * });
+ * ```
+ */
+ languageHints?: string[];
+
+ /**
+ * API key to use for ML API. If not set, the default API key from `firebase.app()` will be used.
+ *
+ * #### Example
+ *
+ * ```js
+ * await firebase.ml().cloudTextRecognizerProcessImage(filePath, {
+ * apiKeyOverride: 'xyz123',
+ * });
+ * ```
+ *
+ * @ios
+ */
+ apiKeyOverride?: string;
+ }
+
+ /**
+ * The cloud model type used for in MLCloudTextRecognizerOptions & MLCloudDocumentTextRecognizerOptions
+ *
+ * Defaults to `SPARSE_MODEL`
+ */
+ export enum MLCloudTextRecognizerModelType {
+ /**
+ * Dense model type. It is more suitable for well-formatted dense text.
+ */
+ SPARSE_MODEL = 1,
+ /**
+ * Dense model type. It is more suitable for well-formatted dense text.
+ */
+ DENSE_MODEL = 2,
+ }
+
+ /**
+ * A Rectangle holds four number coordinates relative to the processed image.
+ * Rectangles are represented as [left, top, right, bottom].
+ *
+ * Used by ML Text Recognizer & Landmark Recognition APIs.
+ */
+ export type MLRectangle = [number, number, number, number];
+
+ /**
+ * A point holds two number coordinates relative to the processed image.
+ * Points are represented as [x, y].
+ *
+ * Used by ML Text Recognizer & Landmark Recognition APIs.
+ */
+ export type MLPoint = [number, number];
+
+ /**
+ * A hierarchical representation of texts recognized in an image.
+ */
+ export interface MLText {
+ /**
+ * Retrieve the recognized text as a string.
+ */
+ text: string;
+
+ /**
+ * Gets an array of `MLTextBlock`s, each a block of text that can be further decomposed to an array of `MLTextLine`s.
+ */
+ blocks: MLTextBlock[];
+ }
+
+ /**
+ * Represents a block of text.
+ */
+ export interface MLDocumentTextBlock extends MLDocumentTextBase {
+ /**
+ * Gets an Array of `MLDocumentTextParagraph`s that make up this block.
+ */
+ paragraphs: MLDocumentTextParagraph[];
+ }
+
+ /**
+ * A structural unit of text representing a number of words in certain order.
+ */
+ export interface MLDocumentTextParagraph extends MLDocumentTextBase {
+ /**
+ * Gets an Array of `MLDocumentTextWord`s that make up this paragraph.
+ *
+ * Returns an empty list if no Word is found.
+ */
+ words: MLDocumentTextWord[];
+ }
+
+ /**
+ * A single word representation.
+ */
+ export interface MLDocumentTextWord extends MLDocumentTextBase {
+ /**
+ * Gets an Array of `MLDocumentTextSymbol`s that make up this word.
+ * The order of the symbols follows the natural reading order.
+ */
+ symbols: MLDocumentTextSymbol[];
+ }
+
+ /**
+ * A single symbol representation.
+ */
+ export type MLDocumentTextSymbol = MLDocumentTextBase;
+
+ /**
+ * Enum representing the detected break type.
+ */
+ export enum MLDocumentTextRecognizedBreakType {
+ /**
+ * Line-wrapping break.
+ */
+ EOL_SURE_SPACE = 3,
+
+ /**
+ * End-line hyphen that is not present in text; does not co-occur with `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
+ */
+ HYPHEN = 4,
+
+ /**
+ * Line break that ends a paragraph.
+ */
+ LINE_BREAK = 5,
+
+ /**
+ * Regular space.
+ */
+ SPACE = 1,
+
+ /**
+ * Sure space (very wide).
+ */
+ SURE_SPACE = 2,
+
+ /**
+ * Unknown break label type.
+ */
+ UNKNOWN = 0,
+ }
+
+ /**
+ * A recognized break is the detected start or end of a structural component.
+ */
+ export interface MLDocumentTextRecognizedBreak {
+ /**
+ * Gets detected break type.
+ */
+ breakType: MLDocumentTextRecognizedBreakType;
+
+ /**
+ * Returns true if break prepends an element.
+ */
+ isPrefix: boolean;
+ }
+ /**
+ * A shared type that all MLDocumentText components inherit from
+ */
+ export interface MLDocumentTextBase {
+ /**
+ * Gets the recognized text as a string. Returned in reading order for the language. For Latin, this is top to bottom within a `MLTextBlock`, and left-to-right within a `MLTextLine`.
+ */
+ text: string;
+
+ /**
+ * The confidence of the recognized text. It only returns valid results from cloud recognizers. For on-device text recognition, the confidence is always null.
+ */
+ confidence: null | number;
+
+ /**
+ * Gets a list of recognized languages. (Cloud API only. On-Device returns empty array)
+ *
+ * A language is the BCP-47 language code, such as "en-US" or "sr-Latn".
+ */
+ recognizedLanguages: string[];
+
+ /**
+ * Returns the bounding rectangle of the detected text.
+ */
+ boundingBox: MLRectangle;
+
+ /**
+ * Gets the recognized break - the detected start or end of a structural component.
+ */
+ recognizedBreak: MLDocumentTextRecognizedBreak;
+ }
+
+ /**
+ * A hierarchical representation of document text recognized in an image.
+ */
+ export interface MLDocumentText {
+ /**
+ * Retrieve the recognized text as a string.
+ */
+ text: string;
+
+ /**
+ * Gets an array of `MLDocumentTextBlock`s, each a block of text that can be further decomposed to an array of `MLDocumentTextParagraph`s.
+ */
+ blocks: MLDocumentTextBlock[];
+ }
+
+ /**
+ * A shared type that all ML Text components inherit from
+ */
+ export interface MLTextBase {
+ /**
+ * Gets the recognized text as a string. Returned in reading order for the language. For Latin, this is top to bottom within a `MLTextBlock`, and left-to-right within a `MLTextLine`.
+ */
+ text: string;
+
+ /**
+ * The confidence of the recognized text. It only returns valid results from cloud recognizers. For on-device text recognition, the confidence is always null.
+ */
+ confidence: null | number;
+
+ /**
+ * Gets a list of recognized languages. (Cloud API only. On-Device returns empty array)
+ *
+ * A language is the BCP-47 language code, such as "en-US" or "sr-Latn".
+ */
+ recognizedLanguages: string[];
+
+ /**
+ * Returns the bounding rectangle of the detected text.
+ */
+ boundingBox: MLRectangle;
+
+ /**
+ * Gets the four corner points in clockwise direction starting with top-left. Due to the possible perspective distortions, this is not necessarily a rectangle. Parts of the region could be outside of the image.
+ */
+ cornerPoints: MLPoint[];
+ }
+
+ /**
+ * Represents a block of text (similar to a paragraph).
+ */
+ export interface MLTextBlock extends MLTextBase {
+ /**
+ * Gets an Array of MLTextLine's that make up this text block.
+ */
+ lines: MLTextLine[];
+ }
+
+ /**
+ * Represents a line of text.
+ */
+ export interface MLTextLine extends MLTextBase {
+ /**
+ * Gets an Array of MLTextElement's that make up this text line.
+ *
+ * An element is roughly equivalent to a space-separated "word" in most Latin languages, or a character in others. For instance, if a word is split between two lines by a hyphen, each part is encoded as a separate Element.
+ */
+ elements: MLTextElement[];
+ }
+
+ /**
+ * Roughly equivalent to a space-separated "word" in most Latin languages, or a character in others. For instance, if a word is split between two lines by a hyphen, each part is encoded as a separate Element.
+ */
+ export type MLTextElement = MLTextBase;
+
+ /**
+ * Represents an image label returned from `imageLabelerProcessImage()` and `cloudImageLabelerProcessImage()`.
+ */
+ export interface MLImageLabel {
+ /**
+ * Returns a detected label from the given image. The label returned here is in English only.
+ *
+ * Use `entityId` to retrieve a unique id.
+ */
+ text: string;
+
+ /**
+ * Returns an opaque entity ID. IDs are available in [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/).
+ */
+ entityId: string;
+
+ /**
+ * Gets overall confidence of the result.
+ *
+ * Range between 0 (low confidence) and 1 (high confidence).
+ */
+ confidence: number;
+ }
+
+ /**
+ * Represents a detected landmark returned from `cloudLandmarkRecognizerProcessImage()`.
+ */
+ export interface MLLandmark {
+ /**
+ * Gets image region of the detected landmark. Returns null if nothing was detected
+ */
+ boundingBox: MLRectangle | null;
+
+ /**
+ * Gets overall confidence of the result. Ranging between 0 & 1.
+ */
+ confidence: number;
+
+ /**
+ * Gets opaque entity ID. Some IDs may be available in [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/).
+ */
+ entityId: string;
+
+ /**
+ * Gets the detected landmark.
+ */
+ landmark: string;
+
+ /**
+ * Gets the location information for the detected entity.
+ *
+ * Multiple MLGeoPoint elements can be present because one location may indicate the location of the scene
+ * in the image, and another location may indicate the location of the place where the image was taken.
+ * Location information is usually present for landmarks.
+ */
+ locations: MLGeoPoint[];
+ }
+
+ /**
+ * A representation of a latitude/longitude pair.
+ *
+ * This is expressed as an array of numbers representing degrees latitude and degrees longitude, in the form `[lat, lng]`.
+ */
+ export type MLGeoPoint = [number, number];
+
+ /**
+ * The Firebase ML service interface.
+ *
+ * > This module is available for the default app only.
+ *
+ * #### Example
+ *
+ * Get the ML service for the default app:
+ *
+ * ```js
+ * const defaultAppML = firebase.ml();
+ * ```
+ */
+ export class Module extends FirebaseModule {
+ /**
+ * Detect text from a local image file.
+ *
+ * @param imageFilePath A local path to an image on the device.
+ * @param cloudTextRecognizerOptions An instance of `MLCloudTextRecognizerOptions`.
+ */
+ cloudTextRecognizerProcessImage(
+ imageFilePath: string,
+ cloudTextRecognizerOptions?: MLCloudTextRecognizerOptions,
+ ): Promise;
+
+ /**
+ * Detect text within a document using a local image file.
+ *
+ * @param imageFilePath A local path to an image on the device.
+ * @param cloudDocumentTextRecognizerOptions An instance of `MLCloudDocumentTextRecognizerOptions`.
+ */
+ cloudDocumentTextRecognizerProcessImage(
+ imageFilePath: string,
+ cloudDocumentTextRecognizerOptions?: MLCloudDocumentTextRecognizerOptions,
+ ): Promise;
+
+ /**
+ * Returns an array of landmarks (as `MLLandmark`) of a given local image file path
+ *
+ * @param imageFilePath A local image file path.
+ * @param cloudLandmarkRecognizerOptions An optional instance of `MLCloudLandmarkRecognizerOptions`.
+ */
+ cloudLandmarkRecognizerProcessImage(
+ imageFilePath: string,
+ cloudLandmarkRecognizerOptions?: MLCloudLandmarkRecognizerOptions,
+ ): Promise;
+
+ /**
+ * Returns an array of labels (as `MLImageLabel`) of a given local image file path.
+ *
+ * #### Example
+ *
+ * ```js
+ * const labels = await firebase.ml().cloudImageLabelerProcessImage(filePath, {
+ * confidenceThreshold: 0.8,
+ * });
+ * ```
+ *
+ * @param imageFilePath A local image file path.
+ * @param cloudImageLabelerOptions An optional instance of `MLCloudImageLabelerOptions`.
+ */
+ cloudImageLabelerProcessImage(
+ imageFilePath: string,
+ cloudImageLabelerOptions?: MLCloudImageLabelerOptions,
+ ): Promise;
+ }
+}
+
+declare const defaultExport: ReactNativeFirebase.FirebaseModuleWithStaticsAndApp<
+ FirebaseMLTypes.Module,
+ FirebaseMLTypes.Statics
+>;
+
+export const firebase: ReactNativeFirebase.Module & {
+  ml: typeof defaultExport;
+ app(name?: string): ReactNativeFirebase.FirebaseApp & { ml(): FirebaseMLTypes.Module };
+};
+
+export const MLCloudTextRecognizerModelType: FirebaseMLTypes.Statics['MLCloudTextRecognizerModelType'];
+export const MLDocumentTextRecognizedBreakType: FirebaseMLTypes.Statics['MLDocumentTextRecognizedBreakType'];
+export const MLCloudLandmarkRecognizerModelType: FirebaseMLTypes.Statics['MLCloudLandmarkRecognizerModelType'];
+
+export default defaultExport;
+
+/**
+ * Attach namespace to `firebase.` and `FirebaseApp.`.
+ */
+declare module '@react-native-firebase/app' {
+ namespace ReactNativeFirebase {
+ import FirebaseModuleWithStaticsAndApp = ReactNativeFirebase.FirebaseModuleWithStaticsAndApp;
+ interface Module {
+      ml: FirebaseModuleWithStaticsAndApp<FirebaseMLTypes.Module, FirebaseMLTypes.Statics>;
+ }
+
+ interface FirebaseApp {
+ ml(): FirebaseMLTypes.Module;
+ }
+ }
+}
diff --git a/packages/ml/lib/index.js b/packages/ml/lib/index.js
new file mode 100644
index 00000000000..79b25a42476
--- /dev/null
+++ b/packages/ml/lib/index.js
@@ -0,0 +1,145 @@
+/*
+ * Copyright (c) 2016-present Invertase Limited & Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this library except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+import { isString, toFilePath } from '@react-native-firebase/app/lib/common';
+import {
+ createModuleNamespace,
+ FirebaseModule,
+ getFirebaseRoot,
+} from '@react-native-firebase/app/lib/internal';
+import version from './version';
+import MLCloudDocumentTextRecognizerOptions from './MLCloudDocumentTextRecognizerOptions';
+import MLCloudImageLabelerOptions from './MLCloudImageLabelerOptions';
+import MLCloudLandmarkRecognizerModelType from './MLCloudLandmarkRecognizerModelType';
+import MLCloudLandmarkRecognizerOptions from './MLCloudLandmarkRecognizerOptions';
+import MLCloudTextRecognizerModelType from './MLCloudTextRecognizerModelType';
+import MLCloudTextRecognizerOptions from './MLCloudTextRecognizerOptions';
+import MLDocumentTextRecognizedBreakType from './MLDocumentTextRecognizedBreakType';
+
+const statics = {
+ MLCloudTextRecognizerModelType,
+ MLCloudLandmarkRecognizerModelType,
+ MLDocumentTextRecognizedBreakType,
+};
+
+const namespace = 'ml';
+const nativeModuleName = [
+ 'RNFBMLImageLabelerModule',
+ 'RNFBMLTextRecognizerModule',
+ 'RNFBMLLandmarkRecognizerModule',
+ 'RNFBMLDocumentTextRecognizerModule',
+];
+
+class FirebaseMLModule extends FirebaseModule {
+ cloudTextRecognizerProcessImage(localImageFilePath, cloudTextRecognizerOptions) {
+ if (!isString(localImageFilePath)) {
+ throw new Error(
+ "firebase.ml().cloudTextRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.",
+ );
+ }
+
+ let options;
+ try {
+ options = MLCloudTextRecognizerOptions(cloudTextRecognizerOptions);
+ } catch (e) {
+ throw new Error(`firebase.ml().cloudTextRecognizerProcessImage(_, *) ${e.message}`);
+ }
+
+ return this.native.cloudTextRecognizerProcessImage(toFilePath(localImageFilePath), options);
+ }
+
+ cloudDocumentTextRecognizerProcessImage(localImageFilePath, cloudDocumentTextRecognizerOptions) {
+ if (!isString(localImageFilePath)) {
+ throw new Error(
+ "firebase.ml().cloudDocumentTextRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.",
+ );
+ }
+
+ let options;
+ try {
+ options = MLCloudDocumentTextRecognizerOptions(cloudDocumentTextRecognizerOptions);
+ } catch (e) {
+ throw new Error(`firebase.ml().cloudDocumentTextRecognizerProcessImage(_, *) ${e.message}.`);
+ }
+
+ return this.native.cloudDocumentTextRecognizerProcessImage(
+ toFilePath(localImageFilePath),
+ options,
+ );
+ }
+
+ cloudLandmarkRecognizerProcessImage(localImageFilePath, cloudLandmarkRecognizerOptions) {
+ if (!isString(localImageFilePath)) {
+ throw new Error(
+ "firebase.ml().cloudLandmarkRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.",
+ );
+ }
+
+ let options;
+ try {
+ options = MLCloudLandmarkRecognizerOptions(cloudLandmarkRecognizerOptions);
+ } catch (e) {
+ throw new Error(`firebase.ml().cloudLandmarkRecognizerProcessImage(_, *) ${e.message}.`);
+ }
+
+ return this.native.cloudLandmarkRecognizerProcessImage(toFilePath(localImageFilePath), options);
+ }
+
+ cloudImageLabelerProcessImage(localImageFilePath, cloudImageLabelerOptions) {
+ if (!isString(localImageFilePath)) {
+ throw new Error(
+ "firebase.ml().cloudImageLabelerProcessImage(*) 'localImageFilePath' expected a string local file path.",
+ );
+ }
+
+ let options;
+ try {
+ options = MLCloudImageLabelerOptions(cloudImageLabelerOptions);
+ } catch (e) {
+ throw new Error(`firebase.ml().cloudImageLabelerProcessImage(_, *) ${e.message}.`);
+ }
+
+ return this.native.cloudImageLabelerProcessImage(toFilePath(localImageFilePath), options);
+ }
+}
+
+// import { SDK_VERSION } from '@react-native-firebase/ml';
+export const SDK_VERSION = version;
+
+// import ML from '@react-native-firebase/ml';
+// ml().X(...);
+export default createModuleNamespace({
+ statics,
+ version,
+ namespace,
+ nativeModuleName,
+ nativeEvents: false,
+ hasMultiAppSupport: true,
+ hasCustomUrlOrRegionSupport: false,
+ ModuleClass: FirebaseMLModule,
+});
+
+// import ml, { firebase } from '@react-native-firebase/ml';
+// ml().X(...);
+// firebase.ml().X(...);
+export const firebase = getFirebaseRoot();
+
+// e.g.
+// // import { MLCloudTextRecognizerModelType } from '@react-native-firebase/ml';
+export { default as MLCloudTextRecognizerModelType } from './MLCloudTextRecognizerModelType';
+export { default as MLDocumentTextRecognizedBreakType } from './MLDocumentTextRecognizedBreakType';
+export { default as MLCloudLandmarkRecognizerModelType } from './MLCloudLandmarkRecognizerModelType';
diff --git a/packages/ml-vision/package.json b/packages/ml/package.json
similarity index 65%
rename from packages/ml-vision/package.json
rename to packages/ml/package.json
index d1d21234d15..4f9111ab461 100644
--- a/packages/ml-vision/package.json
+++ b/packages/ml/package.json
@@ -1,8 +1,8 @@
{
- "name": "@react-native-firebase/ml-vision",
+ "name": "@react-native-firebase/ml",
"version": "7.4.13",
"author": "Invertase (http://invertase.io)",
- "description": "React Native Firebase - Firebase ML Kit brings the power of machine learning vision to your React Native application, supporting both Android & iOS.",
+ "description": "React Native Firebase - Firebase ML brings the power of machine learning vision to your React Native application, supporting both Android & iOS.",
"main": "lib/index.js",
"types": "lib/index.d.ts",
"scripts": {
@@ -12,24 +12,18 @@
},
"repository": {
"type": "git",
- "url": "https://github.com/invertase/react-native-firebase/tree/master/packages/ml-vision"
+ "url": "https://github.com/invertase/react-native-firebase/tree/master/packages/ml"
},
"license": "Apache-2.0",
"keywords": [
"react",
"react-native",
"firebase",
- "mlkit",
+ "ml",
"machine learning",
"text recognition",
"landmark recognition",
- "image labeler",
- "face detector",
- "barcode",
- "label",
- "natural language",
- "nlp",
- "vision"
+ "image labeler"
],
"peerDependencies": {
"@react-native-firebase/app": "8.4.7"
diff --git a/packages/ml/type-test.ts b/packages/ml/type-test.ts
new file mode 100644
index 00000000000..42313332a82
--- /dev/null
+++ b/packages/ml/type-test.ts
@@ -0,0 +1,37 @@
+import firebase from '@react-native-firebase/app';
+import * as ml from '@react-native-firebase/ml';
+
+console.log(ml.default().app);
+
+// checks module exists at root
+console.log(firebase.ml().app.name);
+
+// checks module exists at app level
+console.log(firebase.app().ml().app.name);
+
+// checks statics exist
+console.log(firebase.ml.SDK_VERSION);
+
+// checks statics exist on defaultExport
+console.log(firebase.SDK_VERSION);
+
+// checks root exists
+console.log(firebase.SDK_VERSION);
+
+// checks firebase named export exists on module
+console.log(ml.firebase.SDK_VERSION);
+
+// checks multi-app support exists
+console.log(firebase.ml(firebase.app()).app.name);
+
+// checks default export supports app arg
+console.log(firebase.ml(firebase.app('foo')).app.name);
+
+console.log(firebase.ml.MLCloudTextRecognizerModelType.DENSE_MODEL);
+console.log(ml.MLCloudTextRecognizerModelType.SPARSE_MODEL);
+
+console.log(firebase.ml.MLDocumentTextRecognizedBreakType.EOL_SURE_SPACE);
+console.log(ml.MLDocumentTextRecognizedBreakType.HYPHEN);
+
+console.log(firebase.ml.MLCloudLandmarkRecognizerModelType.LATEST_MODEL);
+console.log(ml.MLCloudLandmarkRecognizerModelType.STABLE_MODEL);
diff --git a/packages/perf/android/build.gradle b/packages/perf/android/build.gradle
index 8189c21af1f..ca421bd4173 100644
--- a/packages/perf/android/build.gradle
+++ b/packages/perf/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/remote-config/android/build.gradle b/packages/remote-config/android/build.gradle
index af93bf88d8e..927814031b6 100644
--- a/packages/remote-config/android/build.gradle
+++ b/packages/remote-config/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/remote-config/android/src/main/java/io/invertase/firebase/config/UniversalFirebaseConfigModule.java b/packages/remote-config/android/src/main/java/io/invertase/firebase/config/UniversalFirebaseConfigModule.java
index d3280f6d390..6338cfbeace 100644
--- a/packages/remote-config/android/src/main/java/io/invertase/firebase/config/UniversalFirebaseConfigModule.java
+++ b/packages/remote-config/android/src/main/java/io/invertase/firebase/config/UniversalFirebaseConfigModule.java
@@ -109,7 +109,7 @@ Task setDefaultsFromResource(String appName, String resourceName) {
}
if (xmlResourceParser != null) {
- FirebaseRemoteConfig.getInstance(firebaseApp).setDefaults(resourceId);
+ Tasks.await(FirebaseRemoteConfig.getInstance(firebaseApp).setDefaultsAsync(resourceId));
return null;
}
diff --git a/packages/remote-config/e2e/config.e2e.js b/packages/remote-config/e2e/config.e2e.js
index ffe906e681f..82907cdaa08 100644
--- a/packages/remote-config/e2e/config.e2e.js
+++ b/packages/remote-config/e2e/config.e2e.js
@@ -427,7 +427,7 @@ describe('remoteConfig()', () => {
const config = firebase.remoteConfig().getAll();
- const remoteProps = ['bool', 'string', 'number'];
+ const remoteProps = ['some_key'];
config.should.have.keys(...remoteProps);
@@ -436,10 +436,6 @@ describe('remoteConfig()', () => {
const configRetrieveAgain = firebase.remoteConfig().getAll();
should(configRetrieveAgain).not.have.properties(remoteProps);
-
- const configRetrieve = firebase.remoteConfig().getValue('some_key').value;
-
- should(configRetrieve).be.equal(undefined);
});
it('returns a "null" value as reset() API is not supported on iOS', async () => {
@@ -450,43 +446,4 @@ describe('remoteConfig()', () => {
}
});
});
-
- describe('call methods, getters & setters that are deprecated, removed or not supported', () => {
- it('call methods, getters & setters that fire a console.warn() & have no return value', () => {
- const config = firebase.remoteConfig();
- const testValue = config.getValue('testValue');
- const testValueSpy = sinon.spy(testValue, 'value', ['get']);
- const testSourceSpy = sinon.spy(testValue, 'source', ['get']);
- const defaultSpy = sinon.spy(config, 'defaultConfig', ['get', 'set']);
- const settingSpy = sinon.spy(config, 'settings', ['set']);
- const isDeveloperModeEnabledSpy = sinon.spy(config, 'isDeveloperModeEnabled', ['get']);
- const minimumFetchIntervalSpy = sinon.spy(config, 'minimumFetchInterval', ['get']);
- const setLogLevelSpy = sinon.spy(config, 'setLogLevel');
- const setConfigSettingsSpy = sinon.spy(config, 'setConfigSettings');
-
- config.defaultConfig;
- config.defaultConfig = {};
- config.settings = {};
- config.fetchTimeMillis;
- config.isDeveloperModeEnabled;
- config.minimumFetchInterval;
- config.setLogLevel();
- config.setConfigSettings({ isDeveloperModeEnabled: true, minimumFetchInterval: 300 });
-
- testValue.value;
- testValue.source;
-
- setConfigSettingsSpy.should.be.calledOnce();
- testValueSpy.get.should.be.calledOnce();
- testSourceSpy.get.should.be.calledOnce();
-
- defaultSpy.get.should.be.calledOnce();
- defaultSpy.set.should.be.calledOnce();
-
- settingSpy.set.should.be.calledOnce();
- isDeveloperModeEnabledSpy.get.should.be.calledOnce();
- minimumFetchIntervalSpy.get.should.be.calledOnce();
- setLogLevelSpy.should.be.calledOnce();
- });
- });
});
diff --git a/packages/remote-config/ios/RNFBConfig/RNFBConfigModule.m b/packages/remote-config/ios/RNFBConfig/RNFBConfigModule.m
index efb14d4fb43..41f9fe6982b 100644
--- a/packages/remote-config/ios/RNFBConfig/RNFBConfigModule.m
+++ b/packages/remote-config/ios/RNFBConfig/RNFBConfigModule.m
@@ -157,20 +157,17 @@ + (BOOL)requiresMainQueueSetup {
: (RCTPromiseResolveBlock) resolve
: (RCTPromiseRejectBlock) reject
) {
- FIRRemoteConfigActivateCompletion completionHandler = ^(NSError *__nullable error) {
- if(error){
- if(error.userInfo && error.userInfo[@"ActivationFailureReason"] != nil && [error.userInfo[@"ActivationFailureReason"] containsString:@"already activated"]){
+ [[FIRRemoteConfig remoteConfigWithApp:firebaseApp] activateWithCompletion:^(BOOL changed, NSError *_Nullable error) {
+ if (error){
+ if (error.userInfo && error.userInfo[@"ActivationFailureReason"] != nil && [error.userInfo[@"ActivationFailureReason"] containsString:@"already activated"]){
resolve([self resultWithConstants:@([RCTConvert BOOL:@(NO)]) firebaseApp:firebaseApp]);
} else {
[RNFBSharedUtils rejectPromiseWithNSError:reject error:error];
}
-
} else {
- resolve([self resultWithConstants:@([RCTConvert BOOL:@(YES)]) firebaseApp:firebaseApp]);
+ resolve([self resultWithConstants:@([RCTConvert BOOL:@(changed)]) firebaseApp:firebaseApp]);
}
- };
-
- [[FIRRemoteConfig remoteConfigWithApp:firebaseApp] activateWithCompletionHandler:completionHandler];
+ }];
}
RCT_EXPORT_METHOD(setConfigSettings:
@@ -242,7 +239,7 @@ - (NSDictionary *)getConstantsForApp:(FIRApp *) firebaseApp {
values[key] = convertFIRRemoteConfigValueToNSDictionary(value);
}
- NSArray *defaultKeys = [remoteConfig allKeysFromSource:FIRRemoteConfigSourceDefault namespace:FIRNamespaceGoogleMobilePlatform];
+ NSArray *defaultKeys = [remoteConfig allKeysFromSource:FIRRemoteConfigSourceDefault];
for (NSString *key in defaultKeys) {
if ([values valueForKey:key] == nil) {
FIRRemoteConfigValue *value = [[FIRRemoteConfig remoteConfigWithApp:firebaseApp] configValueForKey:key];
diff --git a/packages/remote-config/lib/index.js b/packages/remote-config/lib/index.js
index 2f78e0d8cb3..ae9f2152405 100644
--- a/packages/remote-config/lib/index.js
+++ b/packages/remote-config/lib/index.js
@@ -95,31 +95,10 @@ class FirebaseConfigModule extends FirebaseModule {
return values;
}
- get defaultConfig() {
- // eslint-disable-next-line no-console
- console.warn(
- 'firebase.remoteConfig().defaultConfig is not supported. Default values are merged with config values',
- );
- }
-
- set defaultConfig(defaults) {
- // eslint-disable-next-line no-console
- console.warn(
- 'firebase.remoteConfig().defaultConfig is not supported. Please use firebase.remoteConfig().setDefaults({ [key] : value }) to set default values',
- );
- }
-
get settings() {
return this._settings;
}
- set settings(settings) {
- // eslint-disable-next-line no-console
- console.warn(
- "firebase.remoteConfig().settings = { [key]: string }; is not supported. Please use 'firebase.remoteConfig().setConfigSettings({ ...[key]: string, })' instead'",
- );
- }
-
get fetchTimeMillis() {
// android returns -1 if no fetch yet and iOS returns 0
return this._lastFetchTime;
@@ -129,20 +108,6 @@ class FirebaseConfigModule extends FirebaseModule {
return this._lastFetchStatus;
}
- get isDeveloperModeEnabled() {
- // eslint-disable-next-line no-console
- console.warn(
- 'firebase.remoteConfig().isDeveloperModeEnabled has now been removed. Please consider setting `settings.minimumFetchIntervalMillis` in remoteConfig.Settings',
- );
- }
-
- get minimumFetchInterval() {
- // eslint-disable-next-line no-console
- console.warn(
- 'firebase.remoteConfig().minimumFetchInterval has been removed. Use `firebase.remoteConfig().settings.minimumFetchIntervalMillis` instead.',
- );
- }
-
/**
* Deletes all activated, fetched and defaults configs and resets all Firebase Remote Config settings.
* @returns {Promise}
@@ -166,20 +131,6 @@ class FirebaseConfigModule extends FirebaseModule {
throw new Error('firebase.remoteConfig().setConfigSettings(*): settings must set an object.');
}
- if (hasOwnProperty(settings, 'isDeveloperModeEnabled')) {
- // eslint-disable-next-line no-console
- console.warn(
- "firebase.remoteConfig().setConfigSettings(): 'settings.isDeveloperModeEnabled' has now been removed. Please consider setting 'settings.minimumFetchIntervalMillis'",
- );
- }
-
- if (hasOwnProperty(settings, 'minimumFetchInterval')) {
- // eslint-disable-next-line no-console
- console.warn(
- "firebase.remoteConfig().setConfigSettings(): 'settings.minimumFetchInterval' has now been removed. Please consider setting 'settings.minimumFetchIntervalMillis'",
- );
- }
-
if (hasOwnProperty(settings, 'minimumFetchIntervalMillis')) {
if (!isNumber(settings.minimumFetchIntervalMillis)) {
throw new Error(
@@ -265,11 +216,6 @@ class FirebaseConfigModule extends FirebaseModule {
return this._promiseWithConstants(this.native.setDefaultsFromResource(resourceName));
}
- setLogLevel() {
- // eslint-disable-next-line no-console
- console.warn('firebase.remoteConfig().setLogLevel() is not supported natively.');
- }
-
_updateFromConstants(constants) {
this._lastFetchTime = constants.lastFetchTime;
this._lastFetchStatus = constants.lastFetchStatus;
diff --git a/packages/storage/android/build.gradle b/packages/storage/android/build.gradle
index 884aa5fe0ff..ccf15deed03 100644
--- a/packages/storage/android/build.gradle
+++ b/packages/storage/android/build.gradle
@@ -11,7 +11,7 @@ buildscript {
}
dependencies {
- classpath("com.android.tools.build:gradle:4.0.1")
+ classpath("com.android.tools.build:gradle:4.1.0")
}
}
}
diff --git a/packages/template/.gitignore b/packages/template/.gitignore
deleted file mode 100644
index fd4d0e16045..00000000000
--- a/packages/template/.gitignore
+++ /dev/null
@@ -1,62 +0,0 @@
-# Built application files
-project/android/*/build/
-
-# Crashlytics configuations
-project/android/com_crashlytics_export_strings.xml
-
-# Local configuration file (sdk path, etc)
-project/android/local.properties
-
-# Gradle generated files
-project/android/.gradle/
-
-# Signing files
-project/android/.signing/
-
-# User-specific configurations
-project/android/.idea/gradle.xml
-project/android/.idea/libraries/
-project/android/.idea/workspace.xml
-project/android/.idea/tasks.xml
-project/android/.idea/.name
-project/android/.idea/compiler.xml
-project/android/.idea/copyright/profiles_settings.xml
-project/android/.idea/encodings.xml
-project/android/.idea/misc.xml
-project/android/.idea/modules.xml
-project/android/.idea/scopes/scope_settings.xml
-project/android/.idea/vcs.xml
-project/android/*.iml
-
-# Xcode
-*.pbxuser
-*.mode1v3
-*.mode2v3
-*.perspectivev3
-*.xcuserstate
-project/ios/Pods
-project/ios/build
-*project.xcworkspace*
-*xcuserdata*
-
-# OS-specific files
-.DS_Store
-.DS_Store?
-._*
-.Spotlight-V100
-.Trashes
-ehthumbs.db
-Thumbs.db
-
-# Android
-project/android/build
-project/android/.settings
-
-.idea
-yarn.lock
-.github
-.vscode
-.nyc_output
-*.coverage.json
-.circleci
-.eslintignore
diff --git a/packages/template/.npmignore b/packages/template/.npmignore
deleted file mode 100644
index 109d5949f9e..00000000000
--- a/packages/template/.npmignore
+++ /dev/null
@@ -1,65 +0,0 @@
-# Built application files
-project/android/*/build/
-
-# Crashlytics configuations
-project/android/com_crashlytics_export_strings.xml
-
-# Local configuration file (sdk path, etc)
-project/android/local.properties
-
-# Gradle generated files
-project/android/.gradle/
-
-# Signing files
-project/android/.signing/
-
-# User-specific configurations
-project/android/.idea/gradle.xml
-project/android/.idea/libraries/
-project/android/.idea/workspace.xml
-project/android/.idea/tasks.xml
-project/android/.idea/.name
-project/android/.idea/compiler.xml
-project/android/.idea/copyright/profiles_settings.xml
-project/android/.idea/encodings.xml
-project/android/.idea/misc.xml
-project/android/.idea/modules.xml
-project/android/.idea/scopes/scope_settings.xml
-project/android/.idea/vcs.xml
-project/android/*.iml
-
-# Xcode
-*.pbxuser
-*.mode1v3
-*.mode2v3
-*.perspectivev3
-*.xcuserstate
-project/ios/Pods
-project/ios/build
-*project.xcworkspace*
-*xcuserdata*
-
-# OS-specific files
-.DS_Store
-.DS_Store?
-._*
-.Spotlight-V100
-.Trashes
-ehthumbs.db
-Thumbs.db
-
-# Android
-project/android/build
-project/android/.settings
-
-# Node
-project/node_modules
-
-.idea
-yarn.lock
-.github
-.vscode
-.nyc_output
-*.coverage.json
-.circleci
-.eslintignore
diff --git a/packages/template/CHANGELOG.md b/packages/template/CHANGELOG.md
deleted file mode 100644
index 69a18e7e57d..00000000000
--- a/packages/template/CHANGELOG.md
+++ /dev/null
@@ -1,26 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file.
-See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
-
-# [6.10.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/template@6.9.0...@react-native-firebase/template@6.10.0) (2020-08-26)
-
-### Features
-
-- bump firebase sdk versions, add GoogleApi dep, use Android API29 ([#4122](https://github.com/invertase/react-native-firebase/issues/4122)) ([728f418](https://github.com/invertase/react-native-firebase/commit/728f41863832d21230c6eb1f55385284fef03c09))
-
-# [6.9.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/template@6.8.1...@react-native-firebase/template@6.9.0) (2020-08-03)
-
-### Features
-
-- use latest android & ios Firebase SDKs version ([#3956](https://github.com/invertase/react-native-firebase/issues/3956)) ([e7b4bb3](https://github.com/invertase/react-native-firebase/commit/e7b4bb31b05985c044b1f01625a43e364bb653ef))
-
-## [6.8.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/template@6.8.0...@react-native-firebase/template@6.8.1) (2020-05-13)
-
-**Note:** Version bump only for package @react-native-firebase/template
-
-# [6.8.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/template@6.8.0...@react-native-firebase/template@6.8.0) (2020-05-13)
-
-### Features
-
-- update RFNB version ([8f007fa](https://github.com/invertase/react-native-firebase/commit/8f007fa97aa8025520098a234118a15293eb1c55))
diff --git a/packages/template/LICENSE b/packages/template/LICENSE
deleted file mode 100644
index ef3ed44f066..00000000000
--- a/packages/template/LICENSE
+++ /dev/null
@@ -1,32 +0,0 @@
-Apache-2.0 License
-------------------
-
-Copyright (c) 2016-present Invertase Limited & Contributors
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this library except in compliance with the License.
-
-You may obtain a copy of the Apache-2.0 License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-
-Creative Commons Attribution 3.0 License
-----------------------------------------
-
-Copyright (c) 2016-present Invertase Limited & Contributors
-
-Documentation and other instructional materials provided for this project
-(including on a separate documentation repository or it's documentation website) are
-licensed under the Creative Commons Attribution 3.0 License. Code samples/blocks
-contained therein are licensed under the Apache License, Version 2.0 (the "License"), as above.
-
-You may obtain a copy of the Creative Commons Attribution 3.0 License at
-
- https://creativecommons.org/licenses/by/3.0/
diff --git a/packages/template/README.md b/packages/template/README.md
deleted file mode 100644
index c4c245ab0fa..00000000000
--- a/packages/template/README.md
+++ /dev/null
@@ -1,36 +0,0 @@
-
-
-
-
-
React Native Firebase - Template
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
----
-
-```
-npx @react-native-community/cli@next init --template=@react-native-firebase/template@alpha
-```
-
----
-
-
-
-
- Built and maintained with 💛 by Invertase .
-
-
-
----
diff --git a/packages/template/package.json b/packages/template/package.json
deleted file mode 100644
index 00b028d3617..00000000000
--- a/packages/template/package.json
+++ /dev/null
@@ -1,24 +0,0 @@
-{
- "name": "@react-native-firebase/template",
- "version": "6.10.0",
- "author": "Invertase (http://invertase.io)",
- "description": "React Native Firebase - Template",
- "scripts": {
- "build:clean": "rimraf project/android/build && rimraf project/ios/build"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/invertase/react-native-firebase/tree/master/packages/template"
- },
- "license": "Apache-2.0",
- "keywords": [
- "react",
- "react-native",
- "cli",
- "firebase",
- "template"
- ],
- "publishConfig": {
- "access": "public"
- }
-}
diff --git a/packages/template/post-init.js b/packages/template/post-init.js
deleted file mode 100644
index d56585315e3..00000000000
--- a/packages/template/post-init.js
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env node
-/* eslint-disable no-console */
-
-console.log('FooBar');
diff --git a/packages/template/project/App.js b/packages/template/project/App.js
deleted file mode 100644
index d21272386ca..00000000000
--- a/packages/template/project/App.js
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * Sample React Native App with Firebase
- * https://github.com/invertase/react-native-firebase
- *
- * @format
- * @flow
- */
-
-import React, { Component } from 'react';
-import { Platform, StyleSheet, Text, View } from 'react-native';
-
-import firebase from '@react-native-firebase/app';
-
-// TODO(you): import any additional firebase services that you require for your app, e.g for auth:
-// 1) install the npm package: `yarn add @react-native-firebase/auth@alpha` - you do not need to
-// run linking commands - this happens automatically at build time now
-// 2) rebuild your app via `yarn run run:android` or `yarn run run:ios`
-// 3) import the package here in your JavaScript code: `import '@react-native-firebase/auth';`
-// 4) The Firebase Auth service is now available to use here: `firebase.auth().currentUser`
-
-const instructions = Platform.select({
- ios: 'Press Cmd+R to reload,\nCmd+D or shake for dev menu',
- android: 'Double tap R on your keyboard to reload,\nShake or press menu button for dev menu',
-});
-
-const firebaseCredentials = Platform.select({
- ios: 'https://invertase.link/firebase-ios',
- android: 'https://invertase.link/firebase-android',
-});
-
-type Props = {};
-
-export default class App extends Component {
- render() {
- return (
-
- Welcome to React Native + Firebase!
- To get started, edit App.js
- {instructions}
- {!firebase.apps.length && (
-
- {`\nYou currently have no Firebase apps registered, this most likely means you've not downloaded your project credentials. Visit the link below to learn more. \n\n ${firebaseCredentials}`}
-
- )}
-
- );
- }
-}
-
-const styles = StyleSheet.create({
- container: {
- flex: 1,
- justifyContent: 'center',
- alignItems: 'center',
- backgroundColor: '#F5FCFF',
- },
- welcome: {
- fontSize: 20,
- textAlign: 'center',
- margin: 10,
- },
- instructions: {
- textAlign: 'center',
- color: '#333333',
- marginBottom: 5,
- },
-});
diff --git a/packages/template/project/__tests__/App-test.js b/packages/template/project/__tests__/App-test.js
deleted file mode 100644
index 178476699b6..00000000000
--- a/packages/template/project/__tests__/App-test.js
+++ /dev/null
@@ -1,14 +0,0 @@
-/**
- * @format
- */
-
-import 'react-native';
-import React from 'react';
-import App from '../App';
-
-// Note: test renderer must be required after react-native.
-import renderer from 'react-test-renderer';
-
-it('renders correctly', () => {
- renderer.create( );
-});
diff --git a/packages/template/project/_buckconfig b/packages/template/project/_buckconfig
deleted file mode 100644
index 934256cb29d..00000000000
--- a/packages/template/project/_buckconfig
+++ /dev/null
@@ -1,6 +0,0 @@
-
-[android]
- target = Google Inc.:Google APIs:23
-
-[maven_repositories]
- central = https://repo1.maven.org/maven2
diff --git a/packages/template/project/_eslintrc.js b/packages/template/project/_eslintrc.js
deleted file mode 100644
index 40c6dcd05f3..00000000000
--- a/packages/template/project/_eslintrc.js
+++ /dev/null
@@ -1,4 +0,0 @@
-module.exports = {
- root: true,
- extends: '@react-native-community',
-};
diff --git a/packages/template/project/_flowconfig b/packages/template/project/_flowconfig
deleted file mode 100644
index 4afc766a296..00000000000
--- a/packages/template/project/_flowconfig
+++ /dev/null
@@ -1,75 +0,0 @@
-[ignore]
-; We fork some components by platform
-.*/*[.]android.js
-
-; Ignore "BUCK" generated dirs
-/\.buckd/
-
-; Ignore polyfills
-node_modules/react-native/Libraries/polyfills/.*
-
-; These should not be required directly
-; require from fbjs/lib instead: require('fbjs/lib/warning')
-node_modules/warning/.*
-
-; Flow doesn't support platforms
-.*/Libraries/Utilities/LoadingView.js
-
-[untyped]
-.*/node_modules/@react-native-community/cli/.*/.*
-
-[include]
-
-[libs]
-node_modules/react-native/Libraries/react-native/react-native-interface.js
-node_modules/react-native/flow/
-
-[options]
-emoji=true
-
-esproposal.optional_chaining=enable
-esproposal.nullish_coalescing=enable
-
-module.file_ext=.js
-module.file_ext=.json
-module.file_ext=.ios.js
-
-munge_underscores=true
-
-module.name_mapper='^react-native$' -> '/node_modules/react-native/Libraries/react-native/react-native-implementation'
-module.name_mapper='^react-native/\(.*\)$' -> '/node_modules/react-native/\1'
-module.name_mapper='^[./a-zA-Z0-9$_-]+\.\(bmp\|gif\|jpg\|jpeg\|png\|psd\|svg\|webp\|m4v\|mov\|mp4\|mpeg\|mpg\|webm\|aac\|aiff\|caf\|m4a\|mp3\|wav\|html\|pdf\)$' -> '/node_modules/react-native/Libraries/Image/RelativeImageStub'
-
-suppress_type=$FlowIssue
-suppress_type=$FlowFixMe
-suppress_type=$FlowFixMeProps
-suppress_type=$FlowFixMeState
-
-suppress_comment=\\(.\\|\n\\)*\\$FlowFixMe\\($\\|[^(]\\|(\\(\\)? *\\(site=[a-z,_]*react_native\\(_ios\\)?_\\(oss\\|fb\\)[a-z,_]*\\)?)\\)
-suppress_comment=\\(.\\|\n\\)*\\$FlowIssue\\((\\(\\)? *\\(site=[a-z,_]*react_native\\(_ios\\)?_\\(oss\\|fb\\)[a-z,_]*\\)?)\\)?:? #[0-9]+
-suppress_comment=\\(.\\|\n\\)*\\$FlowExpectedError
-
-[lints]
-sketchy-null-number=warn
-sketchy-null-mixed=warn
-sketchy-number=warn
-untyped-type-import=warn
-nonstrict-import=warn
-deprecated-type=warn
-unsafe-getters-setters=warn
-inexact-spread=warn
-unnecessary-invariant=warn
-signature-verification-failure=warn
-deprecated-utility=error
-
-[strict]
-deprecated-type
-nonstrict-import
-sketchy-null
-unclear-type
-unsafe-getters-setters
-untyped-import
-untyped-type-import
-
-[version]
-^0.105.0
diff --git a/packages/template/project/_gitattributes b/packages/template/project/_gitattributes
deleted file mode 100644
index d42ff18354d..00000000000
--- a/packages/template/project/_gitattributes
+++ /dev/null
@@ -1 +0,0 @@
-*.pbxproj -text
diff --git a/packages/template/project/_gitignore b/packages/template/project/_gitignore
deleted file mode 100644
index ad572e632bc..00000000000
--- a/packages/template/project/_gitignore
+++ /dev/null
@@ -1,59 +0,0 @@
-# OSX
-#
-.DS_Store
-
-# Xcode
-#
-build/
-*.pbxuser
-!default.pbxuser
-*.mode1v3
-!default.mode1v3
-*.mode2v3
-!default.mode2v3
-*.perspectivev3
-!default.perspectivev3
-xcuserdata
-*.xccheckout
-*.moved-aside
-DerivedData
-*.hmap
-*.ipa
-*.xcuserstate
-
-# Android/IntelliJ
-#
-build/
-.idea
-.gradle
-local.properties
-*.iml
-
-# node.js
-#
-node_modules/
-npm-debug.log
-yarn-error.log
-
-# BUCK
-buck-out/
-\.buckd/
-*.keystore
-!debug.keystore
-
-# fastlane
-#
-# It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
-# screenshots whenever they are needed.
-# For more information about the recommended setup visit:
-# https://docs.fastlane.tools/best-practices/source-control/
-
-*/fastlane/report.xml
-*/fastlane/Preview.html
-*/fastlane/screenshots
-
-# Bundle artifact
-*.jsbundle
-
-# CocoaPods
-/ios/Pods/
diff --git a/packages/template/project/_watchmanconfig b/packages/template/project/_watchmanconfig
deleted file mode 100644
index 9e26dfeeb6e..00000000000
--- a/packages/template/project/_watchmanconfig
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
diff --git a/packages/template/project/android/app/_BUCK b/packages/template/project/android/app/_BUCK
deleted file mode 100644
index 0e779048347..00000000000
--- a/packages/template/project/android/app/_BUCK
+++ /dev/null
@@ -1,55 +0,0 @@
-# To learn about Buck see [Docs](https://buckbuild.com/).
-# To run your application with Buck:
-# - install Buck
-# - `npm start` - to start the packager
-# - `cd android`
-# - `keytool -genkey -v -keystore keystores/debug.keystore -storepass android -alias androiddebugkey -keypass android -dname "CN=Android Debug,O=Android,C=US"`
-# - `./gradlew :app:copyDownloadableDepsToLibs` - make all Gradle compile dependencies available to Buck
-# - `buck install -r android/app` - compile, install and run application
-#
-
-load(":build_defs.bzl", "create_aar_targets", "create_jar_targets")
-
-lib_deps = []
-
-create_aar_targets(glob(["libs/*.aar"]))
-
-create_jar_targets(glob(["libs/*.jar"]))
-
-android_library(
- name = "all-libs",
- exported_deps = lib_deps,
-)
-
-android_library(
- name = "app-code",
- srcs = glob([
- "src/main/java/**/*.java",
- ]),
- deps = [
- ":all-libs",
- ":build_config",
- ":res",
- ],
-)
-
-android_build_config(
- name = "build_config",
- package = "com.helloworld",
-)
-
-android_resource(
- name = "res",
- package = "com.helloworld",
- res = "src/main/res",
-)
-
-android_binary(
- name = "app",
- keystore = "//android/keystores:debug",
- manifest = "src/main/AndroidManifest.xml",
- package_type = "debug",
- deps = [
- ":app-code",
- ],
-)
diff --git a/packages/template/project/android/app/build.gradle b/packages/template/project/android/app/build.gradle
deleted file mode 100644
index 749e80cefa3..00000000000
--- a/packages/template/project/android/app/build.gradle
+++ /dev/null
@@ -1,205 +0,0 @@
-apply plugin: "com.android.application"
-
-import com.android.build.OutputFile
-
-/**
- * The react.gradle file registers a task for each build variant (e.g. bundleDebugJsAndAssets
- * and bundleReleaseJsAndAssets).
- * These basically call `react-native bundle` with the correct arguments during the Android build
- * cycle. By default, bundleDebugJsAndAssets is skipped, as in debug/dev mode we prefer to load the
- * bundle directly from the development server. Below you can see all the possible configurations
- * and their defaults. If you decide to add a configuration block, make sure to add it before the
- * `apply from: "../../node_modules/react-native/react.gradle"` line.
- *
- * project.ext.react = [
- * // the name of the generated asset file containing your JS bundle
- * bundleAssetName: "index.android.bundle",
- *
- * // the entry file for bundle generation
- * entryFile: "index.android.js",
- *
- * // https://facebook.github.io/react-native/docs/performance#enable-the-ram-format
- * bundleCommand: "ram-bundle",
- *
- * // whether to bundle JS and assets in debug mode
- * bundleInDebug: false,
- *
- * // whether to bundle JS and assets in release mode
- * bundleInRelease: true,
- *
- * // whether to bundle JS and assets in another build variant (if configured).
- * // See http://tools.android.com/tech-docs/new-build-system/user-guide#TOC-Build-Variants
- * // The configuration property can be in the following formats
- * // 'bundleIn${productFlavor}${buildType}'
- * // 'bundleIn${buildType}'
- * // bundleInFreeDebug: true,
- * // bundleInPaidRelease: true,
- * // bundleInBeta: true,
- *
- * // whether to disable dev mode in custom build variants (by default only disabled in release)
- * // for example: to disable dev mode in the staging build type (if configured)
- * devDisabledInStaging: true,
- * // The configuration property can be in the following formats
- * // 'devDisabledIn${productFlavor}${buildType}'
- * // 'devDisabledIn${buildType}'
- *
- * // the root of your project, i.e. where "package.json" lives
- * root: "../../",
- *
- * // where to put the JS bundle asset in debug mode
- * jsBundleDirDebug: "$buildDir/intermediates/assets/debug",
- *
- * // where to put the JS bundle asset in release mode
- * jsBundleDirRelease: "$buildDir/intermediates/assets/release",
- *
- * // where to put drawable resources / React Native assets, e.g. the ones you use via
- * // require('./image.png')), in debug mode
- * resourcesDirDebug: "$buildDir/intermediates/res/merged/debug",
- *
- * // where to put drawable resources / React Native assets, e.g. the ones you use via
- * // require('./image.png')), in release mode
- * resourcesDirRelease: "$buildDir/intermediates/res/merged/release",
- *
- * // by default the gradle tasks are skipped if none of the JS files or assets change; this means
- * // that we don't look at files in android/ or ios/ to determine whether the tasks are up to
- * // date; if you have any other folders that you want to ignore for performance reasons (gradle
- * // indexes the entire tree), add them here. Alternatively, if you have JS files in android/
- * // for example, you might want to remove it from here.
- * inputExcludes: ["android/**", "ios/**"],
- *
- * // override which node gets called and with what additional arguments
- * nodeExecutableAndArgs: ["node"],
- *
- * // supply additional arguments to the packager
- * extraPackagerArgs: []
- * ]
- */
-
-project.ext.react = [
- entryFile: "index.js",
- enableHermes: false, // clean and rebuild if changing
-]
-
-apply from: "../../node_modules/react-native/react.gradle"
-
-/**
- * Set this to true to create two separate APKs instead of one:
- * - An APK that only works on ARM devices
- * - An APK that only works on x86 devices
- * The advantage is the size of the APK is reduced by about 4MB.
- * Upload all the APKs to the Play Store and people will download
- * the correct one based on the CPU architecture of their device.
- */
-def enableSeparateBuildPerCPUArchitecture = false
-
-/**
- * Run Proguard to shrink the Java bytecode in release builds.
- */
-def enableProguardInReleaseBuilds = false
-
-/**
- * The preferred build flavor of JavaScriptCore.
- *
- * For example, to use the international variant, you can use:
- * `def jscFlavor = 'org.webkit:android-jsc-intl:+'`
- *
- * The international variant includes ICU i18n library and necessary data
- * allowing to use e.g. `Date.toLocaleString` and `String.localeCompare` that
- * give correct results when using with locales other than en-US. Note that
- * this variant is about 6MiB larger per architecture than default.
- */
-def jscFlavor = 'org.webkit:android-jsc:+'
-
-/**
- * Whether to enable the Hermes VM.
- *
- * This should be set on project.ext.react and mirrored here. If it is not set
- * on project.ext.react, JavaScript will not be compiled to Hermes Bytecode
- * and the benefits of using Hermes will therefore be sharply reduced.
- */
-def enableHermes = project.ext.react.get("enableHermes", false);
-
-android {
- compileSdkVersion rootProject.ext.compileSdkVersion
-
- compileOptions {
- sourceCompatibility JavaVersion.VERSION_1_8
- targetCompatibility JavaVersion.VERSION_1_8
- }
-
- defaultConfig {
- applicationId "com.helloworld"
- minSdkVersion rootProject.ext.minSdkVersion
- targetSdkVersion rootProject.ext.targetSdkVersion
- versionCode 1
- versionName "1.0"
- aaptOptions{
- noCompress "tflite","model"
- }
- }
- splits {
- abi {
- reset()
- enable enableSeparateBuildPerCPUArchitecture
- universalApk false // If true, also generate a universal APK
- include "armeabi-v7a", "x86", "arm64-v8a", "x86_64"
- }
- }
- signingConfigs {
- debug {
- storeFile file('debug.keystore')
- storePassword 'android'
- keyAlias 'androiddebugkey'
- keyPassword 'android'
- }
- }
- buildTypes {
- debug {
- signingConfig signingConfigs.debug
- }
- release {
- // Caution! In production, you need to generate your own keystore file.
- // see https://facebook.github.io/react-native/docs/signed-apk-android.
- signingConfig signingConfigs.debug
- minifyEnabled enableProguardInReleaseBuilds
- proguardFiles getDefaultProguardFile("proguard-android.txt"), "proguard-rules.pro"
- }
- }
- // applicationVariants are e.g. debug, release
- applicationVariants.all { variant ->
- variant.outputs.each { output ->
- // For each separate APK per architecture, set a unique version code as described here:
- // https://developer.android.com/studio/build/configure-apk-splits.html
- def versionCodes = ["armeabi-v7a": 1, "x86": 2, "arm64-v8a": 3, "x86_64": 4]
- def abi = output.getFilter(OutputFile.ABI)
- if (abi != null) { // null for the universal-debug, universal-release variants
- output.versionCodeOverride =
- versionCodes.get(abi) * 1048576 + defaultConfig.versionCode
- }
-
- }
- }
-}
-
-dependencies {
- implementation fileTree(dir: "libs", include: ["*.jar"])
- implementation "com.facebook.react:react-native:+" // From node_modules
-
- if (enableHermes) {
- def hermesPath = "../../node_modules/hermes-engine/android/";
- debugImplementation files(hermesPath + "hermes-debug.aar")
- releaseImplementation files(hermesPath + "hermes-release.aar")
- } else {
- implementation jscFlavor
- }
-}
-
-// Run this once to be able to run the application with BUCK
-// puts all compile dependencies into folder libs for BUCK to use
-task copyDownloadableDepsToLibs(type: Copy) {
- from configurations.compile
- into 'libs'
-}
-
-apply from: file("../../node_modules/@react-native-community/cli-platform-android/native_modules.gradle"); applyNativeModulesAppBuildGradle(project)
-apply plugin: 'com.google.gms.google-services'
diff --git a/packages/template/project/android/app/build_defs.bzl b/packages/template/project/android/app/build_defs.bzl
deleted file mode 100644
index fff270f8d1d..00000000000
--- a/packages/template/project/android/app/build_defs.bzl
+++ /dev/null
@@ -1,19 +0,0 @@
-"""Helper definitions to glob .aar and .jar targets"""
-
-def create_aar_targets(aarfiles):
- for aarfile in aarfiles:
- name = "aars__" + aarfile[aarfile.rindex("/") + 1:aarfile.rindex(".aar")]
- lib_deps.append(":" + name)
- android_prebuilt_aar(
- name = name,
- aar = aarfile,
- )
-
-def create_jar_targets(jarfiles):
- for jarfile in jarfiles:
- name = "jars__" + jarfile[jarfile.rindex("/") + 1:jarfile.rindex(".jar")]
- lib_deps.append(":" + name)
- prebuilt_jar(
- name = name,
- binary_jar = jarfile,
- )
diff --git a/packages/template/project/android/app/debug.keystore b/packages/template/project/android/app/debug.keystore
deleted file mode 100644
index 364e105ed39..00000000000
Binary files a/packages/template/project/android/app/debug.keystore and /dev/null differ
diff --git a/packages/template/project/android/app/proguard-rules.pro b/packages/template/project/android/app/proguard-rules.pro
deleted file mode 100644
index 11b025724a3..00000000000
--- a/packages/template/project/android/app/proguard-rules.pro
+++ /dev/null
@@ -1,10 +0,0 @@
-# Add project specific ProGuard rules here.
-# By default, the flags in this file are appended to flags specified
-# in /usr/local/Cellar/android-sdk/24.3.3/tools/proguard/proguard-android.txt
-# You can edit the include path and order by changing the proguardFiles
-# directive in build.gradle.
-#
-# For more details, see
-# http://developer.android.com/guide/developing/tools/proguard.html
-
-# Add any project specific keep options here:
diff --git a/packages/template/project/android/app/src/debug/AndroidManifest.xml b/packages/template/project/android/app/src/debug/AndroidManifest.xml
deleted file mode 100644
index fa26aa56e1c..00000000000
--- a/packages/template/project/android/app/src/debug/AndroidManifest.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
-
-
-
diff --git a/packages/template/project/android/app/src/main/AndroidManifest.xml b/packages/template/project/android/app/src/main/AndroidManifest.xml
deleted file mode 100644
index 7bfb4481abb..00000000000
--- a/packages/template/project/android/app/src/main/AndroidManifest.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/packages/template/project/android/app/src/main/java/com/helloworld/MainActivity.java b/packages/template/project/android/app/src/main/java/com/helloworld/MainActivity.java
deleted file mode 100644
index 1e9c1894804..00000000000
--- a/packages/template/project/android/app/src/main/java/com/helloworld/MainActivity.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.helloworld;
-
-import com.facebook.react.ReactActivity;
-
-public class MainActivity extends ReactActivity {
-
- /**
- * Returns the name of the main component registered from JavaScript.
- * This is used to schedule rendering of the component.
- */
- @Override
- protected String getMainComponentName() {
- return "HelloWorld";
- }
-}
diff --git a/packages/template/project/android/app/src/main/java/com/helloworld/MainApplication.java b/packages/template/project/android/app/src/main/java/com/helloworld/MainApplication.java
deleted file mode 100644
index ae1ff7cf28e..00000000000
--- a/packages/template/project/android/app/src/main/java/com/helloworld/MainApplication.java
+++ /dev/null
@@ -1,74 +0,0 @@
-package com.helloworld;
-
-import android.app.Application;
-import android.content.Context;
-import com.facebook.react.PackageList;
-import com.facebook.react.ReactApplication;
-import com.facebook.react.ReactNativeHost;
-import com.facebook.react.ReactPackage;
-import com.facebook.soloader.SoLoader;
-import java.lang.reflect.InvocationTargetException;
-import java.util.List;
-
-public class MainApplication extends Application implements ReactApplication {
-
- private final ReactNativeHost mReactNativeHost =
- new ReactNativeHost(this) {
- @Override
- public boolean getUseDeveloperSupport() {
- return BuildConfig.DEBUG;
- }
-
- @Override
- protected List getPackages() {
- @SuppressWarnings("UnnecessaryLocalVariable")
- List packages = new PackageList(this).getPackages();
- // Packages that cannot be autolinked yet can be added manually here, for example:
- // packages.add(new MyReactNativePackage());
- return packages;
- }
-
- @Override
- protected String getJSMainModuleName() {
- return "index";
- }
- };
-
- @Override
- public ReactNativeHost getReactNativeHost() {
- return mReactNativeHost;
- }
-
- @Override
- public void onCreate() {
- super.onCreate();
- SoLoader.init(this, /* native exopackage */ false);
- initializeFlipper(this); // Remove this line if you don't want Flipper enabled
- }
-
- /**
- * Loads Flipper in React Native templates.
- *
- * @param context
- */
- private static void initializeFlipper(Context context) {
- if (BuildConfig.DEBUG) {
- try {
- /*
- We use reflection here to pick up the class that initializes Flipper,
- since Flipper library is not available in release mode
- */
- Class> aClass = Class.forName("com.facebook.flipper.ReactNativeFlipper");
- aClass.getMethod("initializeFlipper", Context.class).invoke(null, context);
- } catch (ClassNotFoundException e) {
- e.printStackTrace();
- } catch (NoSuchMethodException e) {
- e.printStackTrace();
- } catch (IllegalAccessException e) {
- e.printStackTrace();
- } catch (InvocationTargetException e) {
- e.printStackTrace();
- }
- }
- }
-}
diff --git a/packages/template/project/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/packages/template/project/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
deleted file mode 100644
index a2f5908281d..00000000000
Binary files a/packages/template/project/android/app/src/main/res/mipmap-hdpi/ic_launcher.png and /dev/null differ
diff --git a/packages/template/project/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/packages/template/project/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
deleted file mode 100644
index 1b523998081..00000000000
Binary files a/packages/template/project/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png and /dev/null differ
diff --git a/packages/template/project/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/packages/template/project/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
deleted file mode 100644
index ff10afd6e18..00000000000
Binary files a/packages/template/project/android/app/src/main/res/mipmap-mdpi/ic_launcher.png and /dev/null differ
diff --git a/packages/template/project/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/packages/template/project/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
deleted file mode 100644
index 115a4c768a2..00000000000
Binary files a/packages/template/project/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png and /dev/null differ
diff --git a/packages/template/project/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/packages/template/project/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
deleted file mode 100644
index dcd3cd80833..00000000000
Binary files a/packages/template/project/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png and /dev/null differ
diff --git a/packages/template/project/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/packages/template/project/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
deleted file mode 100644
index 459ca609d3a..00000000000
Binary files a/packages/template/project/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png and /dev/null differ
diff --git a/packages/template/project/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/packages/template/project/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
deleted file mode 100644
index 8ca12fe024b..00000000000
Binary files a/packages/template/project/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png and /dev/null differ
diff --git a/packages/template/project/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/packages/template/project/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
deleted file mode 100644
index 8e19b410a1b..00000000000
Binary files a/packages/template/project/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png and /dev/null differ
diff --git a/packages/template/project/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/packages/template/project/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
deleted file mode 100644
index b824ebdd48d..00000000000
Binary files a/packages/template/project/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png and /dev/null differ
diff --git a/packages/template/project/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/packages/template/project/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
deleted file mode 100644
index 4c19a13c239..00000000000
Binary files a/packages/template/project/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png and /dev/null differ
diff --git a/packages/template/project/android/app/src/main/res/values/strings.xml b/packages/template/project/android/app/src/main/res/values/strings.xml
deleted file mode 100644
index 0c79c4bad47..00000000000
--- a/packages/template/project/android/app/src/main/res/values/strings.xml
+++ /dev/null
@@ -1,3 +0,0 @@
-
- Hello App Display Name
-
diff --git a/packages/template/project/android/app/src/main/res/values/styles.xml b/packages/template/project/android/app/src/main/res/values/styles.xml
deleted file mode 100644
index 319eb0ca100..00000000000
--- a/packages/template/project/android/app/src/main/res/values/styles.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
-
-
diff --git a/packages/template/project/android/build.gradle b/packages/template/project/android/build.gradle
deleted file mode 100644
index af3e38da6ac..00000000000
--- a/packages/template/project/android/build.gradle
+++ /dev/null
@@ -1,40 +0,0 @@
-// Top-level build file where you can add configuration options common to all sub-projects/modules.
-
-buildscript {
- ext {
- buildToolsVersion = "29.0.3"
- minSdkVersion = 16
- compileSdkVersion = 29
- targetSdkVersion = 29
- }
- repositories {
- google()
- jcenter()
- mavenCentral()
- }
- dependencies {
- classpath 'com.android.tools.build:gradle:4.0.1'
- classpath 'com.google.gms:google-services:4.3.3'
-
- // NOTE: Do not place your application dependencies here; they belong
- // in the individual module build.gradle files
- }
-}
-
-allprojects {
- repositories {
- mavenLocal()
- maven {
- // All of React Native (JS, Obj-C sources, Android binaries) is installed from npm
- url("$rootDir/../node_modules/react-native/android")
- }
- maven {
- // Android JSC is installed from npm
- url("$rootDir/../node_modules/jsc-android/dist")
- }
-
- google()
- jcenter()
- maven { url 'https://jitpack.io' }
- }
-}
diff --git a/packages/template/project/android/gradle.properties b/packages/template/project/android/gradle.properties
deleted file mode 100644
index c62d03dc2ca..00000000000
--- a/packages/template/project/android/gradle.properties
+++ /dev/null
@@ -1,21 +0,0 @@
-# Project-wide Gradle settings.
-
-# IDE (e.g. Android Studio) users:
-# Gradle settings configured through the IDE *will override*
-# any settings specified in this file.
-
-# For more details on how to configure your build environment visit
-# http://www.gradle.org/docs/current/userguide/build_environment.html
-
-# Specifies the JVM arguments used for the daemon process.
-# The setting is particularly useful for tweaking memory settings.
-# Default value: -Xmx10248m -XX:MaxPermSize=256m
-org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
-
-# When configured, Gradle will run in incubating parallel mode.
-# This option should only be used with decoupled projects. More details, visit
-# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
-# org.gradle.parallel=true
-
-android.useAndroidX=true
-android.enableJetifier=true
diff --git a/packages/template/project/android/gradle/wrapper/gradle-wrapper.jar b/packages/template/project/android/gradle/wrapper/gradle-wrapper.jar
deleted file mode 100644
index 5c2d1cf016b..00000000000
Binary files a/packages/template/project/android/gradle/wrapper/gradle-wrapper.jar and /dev/null differ
diff --git a/packages/template/project/android/gradle/wrapper/gradle-wrapper.properties b/packages/template/project/android/gradle/wrapper/gradle-wrapper.properties
deleted file mode 100644
index bca17f36566..00000000000
--- a/packages/template/project/android/gradle/wrapper/gradle-wrapper.properties
+++ /dev/null
@@ -1,5 +0,0 @@
-distributionBase=GRADLE_USER_HOME
-distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.6-all.zip
-zipStoreBase=GRADLE_USER_HOME
-zipStorePath=wrapper/dists
diff --git a/packages/template/project/android/gradlew b/packages/template/project/android/gradlew
deleted file mode 100755
index b0d6d0ab5de..00000000000
--- a/packages/template/project/android/gradlew
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env sh
-
-#
-# Copyright 2015 the original author or authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-##############################################################################
-##
-## Gradle start up script for UN*X
-##
-##############################################################################
-
-# Attempt to set APP_HOME
-# Resolve links: $0 may be a link
-PRG="$0"
-# Need this for relative symlinks.
-while [ -h "$PRG" ] ; do
- ls=`ls -ld "$PRG"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '/.*' > /dev/null; then
- PRG="$link"
- else
- PRG=`dirname "$PRG"`"/$link"
- fi
-done
-SAVED="`pwd`"
-cd "`dirname \"$PRG\"`/" >/dev/null
-APP_HOME="`pwd -P`"
-cd "$SAVED" >/dev/null
-
-APP_NAME="Gradle"
-APP_BASE_NAME=`basename "$0"`
-
-# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
-
-# Use the maximum available, or set MAX_FD != -1 to use that value.
-MAX_FD="maximum"
-
-warn () {
- echo "$*"
-}
-
-die () {
- echo
- echo "$*"
- echo
- exit 1
-}
-
-# OS specific support (must be 'true' or 'false').
-cygwin=false
-msys=false
-darwin=false
-nonstop=false
-case "`uname`" in
- CYGWIN* )
- cygwin=true
- ;;
- Darwin* )
- darwin=true
- ;;
- MINGW* )
- msys=true
- ;;
- NONSTOP* )
- nonstop=true
- ;;
-esac
-
-CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
-
-# Determine the Java command to use to start the JVM.
-if [ -n "$JAVA_HOME" ] ; then
- if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
- # IBM's JDK on AIX uses strange locations for the executables
- JAVACMD="$JAVA_HOME/jre/sh/java"
- else
- JAVACMD="$JAVA_HOME/bin/java"
- fi
- if [ ! -x "$JAVACMD" ] ; then
- die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
- fi
-else
- JAVACMD="java"
- which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
-fi
-
-# Increase the maximum file descriptors if we can.
-if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
- MAX_FD_LIMIT=`ulimit -H -n`
- if [ $? -eq 0 ] ; then
- if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
- MAX_FD="$MAX_FD_LIMIT"
- fi
- ulimit -n $MAX_FD
- if [ $? -ne 0 ] ; then
- warn "Could not set maximum file descriptor limit: $MAX_FD"
- fi
- else
- warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
- fi
-fi
-
-# For Darwin, add options to specify how the application appears in the dock
-if $darwin; then
- GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
-fi
-
-# For Cygwin, switch paths to Windows format before running java
-if $cygwin ; then
- APP_HOME=`cygpath --path --mixed "$APP_HOME"`
- CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
- JAVACMD=`cygpath --unix "$JAVACMD"`
-
- # We build the pattern for arguments to be converted via cygpath
- ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
- SEP=""
- for dir in $ROOTDIRSRAW ; do
- ROOTDIRS="$ROOTDIRS$SEP$dir"
- SEP="|"
- done
- OURCYGPATTERN="(^($ROOTDIRS))"
- # Add a user-defined pattern to the cygpath arguments
- if [ "$GRADLE_CYGPATTERN" != "" ] ; then
- OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
- fi
- # Now convert the arguments - kludge to limit ourselves to /bin/sh
- i=0
- for arg in "$@" ; do
- CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
- CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
-
- if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
- eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
- else
- eval `echo args$i`="\"$arg\""
- fi
- i=$((i+1))
- done
- case $i in
- (0) set -- ;;
- (1) set -- "$args0" ;;
- (2) set -- "$args0" "$args1" ;;
- (3) set -- "$args0" "$args1" "$args2" ;;
- (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
- (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
- (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
- (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
- (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
- (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
- esac
-fi
-
-# Escape application args
-save () {
- for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
- echo " "
-}
-APP_ARGS=$(save "$@")
-
-# Collect all arguments for the java command, following the shell quoting and substitution rules
-eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
-
-# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
-if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
- cd "$(dirname "$0")"
-fi
-
-exec "$JAVACMD" "$@"
diff --git a/packages/template/project/android/gradlew.bat b/packages/template/project/android/gradlew.bat
deleted file mode 100644
index 9991c503266..00000000000
--- a/packages/template/project/android/gradlew.bat
+++ /dev/null
@@ -1,100 +0,0 @@
-@rem
-@rem Copyright 2015 the original author or authors.
-@rem
-@rem Licensed under the Apache License, Version 2.0 (the "License");
-@rem you may not use this file except in compliance with the License.
-@rem You may obtain a copy of the License at
-@rem
-@rem http://www.apache.org/licenses/LICENSE-2.0
-@rem
-@rem Unless required by applicable law or agreed to in writing, software
-@rem distributed under the License is distributed on an "AS IS" BASIS,
-@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-@rem See the License for the specific language governing permissions and
-@rem limitations under the License.
-@rem
-
-@if "%DEBUG%" == "" @echo off
-@rem ##########################################################################
-@rem
-@rem Gradle startup script for Windows
-@rem
-@rem ##########################################################################
-
-@rem Set local scope for the variables with windows NT shell
-if "%OS%"=="Windows_NT" setlocal
-
-set DIRNAME=%~dp0
-if "%DIRNAME%" == "" set DIRNAME=.
-set APP_BASE_NAME=%~n0
-set APP_HOME=%DIRNAME%
-
-@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
-
-@rem Find java.exe
-if defined JAVA_HOME goto findJavaFromJavaHome
-
-set JAVA_EXE=java.exe
-%JAVA_EXE% -version >NUL 2>&1
-if "%ERRORLEVEL%" == "0" goto init
-
-echo.
-echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
-
-goto fail
-
-:findJavaFromJavaHome
-set JAVA_HOME=%JAVA_HOME:"=%
-set JAVA_EXE=%JAVA_HOME%/bin/java.exe
-
-if exist "%JAVA_EXE%" goto init
-
-echo.
-echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
-
-goto fail
-
-:init
-@rem Get command-line arguments, handling Windows variants
-
-if not "%OS%" == "Windows_NT" goto win9xME_args
-
-:win9xME_args
-@rem Slurp the command line arguments.
-set CMD_LINE_ARGS=
-set _SKIP=2
-
-:win9xME_args_slurp
-if "x%~1" == "x" goto execute
-
-set CMD_LINE_ARGS=%*
-
-:execute
-@rem Setup the command line
-
-set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
-
-@rem Execute Gradle
-"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
-
-:end
-@rem End local scope for the variables with windows NT shell
-if "%ERRORLEVEL%"=="0" goto mainEnd
-
-:fail
-rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
-rem the _cmd.exe /c_ return code!
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
-exit /b 1
-
-:mainEnd
-if "%OS%"=="Windows_NT" endlocal
-
-:omega
diff --git a/packages/template/project/android/settings.gradle b/packages/template/project/android/settings.gradle
deleted file mode 100644
index e50c29d6298..00000000000
--- a/packages/template/project/android/settings.gradle
+++ /dev/null
@@ -1,3 +0,0 @@
-rootProject.name = 'HelloWorld'
-apply from: file("../node_modules/@react-native-community/cli-platform-android/native_modules.gradle"); applyNativeModulesSettingsGradle(settings)
-include ':app'
diff --git a/packages/template/project/app.json b/packages/template/project/app.json
deleted file mode 100644
index cbbc305181a..00000000000
--- a/packages/template/project/app.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "name": "HelloWorld",
- "displayName": "HelloWorld"
-}
diff --git a/packages/template/project/babel.config.js b/packages/template/project/babel.config.js
deleted file mode 100644
index f842b77fcfb..00000000000
--- a/packages/template/project/babel.config.js
+++ /dev/null
@@ -1,3 +0,0 @@
-module.exports = {
- presets: ['module:metro-react-native-babel-preset'],
-};
diff --git a/packages/template/project/firebase.json b/packages/template/project/firebase.json
deleted file mode 100644
index d7cf66190a6..00000000000
--- a/packages/template/project/firebase.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "react-native": {}
-}
diff --git a/packages/template/project/index.js b/packages/template/project/index.js
deleted file mode 100644
index 9b739329140..00000000000
--- a/packages/template/project/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-/**
- * @format
- */
-
-import { AppRegistry } from 'react-native';
-import App from './App';
-import { name as appName } from './app.json';
-
-AppRegistry.registerComponent(appName, () => App);
diff --git a/packages/template/project/ios/HelloWorld-tvOS/Info.plist b/packages/template/project/ios/HelloWorld-tvOS/Info.plist
deleted file mode 100644
index ecbd496be7d..00000000000
--- a/packages/template/project/ios/HelloWorld-tvOS/Info.plist
+++ /dev/null
@@ -1,53 +0,0 @@
-
-
-
-
- CFBundleDevelopmentRegion
- en
- CFBundleExecutable
- $(EXECUTABLE_NAME)
- CFBundleIdentifier
- $(PRODUCT_BUNDLE_IDENTIFIER)
- CFBundleInfoDictionaryVersion
- 6.0
- CFBundleName
- $(PRODUCT_NAME)
- CFBundlePackageType
- APPL
- CFBundleShortVersionString
- 1.0
- CFBundleSignature
- ????
- CFBundleVersion
- 1
- LSRequiresIPhoneOS
-
- NSAppTransportSecurity
-
- NSExceptionDomains
-
- localhost
-
- NSExceptionAllowsInsecureHTTPLoads
-
-
-
-
- NSLocationWhenInUseUsageDescription
-
- UILaunchStoryboardName
- LaunchScreen
- UIRequiredDeviceCapabilities
-
- armv7
-
- UISupportedInterfaceOrientations
-
- UIInterfaceOrientationPortrait
- UIInterfaceOrientationLandscapeLeft
- UIInterfaceOrientationLandscapeRight
-
- UIViewControllerBasedStatusBarAppearance
-
-
-
diff --git a/packages/template/project/ios/HelloWorld-tvOSTests/Info.plist b/packages/template/project/ios/HelloWorld-tvOSTests/Info.plist
deleted file mode 100644
index 886825ccc9b..00000000000
--- a/packages/template/project/ios/HelloWorld-tvOSTests/Info.plist
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-
-
- CFBundleDevelopmentRegion
- en
- CFBundleExecutable
- $(EXECUTABLE_NAME)
- CFBundleIdentifier
- org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier)
- CFBundleInfoDictionaryVersion
- 6.0
- CFBundleName
- $(PRODUCT_NAME)
- CFBundlePackageType
- BNDL
- CFBundleShortVersionString
- 1.0
- CFBundleSignature
- ????
- CFBundleVersion
- 1
-
-
diff --git a/packages/template/project/ios/HelloWorld.xcodeproj/project.pbxproj b/packages/template/project/ios/HelloWorld.xcodeproj/project.pbxproj
deleted file mode 100644
index 7d7f9320723..00000000000
--- a/packages/template/project/ios/HelloWorld.xcodeproj/project.pbxproj
+++ /dev/null
@@ -1,782 +0,0 @@
-// !$*UTF8*$!
-{
- archiveVersion = 1;
- classes = {
- };
- objectVersion = 46;
- objects = {
-
-/* Begin PBXBuildFile section */
- 00E356F31AD99517003FC87E /* HelloWorldTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 00E356F21AD99517003FC87E /* HelloWorldTests.m */; };
- 13B07FBC1A68108700A75B9A /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB01A68108700A75B9A /* AppDelegate.m */; };
- 13B07FBD1A68108700A75B9A /* LaunchScreen.xib in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB11A68108700A75B9A /* LaunchScreen.xib */; };
- 13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB51A68108700A75B9A /* Images.xcassets */; };
- 13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; };
- 2D02E4BC1E0B4A80006451C7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB01A68108700A75B9A /* AppDelegate.m */; };
- 2D02E4BD1E0B4A84006451C7 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB51A68108700A75B9A /* Images.xcassets */; };
- 2D02E4BF1E0B4AB3006451C7 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; };
- 2DCD954D1E0B4F2C00145EB5 /* HelloWorldTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 00E356F21AD99517003FC87E /* HelloWorldTests.m */; };
-/* End PBXBuildFile section */
-
-/* Begin PBXContainerItemProxy section */
- 00E356F41AD99517003FC87E /* PBXContainerItemProxy */ = {
- isa = PBXContainerItemProxy;
- containerPortal = 83CBB9F71A601CBA00E9B192 /* Project object */;
- proxyType = 1;
- remoteGlobalIDString = 13B07F861A680F5B00A75B9A;
- remoteInfo = HelloWorld;
- };
- 2D02E4911E0B4A5D006451C7 /* PBXContainerItemProxy */ = {
- isa = PBXContainerItemProxy;
- containerPortal = 83CBB9F71A601CBA00E9B192 /* Project object */;
- proxyType = 1;
- remoteGlobalIDString = 2D02E47A1E0B4A5D006451C7;
- remoteInfo = "HelloWorld-tvOS";
- };
-/* End PBXContainerItemProxy section */
-
-/* Begin PBXFileReference section */
- 008F07F21AC5B25A0029DE68 /* main.jsbundle */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = main.jsbundle; sourceTree = ""; };
- 00E356EE1AD99517003FC87E /* HelloWorldTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = HelloWorldTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
- 00E356F11AD99517003FC87E /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
- 00E356F21AD99517003FC87E /* HelloWorldTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = HelloWorldTests.m; sourceTree = ""; };
- 13B07F961A680F5B00A75B9A /* HelloWorld.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = HelloWorld.app; sourceTree = BUILT_PRODUCTS_DIR; };
- 13B07FAF1A68108700A75B9A /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = AppDelegate.h; path = HelloWorld/AppDelegate.h; sourceTree = ""; };
- 13B07FB01A68108700A75B9A /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = AppDelegate.m; path = HelloWorld/AppDelegate.m; sourceTree = ""; };
- 13B07FB21A68108700A75B9A /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/LaunchScreen.xib; sourceTree = ""; };
- 13B07FB51A68108700A75B9A /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = Images.xcassets; path = HelloWorld/Images.xcassets; sourceTree = ""; };
- 13B07FB61A68108700A75B9A /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = Info.plist; path = HelloWorld/Info.plist; sourceTree = ""; };
- 13B07FB71A68108700A75B9A /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = main.m; path = HelloWorld/main.m; sourceTree = ""; };
- 2D02E47B1E0B4A5D006451C7 /* HelloWorld-tvOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "HelloWorld-tvOS.app"; sourceTree = BUILT_PRODUCTS_DIR; };
- 2D02E4901E0B4A5D006451C7 /* HelloWorld-tvOSTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "HelloWorld-tvOSTests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
- ED297162215061F000B7C4FE /* JavaScriptCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = JavaScriptCore.framework; path = System/Library/Frameworks/JavaScriptCore.framework; sourceTree = SDKROOT; };
- ED2971642150620600B7C4FE /* JavaScriptCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = JavaScriptCore.framework; path = Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS12.0.sdk/System/Library/Frameworks/JavaScriptCore.framework; sourceTree = DEVELOPER_DIR; };
-/* End PBXFileReference section */
-
-/* Begin PBXFrameworksBuildPhase section */
- 00E356EB1AD99517003FC87E /* Frameworks */ = {
- isa = PBXFrameworksBuildPhase;
- buildActionMask = 2147483647;
- files = (
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
- 13B07F8C1A680F5B00A75B9A /* Frameworks */ = {
- isa = PBXFrameworksBuildPhase;
- buildActionMask = 2147483647;
- files = (
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
- 2D02E4781E0B4A5D006451C7 /* Frameworks */ = {
- isa = PBXFrameworksBuildPhase;
- buildActionMask = 2147483647;
- files = (
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
- 2D02E48D1E0B4A5D006451C7 /* Frameworks */ = {
- isa = PBXFrameworksBuildPhase;
- buildActionMask = 2147483647;
- files = (
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
-/* End PBXFrameworksBuildPhase section */
-
-/* Begin PBXGroup section */
- 00E356EF1AD99517003FC87E /* HelloWorldTests */ = {
- isa = PBXGroup;
- children = (
- 00E356F21AD99517003FC87E /* HelloWorldTests.m */,
- 00E356F01AD99517003FC87E /* Supporting Files */,
- );
- path = HelloWorldTests;
- sourceTree = "";
- };
- 00E356F01AD99517003FC87E /* Supporting Files */ = {
- isa = PBXGroup;
- children = (
- 00E356F11AD99517003FC87E /* Info.plist */,
- );
- name = "Supporting Files";
- sourceTree = "";
- };
- 13B07FAE1A68108700A75B9A /* HelloWorld */ = {
- isa = PBXGroup;
- children = (
- 008F07F21AC5B25A0029DE68 /* main.jsbundle */,
- 13B07FAF1A68108700A75B9A /* AppDelegate.h */,
- 13B07FB01A68108700A75B9A /* AppDelegate.m */,
- 13B07FB51A68108700A75B9A /* Images.xcassets */,
- 13B07FB61A68108700A75B9A /* Info.plist */,
- 13B07FB11A68108700A75B9A /* LaunchScreen.xib */,
- 13B07FB71A68108700A75B9A /* main.m */,
- );
- name = HelloWorld;
- sourceTree = "";
- };
- 2D16E6871FA4F8E400B85C8A /* Frameworks */ = {
- isa = PBXGroup;
- children = (
- ED297162215061F000B7C4FE /* JavaScriptCore.framework */,
- ED2971642150620600B7C4FE /* JavaScriptCore.framework */,
- );
- name = Frameworks;
- sourceTree = "";
- };
- 832341AE1AAA6A7D00B99B32 /* Libraries */ = {
- isa = PBXGroup;
- children = (
- );
- name = Libraries;
- sourceTree = "";
- };
- 83CBB9F61A601CBA00E9B192 = {
- isa = PBXGroup;
- children = (
- 13B07FAE1A68108700A75B9A /* HelloWorld */,
- 832341AE1AAA6A7D00B99B32 /* Libraries */,
- 00E356EF1AD99517003FC87E /* HelloWorldTests */,
- 83CBBA001A601CBA00E9B192 /* Products */,
- 2D16E6871FA4F8E400B85C8A /* Frameworks */,
- );
- indentWidth = 2;
- sourceTree = "";
- tabWidth = 2;
- usesTabs = 0;
- };
- 83CBBA001A601CBA00E9B192 /* Products */ = {
- isa = PBXGroup;
- children = (
- 13B07F961A680F5B00A75B9A /* HelloWorld.app */,
- 00E356EE1AD99517003FC87E /* HelloWorldTests.xctest */,
- 2D02E47B1E0B4A5D006451C7 /* HelloWorld-tvOS.app */,
- 2D02E4901E0B4A5D006451C7 /* HelloWorld-tvOSTests.xctest */,
- );
- name = Products;
- sourceTree = "