commit 73aee09d9c
Author: haneulai
Date: 2025-07-09 11:33:00 +09:00

519 changed files with 95823 additions and 0 deletions

92
.circleci/config.yml Normal file
@@ -0,0 +1,92 @@
version: 2
executorType: docker
jobs:
build-app:
resource_class: medium
environment:
- GRADLE_OPTS: '-Dorg.gradle.daemon=false -Dorg.gradle.jvmargs="-Xmx3072m -XX:+HeapDumpOnOutOfMemoryError"'
- REACT_NATIVE_MAX_WORKERS: 2
- ANDROID_BUILD_TOOLS_VERSION: '28.0.3'
working_directory: ~/app
docker:
- image: reactnativecommunity/react-native-android
steps:
- checkout
- restore_cache:
keys:
- v1-npm-{{ .Branch }}-{{ checksum "yarn.lock" }}
- v1-npm
- run:
name: Install Dependencies
command: yarn install --ignore-engines
- save_cache:
key: v1-npm
paths:
- node_modules/
- save_cache:
key: v1-npm-{{ .Branch }}-{{ checksum "yarn.lock" }}
paths:
- node_modules/
- run:
name: Lint
command: yarn lint
- restore_cache:
keys:
- v1-gradle-{{ checksum "android/gradle/wrapper/gradle-wrapper.properties" }}-{{ checksum "examples/basic/android/gradle/wrapper/gradle-wrapper.properties" }}
- v1-gradle-wrapper
- restore_cache:
keys:
- v1-gradle-cache-{{ checksum "android/build.gradle" }}-{{ checksum "examples/basic/android/build.gradle" }}
- v1-gradle-cache
- run:
name: Run Checks
command: |
cd android
chmod +x ./gradlew && ./gradlew check
- store_artifacts:
path: android/build/reports
- run:
name: Run Yarn to Generate react.gradle
command: cd examples/basic/android && yarn
- run:
name: Build Sample App
command: |
cd examples/basic/android && chmod +x ./gradlew && ./gradlew clean && ./gradlew build
- store_artifacts:
path: examples/basic/android/app/build/reports
destination: app
- save_cache:
key: v1-gradle-wrapper-{{ checksum "examples/basic/android/gradle/wrapper/gradle-wrapper.properties" }}
paths:
- ~/.gradle/wrapper
- save_cache:
key: v1-gradle-cache-{{ checksum "examples/basic/android/build.gradle" }}
paths:
- ~/.gradle/caches
- deploy:
command: |
if [ "${CIRCLE_BRANCH}" == "master" ]; then
yarn ci:publish
fi
deploy-docs:
working_directory: ~/app
docker:
- image: circleci/node:8.11.1
steps:
- checkout
- run:
name: Deploying to GitHub Pages
command: |
git config --global user.email "${GH_EMAIL}@users.noreply.github.com"
git config --global user.name "${GH_NAME}"
echo "machine github.com login $GH_NAME password $GH_TOKEN_DOCS" > ~/.netrc
cd website && yarn install && GIT_USER=${GH_NAME} yarn run publish-gh-pages
workflows:
version: 2
build-and-deploy-docs:
jobs:
- build-app
- deploy-docs:
filters:
branches:
only: master
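
The workflow above can be sanity-checked locally before pushing. A minimal sketch, assuming the CircleCI CLI is installed:

    # validate the workflow definition without triggering a build
    circleci config validate .circleci/config.yml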

2
.dockerignore Normal file
@@ -0,0 +1,2 @@
*/node_modules
*.log

59
.eslintrc Normal file
@@ -0,0 +1,59 @@
{
"parser": "babel-eslint",
"env": {
"browser": true,
"node": true,
"jest": true,
"es6": true
},
"plugins": ["react", "react-native", "flowtype", "import"],
"parserOptions": {
"ecmaVersion": 6,
"sourceType": "module",
"ecmaFeatures": {
"modules": true
}
},
"extends": [
"eslint:recommended",
"plugin:react/recommended",
"plugin:import/errors"
],
"rules": {
"comma-dangle": [2, "always-multiline"],
"quotes": [2, "single", { "allowTemplateLiterals": true }],
"react/prop-types": 0,
"no-case-declarations": 0,
"react/jsx-no-bind": 0,
"react/display-name": 0,
"new-cap": 0,
"react-native/no-unused-styles": 2,
"react-native/split-platform-components": 0,
"react-native/no-inline-styles": 0,
"react-native/no-color-literals": 0,
"no-unexpected-multiline": 0,
"no-class-assign": 1,
"no-console": 2,
"object-curly-spacing": [1, "always"],
"flowtype/define-flow-type": 1,
"flowtype/use-flow-type": 1,
"import/first": 2,
"import/default": 0,
"no-unused-vars": ["error", { "ignoreRestSiblings": true }],
"import/named": 0,
"import/namespace": [2, { "allowComputed": true }],
"no-extra-boolean-cast": 0,
"import/no-duplicates": 2,
"react/no-deprecated": 0
},
"settings": {
"import/resolver": {
"node": {
"extensions": [".js", ".android.js", ".ios.js", ".json"]
}
}
},
"globals": {
"__DEV__": true
}
}
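
The CI "Lint" step invokes `yarn lint`; a rough command-line equivalent under this config, assuming the listed plugins are installed as devDependencies (the exact package script may differ):

    # lint the tree, including the platform-specific extensions the resolver knows about
    npx eslint . --ext .js,.android.js,.ios.js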

12
.flowconfig Normal file
@@ -0,0 +1,12 @@
[ignore]
.*/node_modules/.*
[include]
[libs]
[lints]
[options]
[strict]
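
With this near-default .flowconfig, type checking is a single command, assuming flow-bin is available in the project:

    # run a full Flow check over everything not ignored above
    npx flow check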

2
.gitattributes vendored Normal file
@@ -0,0 +1,2 @@
# Disable git large files for now!
# RNCameraExample/ios/Frameworks/FaceDetector/Frameworks/frameworks/FaceDetector.framework/FaceDetector filter=lfs diff=lfs merge=lfs -text

4
.github/FUNDING.yml vendored Normal file
@@ -0,0 +1,4 @@
patreon: # Replace with a single Patreon username
open_collective: react-native-camera
tidelift: npm/react-native-camera
custom: # Replace with a single custom sponsorship URL

52
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
@@ -0,0 +1,52 @@
---
name: Bug report
about: Create a report to help us improve
---
# Bug Report
**To Do First**
- [ ] Did you try latest release?
- [ ] Did you try master?
- [ ] Did you look for existing matching issues?
**Platforms**
<!--Comment in the related ones-->
<!--Android-->
<!--iOS-->
**Versions**
<!--Please add the versions/branches used, or leave blank; fill in the optional entries if applicable-->
- Android:
- iOS:
- react-native-camera:
- react-native:
- react:
<!---react-navigation:-->
**Description/Current Behaviour**
<!--place your bug description below-->
**Expected Behaviour**
<!--place your expected behaviour below-->
**Steps to Reproduce**
<!--describe how to produce the error below-->
<!--**Does it work with Expo Camera?**-->
<!--Check usage with Expo and comment in this section: https://github.com/react-native-community/react-native-camera/blob/master/docs/Expo_Usage.md
You should open an issue there as well, so we can cooperate on a solution.-->
**Additional Information**
<!--place screenshots/suggestions and other additional info below-->
> Love react-native-camera? Please consider supporting our collective: 👉 https://opencollective.com/react-native-camera/donate
> Want this issue to be resolved faster? Please consider adding a bounty to it: https://issuehunt.io/repos/33218414

19
.github/ISSUE_TEMPLATE/feature_request.md vendored Normal file
@@ -0,0 +1,19 @@
---
name: Feature request
about: Suggest an idea for this project
---
# Feature-Request
**Describe the Feature**
<!--describe the requested Feature-->
**Possible Implementations**
<!--describe how to implement the feature-->
**Related Issues**
<!--link related issues here-->
> Love react-native-camera? Please consider supporting our collective: 👉 https://opencollective.com/react-native-camera/donate
> Want this feature to be resolved faster? Please consider adding a bounty to it: https://issuehunt.io/repos/33218414

21
.github/ISSUE_TEMPLATE/question.md vendored Normal file
@@ -0,0 +1,21 @@
---
name: Question
about: Ask your question
---
# Question
**To Do First**
- [ ] Take a look in the [README](https://github.com/react-native-community/react-native-camera/blob/master/README.md)
- [ ] Take a look in the [docs](https://github.com/react-native-community/react-native-camera/blob/master/docs/RNCamera.md)
- [ ] Take a look in the [QA](https://github.com/react-native-community/react-native-camera/blob/master/docs/QA.md)
**Ask your Question**
<!--ask your question-->
**Tags**
<!--add some related tags to your question-->
> Love react-native-camera? Please consider supporting our collective: 👉 https://opencollective.com/react-native-camera/donate
> Want this issue to be resolved faster? Please consider adding a bounty to it: https://issuehunt.io/repos/33218414

45
.github/stale.yml vendored Normal file
@@ -0,0 +1,45 @@
# Configuration for probot-stale based on: https://github.com/facebook/react-native/blob/master/.github/stale.yml
# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 60
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
daysUntilClose: 7
# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
exemptLabels:
- pinned
- security
- For Discussion
- semantic-release
- Needs revision
# Set to true to ignore issues in a project (defaults to false)
exemptProjects: false
# Set to true to ignore issues in a milestone (defaults to false)
exemptMilestones: false
# Set to true to ignore issues with an assignee (defaults to false)
exemptAssignees: false
# Label to use when marking as stale
staleLabel: stale
# Comment to post when marking as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions. You may also mark this issue as a "discussion" and I
will leave this open.
# Comment to post when closing a stale Issue or Pull Request.
closeComment: >
Closing this issue after a prolonged period of inactivity. Feel free to reopen
this issue if it is still affecting you.
# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 30
# Limit to only `issues` or `pulls`
only: issues

51
.gitignore vendored Normal file
@@ -0,0 +1,51 @@
# OSX
#
.DS_Store
# Xcode
#
build/
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
xcuserdata
*.xccheckout
*.moved-aside
DerivedData
*.hmap
*.ipa
*.xcuserstate
project.xcworkspace
# Android/IJ
#
*.iml
.idea
.gradle
local.properties
# node.js
#
node_modules/
npm-debug.log
# yarn
yarn-error.log
# BUCK
buck-out/
\.buckd/
android/app/libs
android/keystores/debug.keystore
package-lock.json
# vscode
.vscode
examples/mlkit/android/app/google-services.json
examples/mlkit/ios/Pods
examples/mlkit/ios/mlkit/GoogleService-Info.plist

7
.npmignore Normal file
@@ -0,0 +1,7 @@
/.github
/examples
circle.yml
commitlint.config.js
/android/build
/website
/docs

5
.prettierrc Normal file
@@ -0,0 +1,5 @@
{
"singleQuote": true,
"trailingComma": "all",
"printWidth": 100
}
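
These settings apply wherever prettier is invoked; a sketch, assuming a prettier version that supports --check (the src/ glob is illustrative):

    # report any files whose formatting deviates from .prettierrc
    npx prettier --check "src/**/*.js"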

316
CHANGELOG.md Normal file
@@ -0,0 +1,316 @@
[**THE CHANGELOG OF FURTHER VERSIONS (STARTING WITH 1.4.0) IS MAINTAINED WITH GITHUB RELEASES AND CAN BE FOUND HERE**](https://github.com/react-native-community/react-native-camera/releases)
#### 1.3.1-9 (2018-10-24)
##### Chores
* **package:** bump to 1.3.0 ([501d4ad8](https://github.com/react-native-community/react-native-camera/commit/501d4ad8deb013f36abda18794fbf58c04bd190b))
* **yarn:** update yarn.lock ([fafe4c11](https://github.com/react-native-community/react-native-camera/commit/fafe4c119230a7378ab18ea1dd0634d8eb55a538))
##### Documentation Changes
* add slide-up zoom recipe ([737a5a2e](https://github.com/react-native-community/react-native-camera/commit/737a5a2ef51d52b5a82b2b16972ebe0df40a2fd4))
##### Bug Fixes
* Do not rely in to jcenter but google ([#1874](https://github.com/react-native-community/react-native-camera/pull/1874)) ([92615246](https://github.com/react-native-community/react-native-camera/commit/9261524609ac4d9f29a33fb94847ef78a50b38f3))
* Error retrieving camcorder profile params ([#1835](https://github.com/react-native-community/react-native-camera/pull/1835)) ([8de827e6](https://github.com/react-native-community/react-native-camera/commit/8de827e6c2a027b30668c108b147fb02affb8a35))
* **build:** fix no face detection project ([a7a7abf6](https://github.com/react-native-community/react-native-camera/commit/a7a7abf648e5f2c2f4b6f7e11bc3ed9669916a24))
##### Reverts
* check if face or text detector libraries are included independently ([#1882](https://github.com/react-native-community/react-native-camera/pull/1882)) ([9efd7554](https://github.com/react-native-community/react-native-camera/commit/9efd7554586deed6d8e59cce579b21479365f984))
### 1.3.0-8 (2018-09-26)
##### Build System / Dependencies
* **package:** 1.2.0 ([da8e79cd](https://github.com/react-native-community/react-native-camera/commit/da8e79cd7aa2d62fb1209892c83c440b8f80e0b6))
* **change-log:** 1.2.0 ([1da60b2f](https://github.com/react-native-community/react-native-camera/commit/1da60b2feb260748a3da268486717d854193e7b6))
##### Chores
* **ts:** add doNotSave to ts type definitions ([f0c18b7c](https://github.com/react-native-community/react-native-camera/commit/f0c18b7c2074bb88a92df526525dcf8845afd854))
* **update:** update package.json packages ([#1739](https://github.com/react-native-community/react-native-camera/pull/1739)) ([843cbf4f](https://github.com/react-native-community/react-native-camera/commit/843cbf4ff685a3f211f3b5c31613943d1672bb88))
##### New Features
* adds autoFocusPointOfInterest to iOS ([39cc29de](https://github.com/react-native-community/react-native-camera/commit/39cc29deca9c1de9bb4b1e3a9f31f07b32f7ddc6))
##### Bug Fixes
* **rn-camera:**
* bump platform version to 9.0. closes [#1806](https://github.com/react-native-community/react-native-camera/pull/1806) ([bdbc2564](https://github.com/react-native-community/react-native-camera/commit/bdbc2564ba113e1316306333febb1763b1003aaf))
* use `componentDidMount` instead of `componentWillMount`. closes [#1809](https://github.com/react-native-community/react-native-camera/pull/1809) closes [#1760](https://github.com/react-native-community/react-native-camera/pull/1760) ([2d311ff1](https://github.com/react-native-community/react-native-camera/commit/2d311ff1866977d77d6b0018dbc2a7a2ee511040))
* added some missing typescript declarations ([#1792](https://github.com/react-native-community/react-native-camera/pull/1792)) ([a5c67376](https://github.com/react-native-community/react-native-camera/commit/a5c67376af4bdddef163480441cc773004b86471))
* android base64 string format for consistency with ios ([#1776](https://github.com/react-native-community/react-native-camera/pull/1776)) ([d4b4ee11](https://github.com/react-native-community/react-native-camera/commit/d4b4ee116861919d0ba70f3edea774d1ab252182))
* update docs and add check for focus mode support ([ca5a12b4](https://github.com/react-native-community/react-native-camera/commit/ca5a12b400c599e6ec76d60214c3b479891f4d02))
### 1.2.0-7 (2018-08-09)
##### Build System / Dependencies
* **change-log:** v1.1.5-2 ([e49e35a0](https://github.com/react-native-community/react-native-camera/commit/e49e35a085b1793cc8692d2c1600eb2e14ffbe75))
##### Documentation Changes
* **expo:** explain how to migrate to and from expo camera module ([#1605](https://github.com/react-native-community/react-native-camera/pull/1605)) ([4a9322cb](https://github.com/react-native-community/react-native-camera/commit/4a9322cb8b7d455fc28f7e67a15bff2fd9d7ea3e))
##### New Features
* **preview:**
* add android code ([497a7039](https://github.com/react-native-community/react-native-camera/commit/497a703964e925b6e3e62e39a54a9734a7ed6c40))
* add new props to JS ([9bf9a2e3](https://github.com/react-native-community/react-native-camera/commit/9bf9a2e3162b919d98cab104029250394b2dd3a8))
* add preview methods and more fixes ([b9fb708f](https://github.com/react-native-community/react-native-camera/commit/b9fb708ffc3fd6865191ce6e2bd0a2404a9c657c))
##### Bug Fixes
* **rn-camera:**
* fix codec backwards compat ([91f5bf45](https://github.com/react-native-community/react-native-camera/commit/91f5bf45672a8b83253ed17c3f90eee64b0f07bf))
* fix types, conversions and casts ([83d0618e](https://github.com/react-native-community/react-native-camera/commit/83d0618e988656dfd9a216b85394ceb5f3a05e9b))
* **picture-size:**
* create None default value ([ad87c8e3](https://github.com/react-native-community/react-native-camera/commit/ad87c8e3421f2ff1836674a01cb86deb619cdc4e))
* export method and change default value ([9efb7f14](https://github.com/react-native-community/react-native-camera/commit/9efb7f141f8970ad160c852fa837427a79f3d0dc))
##### Other Changes
* Implement video stabilization mode property for ios ([#1606](https://github.com/react-native-community/react-native-camera/pull/1606)) ([a090faa0](https://github.com/react-native-community/react-native-camera/commit/a090faa09b417afd41af3739ec2b895de9dca6b6))
#### 1.1.5-2 (2018-06-14)
##### Build System / Dependencies
* **change-log:**
* v1.1.4-6 ([86bf1d28](https://github.com/react-native-community/react-native-camera/commit/86bf1d284baf64caa94e3815c9ebed5b0e662369))
* v1.1.3-5 ([98b18950](https://github.com/react-native-community/react-native-camera/commit/98b1895038ccf47f94d7d27811f3540d3847feb7))
* v1.1.2-4 ([4f6b213d](https://github.com/react-native-community/react-native-camera/commit/4f6b213dc63e7ae96c77a1cf1627c14fcda99a94))
* v1.1.1-3 ([821a1b24](https://github.com/react-native-community/react-native-camera/commit/821a1b24e6251ad2a9ba9087c9a427a3b20d0778))
* v1.1.0 ([01e6c843](https://github.com/react-native-community/react-native-camera/commit/01e6c8434d87f4723feff7fec568028bfb140cb5))
* v1.1.0-2 ([deb42144](https://github.com/react-native-community/react-native-camera/commit/deb42144769c3ccc2e593d5dbf586abab244f219))
##### Chores
* **cameraview:**
* integrate google's cameraview directly on rncamera? ([d11ed319](https://github.com/react-native-community/react-native-camera/commit/d11ed31917c26df151b4fb46ab166d2921a9ac99))
* update camera view ([501ffe83](https://github.com/react-native-community/react-native-camera/commit/501ffe8336b9d8bc9743c1ed803fe20b77f2c270))
* **lint:**
* more lint checks ([3bb9a648](https://github.com/react-native-community/react-native-camera/commit/3bb9a6484af306ac66083dd05ac6c46de542f3b4))
* fix some warnings ([7967e2fb](https://github.com/react-native-community/react-native-camera/commit/7967e2fbce44b15a77ae0cbddf76f0b37fc530ba))
* fix lint to make ci work ([919d07b1](https://github.com/react-native-community/react-native-camera/commit/919d07b162f4a39a2454bebdb387224e21a4ba7a))
* **package:** enforce no errors on lint and update packages ([00f4f4c1](https://github.com/react-native-community/react-native-camera/commit/00f4f4c13714a9d4e03a2cd76f2b19de7a78cfe4))
* **gms:** change default gms to 12.0.0 ([94c8968b](https://github.com/react-native-community/react-native-camera/commit/94c8968b2633cfa4e16d1e4275eb831065232014))
##### Documentation Changes
* **expo:** explain how to migrate to and from expo camera module ([#1605](https://github.com/react-native-community/react-native-camera/pull/1605)) ([4a9322cb](https://github.com/react-native-community/react-native-camera/commit/4a9322cb8b7d455fc28f7e67a15bff2fd9d7ea3e))
* **recipes:** add some recipes ([ef5c2fef](https://github.com/react-native-community/react-native-camera/commit/ef5c2fef14530110b0c5aec3a044ca27dcfa8d72))
##### New Features
* **preview:**
* add android code ([497a7039](https://github.com/react-native-community/react-native-camera/commit/497a703964e925b6e3e62e39a54a9734a7ed6c40))
* add new props to JS ([9bf9a2e3](https://github.com/react-native-community/react-native-camera/commit/9bf9a2e3162b919d98cab104029250394b2dd3a8))
* add preview methods and more fixes ([b9fb708f](https://github.com/react-native-community/react-native-camera/commit/b9fb708ffc3fd6865191ce6e2bd0a2404a9c657c))
* **types:**
* add types for [#1547](https://github.com/react-native-community/react-native-camera/pull/1547) ([#1548](https://github.com/react-native-community/react-native-camera/pull/1548)) ([3ce3c80d](https://github.com/react-native-community/react-native-camera/commit/3ce3c80db670cc05dead7636d70dc8fc911a2c6b))
* add types for [#1523](https://github.com/react-native-community/react-native-camera/pull/1523) ([f61004de](https://github.com/react-native-community/react-native-camera/commit/f61004de623a2011e99a6a8092048b513025f5ed))
* add types for [#1518](https://github.com/react-native-community/react-native-camera/pull/1518) (FaCC) ([842dc1cb](https://github.com/react-native-community/react-native-camera/commit/842dc1cb581bd28653549dee86f70c2ff5d65ee2))
* add types for [#1441](https://github.com/react-native-community/react-native-camera/pull/1441) ([be3e0ebf](https://github.com/react-native-community/react-native-camera/commit/be3e0ebfb8ff42a48211b55054325548cd304694))
* add types for [#1428](https://github.com/react-native-community/react-native-camera/pull/1428) ([6cc3d89b](https://github.com/react-native-community/react-native-camera/commit/6cc3d89bec2a55b31c2e7c4f0e597eafc8c31323))
* add types for text detection feature ([c0ace2e9](https://github.com/react-native-community/react-native-camera/commit/c0ace2e94c47a9122a386bcbe99911182da80744))
* **rn-camera:** use and export constants ([c8c6fdea](https://github.com/react-native-community/react-native-camera/commit/c8c6fdea0bf15de60c638f504f38dcb9ac80a3e4))
* **rn_camera:** add function as children ([45cc8f25](https://github.com/react-native-community/react-native-camera/commit/45cc8f25d2de71b9eee29e1fe14e2f4f3d2feee9))
* **ci:** add first circleci lint and check script ([ee385eec](https://github.com/react-native-community/react-native-camera/commit/ee385eec05b9be5e1f96524206e50aa96085ce19))
* **android:** make android gradle check work ([1c7f231a](https://github.com/react-native-community/react-native-camera/commit/1c7f231af460127bebf1f9970367bf64987de34b))
* **play-sound:** play sound on capture (android) ([69242183](https://github.com/react-native-community/react-native-camera/commit/69242183cc65460040795b866095f34090a9598d))
##### Bug Fixes
* **rn-camera:**
* fix codec backwards compat ([91f5bf45](https://github.com/react-native-community/react-native-camera/commit/91f5bf45672a8b83253ed17c3f90eee64b0f07bf))
* fix types, conversions and casts ([83d0618e](https://github.com/react-native-community/react-native-camera/commit/83d0618e988656dfd9a216b85394ceb5f3a05e9b))
* inject correct status ([858cc4c9](https://github.com/react-native-community/react-native-camera/commit/858cc4c9c8fd456390b274ee4cfddb62fee198ee))
* **picture-size:**
* create None default value ([ad87c8e3](https://github.com/react-native-community/react-native-camera/commit/ad87c8e3421f2ff1836674a01cb86deb619cdc4e))
* export method and change default value ([9efb7f14](https://github.com/react-native-community/react-native-camera/commit/9efb7f141f8970ad160c852fa837427a79f3d0dc))
* **cache:** store video recordings in same directory as photos ([bba84a98](https://github.com/react-native-community/react-native-camera/commit/bba84a983446c25f76aa77793f49d4252cd63ea3))
* **rn_camera:** improve naming ([3811d82c](https://github.com/react-native-community/react-native-camera/commit/3811d82c75ceedc27b8aa5550e352159d5daf2b8))
* **search-paths:** remove unnecessary search paths and add missing one ([dee298b4](https://github.com/react-native-community/react-native-camera/commit/dee298b4fefca4659468fd43e914fd1c970ca930))
* **styles:** place style sheet above everything,prevent undefined styles ([01501892](https://github.com/react-native-community/react-native-camera/commit/01501892b5711db765cc367a24ba7c3233678791))
* **warnings:** remove inline styles ([716c4e38](https://github.com/react-native-community/react-native-camera/commit/716c4e389da45fd7d240a8b4acf60a620fa2c372))
* **barcode:** better name google variables and correct init ([38e96ed2](https://github.com/react-native-community/react-native-camera/commit/38e96ed24d6b59e108a0ac175eefff22d7b33c27))
* **Android:** image stretched instead of cropped ([73eb5fd2](https://github.com/react-native-community/react-native-camera/commit/73eb5fd272c28a6369705d30379dcabae3429301))
* **barcode-prop:** fix default value and add more values ([2c87b44b](https://github.com/react-native-community/react-native-camera/commit/2c87b44b1660f44e9f2bc8e7fce207c872933806))
* **docs:**
* move skipProcessing to 'Supported options' ([8054200f](https://github.com/react-native-community/react-native-camera/commit/8054200f81a754ae2d29532b636f55331e996703))
* Header on the wrong position ([589a0819](https://github.com/react-native-community/react-native-camera/commit/589a08192930f96aa4f7cf255aa4ac0adfd31a12))
* **types:** fix types for [#1402](https://github.com/react-native-community/react-native-camera/pull/1402) ([26f9a1e5](https://github.com/react-native-community/react-native-camera/commit/26f9a1e53b3f3b21b86f28d27236849995e7baf9))
* **ios:** add video output early to avoid underexposed beginning ([9ef5b29a](https://github.com/react-native-community/react-native-camera/commit/9ef5b29ad5d66f0e6d52e504dab00b862148c60f))
##### Other Changes
* Implement video stabilization mode property for ios ([#1606](https://github.com/react-native-community/react-native-camera/pull/1606)) ([a090faa0](https://github.com/react-native-community/react-native-camera/commit/a090faa09b417afd41af3739ec2b895de9dca6b6))
* Fix java.lang.ArrayIndexOutOfBoundsException with image rotation ([6ce014d3](https://github.com/react-native-community/react-native-camera/commit/6ce014d3ca3805f908fbdcd30da9b982de3bc2da))
#### 1.1.4-6 (2018-05-21)
#### 1.1.3-5 (2018-05-18)
##### New Features
* **types:**
* add types for [#1547](https://github.com/react-native-community/react-native-camera/pull/1547) ([#1548](https://github.com/react-native-community/react-native-camera/pull/1548)) ([3ce3c80d](https://github.com/react-native-community/react-native-camera/commit/3ce3c80db670cc05dead7636d70dc8fc911a2c6b))
* add types for [#1523](https://github.com/react-native-community/react-native-camera/pull/1523) ([f61004de](https://github.com/react-native-community/react-native-camera/commit/f61004de623a2011e99a6a8092048b513025f5ed))
* add types for [#1518](https://github.com/react-native-community/react-native-camera/pull/1518) (FaCC) ([842dc1cb](https://github.com/react-native-community/react-native-camera/commit/842dc1cb581bd28653549dee86f70c2ff5d65ee2))
* **rn-camera:** use and export constants ([c8c6fdea](https://github.com/react-native-community/react-native-camera/commit/c8c6fdea0bf15de60c638f504f38dcb9ac80a3e4))
* **rn_camera:** add function as children ([45cc8f25](https://github.com/react-native-community/react-native-camera/commit/45cc8f25d2de71b9eee29e1fe14e2f4f3d2feee9))
##### Bug Fixes
* **rn-camera:** inject correct status ([858cc4c9](https://github.com/react-native-community/react-native-camera/commit/858cc4c9c8fd456390b274ee4cfddb62fee198ee))
* **cache:** store video recordings in same directory as photos ([bba84a98](https://github.com/react-native-community/react-native-camera/commit/bba84a983446c25f76aa77793f49d4252cd63ea3))
* **rn_camera:** improve naming ([3811d82c](https://github.com/react-native-community/react-native-camera/commit/3811d82c75ceedc27b8aa5550e352159d5daf2b8))
##### Other Changes
* Fix java.lang.ArrayIndexOutOfBoundsException with image rotation ([6ce014d3](https://github.com/react-native-community/react-native-camera/commit/6ce014d3ca3805f908fbdcd30da9b982de3bc2da))
#### 1.1.2-4 (2018-04-25)
##### Chores
* **cameraview:** integrate google's cameraview directly on rncamera? ([d11ed319](https://github.com/react-native-community/react-native-camera/commit/d11ed31917c26df151b4fb46ab166d2921a9ac99))
##### Bug Fixes
* **search-paths:** remove unnecessary search paths and add missing one ([dee298b4](https://github.com/react-native-community/react-native-camera/commit/dee298b4fefca4659468fd43e914fd1c970ca930))
#### 1.1.1-3 (2018-04-15)
##### Build System / Dependencies
* **change-log:** v1.1.0 ([01e6c843](https://github.com/react-native-community/react-native-camera/commit/01e6c8434d87f4723feff7fec568028bfb140cb5))
##### Chores
* **lint:**
* more lint checks ([3bb9a648](https://github.com/react-native-community/react-native-camera/commit/3bb9a6484af306ac66083dd05ac6c46de542f3b4))
* fix some warnings ([7967e2fb](https://github.com/react-native-community/react-native-camera/commit/7967e2fbce44b15a77ae0cbddf76f0b37fc530ba))
* fix lint to make ci work ([919d07b1](https://github.com/react-native-community/react-native-camera/commit/919d07b162f4a39a2454bebdb387224e21a4ba7a))
* **package:** enforce no errors on lint and update packages ([00f4f4c1](https://github.com/react-native-community/react-native-camera/commit/00f4f4c13714a9d4e03a2cd76f2b19de7a78cfe4))
##### New Features
* **ci:** add first circleci lint and check script ([ee385eec](https://github.com/react-native-community/react-native-camera/commit/ee385eec05b9be5e1f96524206e50aa96085ce19))
* **android:** make android gradle check work ([1c7f231a](https://github.com/react-native-community/react-native-camera/commit/1c7f231af460127bebf1f9970367bf64987de34b))
##### Bug Fixes
* **styles:** place style sheet above everything,prevent undefined styles ([01501892](https://github.com/react-native-community/react-native-camera/commit/01501892b5711db765cc367a24ba7c3233678791))
* **warnings:** remove inline styles ([716c4e38](https://github.com/react-native-community/react-native-camera/commit/716c4e389da45fd7d240a8b4acf60a620fa2c372))
### 1.1.0-2 (2018-04-15)
##### Chores
* **gms:** change default gms to 12.0.0 ([94c8968b](https://github.com/react-native-community/react-native-camera/commit/94c8968b2633cfa4e16d1e4275eb831065232014))
* **cameraview:** update camera view ([501ffe83](https://github.com/react-native-community/react-native-camera/commit/501ffe8336b9d8bc9743c1ed803fe20b77f2c270))
##### Documentation Changes
* **recipes:** add some recipes ([ef5c2fef](https://github.com/react-native-community/react-native-camera/commit/ef5c2fef14530110b0c5aec3a044ca27dcfa8d72))
##### New Features
* **types:**
* add types for [#1441](https://github.com/react-native-community/react-native-camera/pull/1441) ([be3e0ebf](https://github.com/react-native-community/react-native-camera/commit/be3e0ebfb8ff42a48211b55054325548cd304694))
* add types for [#1428](https://github.com/react-native-community/react-native-camera/pull/1428) ([6cc3d89b](https://github.com/react-native-community/react-native-camera/commit/6cc3d89bec2a55b31c2e7c4f0e597eafc8c31323))
* add types for text detection feature ([c0ace2e9](https://github.com/react-native-community/react-native-camera/commit/c0ace2e94c47a9122a386bcbe99911182da80744))
* **play-sound:** play sound on capture (android) ([69242183](https://github.com/react-native-community/react-native-camera/commit/69242183cc65460040795b866095f34090a9598d))
##### Bug Fixes
* **barcode:** better name google variables and correct init ([38e96ed2](https://github.com/react-native-community/react-native-camera/commit/38e96ed24d6b59e108a0ac175eefff22d7b33c27))
* **Android:** image stretched instead of cropped ([73eb5fd2](https://github.com/react-native-community/react-native-camera/commit/73eb5fd272c28a6369705d30379dcabae3429301))
* **barcode-prop:** fix default value and add more values ([2c87b44b](https://github.com/react-native-community/react-native-camera/commit/2c87b44b1660f44e9f2bc8e7fce207c872933806))
* **docs:**
* move skipProcessing to 'Supported options' ([8054200f](https://github.com/react-native-community/react-native-camera/commit/8054200f81a754ae2d29532b636f55331e996703))
* Header on the wrong position ([589a0819](https://github.com/react-native-community/react-native-camera/commit/589a08192930f96aa4f7cf255aa4ac0adfd31a12))
* **types:** fix types for [#1402](https://github.com/react-native-community/react-native-camera/pull/1402) ([26f9a1e5](https://github.com/react-native-community/react-native-camera/commit/26f9a1e53b3f3b21b86f28d27236849995e7baf9))
* **ios:** add video output early to avoid underexposed beginning ([9ef5b29a](https://github.com/react-native-community/react-native-camera/commit/9ef5b29ad5d66f0e6d52e504dab00b862148c60f))
#### 1.0.3-1 (2018-03-24)
##### Chores
* restored original CameraModule.java ([7bea109e](https://github.com/react-native-community/react-native-camera/commit/7bea109e47a5b7302069f9774a4c7fb2d1652275))
##### Documentation Changes
* **rncamera:**
* specifying onTextRecognized callback prototype ([48611212](https://github.com/react-native-community/react-native-camera/commit/48611212f56eed8d9594693c84fe3f00cbb8448b))
* docs for text recognition usage ([68639b82](https://github.com/react-native-community/react-native-camera/commit/68639b82ed98ef53ac1a0cc1762c35c5941b61b6))
* **codec:** document ios codec option ([2b9d8db2](https://github.com/react-native-community/react-native-camera/commit/2b9d8db21389af624fd7ee3fe0eafa8348a3b776))
##### New Features
* **chore:** try to automate changelog ([cc5f6e62](https://github.com/react-native-community/react-native-camera/commit/cc5f6e62eb78a7de884a3b770eaa12c03a626721))
* **android:**
* integrating Google Vision's text recognition ([fcaa9452](https://github.com/react-native-community/react-native-camera/commit/fcaa9452865247ba8aa63e6fd323bd86ea0f7401))
* **Android:**
* **types:** update types for video recording codec ([f9252254](https://github.com/react-native-community/react-native-camera/commit/f925225484ca1599652039b612fc7deba635de6f))
* **rn-camera:** add codec option for ios ([c0d5aabf](https://github.com/react-native-community/react-native-camera/commit/c0d5aabf0b32f71326ff153d31e3cb5c588062da))
##### Bug Fixes
* **typo:** fix typo on package.json ([706278d8](https://github.com/react-native-community/react-native-camera/commit/706278d807edac5bc9eb606e29b3326790d7816c))
* **textrecognition:** height of text block ([01e763b1](https://github.com/react-native-community/react-native-camera/commit/01e763b1430cdb65d82c78c08a5215da65706e6d))
* issue [#1246](https://github.com/react-native-community/react-native-camera/pull/1246) - torch will be disabled when starting the record ([8c696017](https://github.com/react-native-community/react-native-camera/commit/8c6960178922492bf49fc44fbab25b638209dc4e))
* **ios-project:** fix path to parent's ios project ([4496c321](https://github.com/react-native-community/react-native-camera/commit/4496c3217195853a36c261415f126140ddebbcc4))
#### 1.0.2 (2018-03-10)
##### Chores
* **flow:** add missing types to Picture options ([6bff4d93](https://github.com/react-native-community/react-native-camera/commit/6bff4d935ac421f4aea395c58f5916df78cdae0a))
* **types:** add new keys to TakePictureOptions ([cc272036](https://github.com/react-native-community/react-native-camera/commit/cc272036581f68dbdce1b596644a158a42c471dc))
* **face-detector:** make face detection stoppage smoother ([3b3c38dd](https://github.com/react-native-community/react-native-camera/commit/3b3c38dd7d08edd1dad3b6c7fb944515fcb1e9c4))
##### New Features
* **types:**
* add FaceDetector declarations ([ba218750](https://github.com/react-native-community/react-native-camera/commit/ba21875001df2e260feb87d71411ff89fe6942ea))
* add TypeScript definition files ([a94bad5e](https://github.com/react-native-community/react-native-camera/commit/a94bad5e3739927dd50b850f68ed57a59f782e99))
##### Bug Fixes
* **types:**
* fix onBarCodeRead type ([a9947b47](https://github.com/react-native-community/react-native-camera/commit/a9947b47d569227ed6b83ef2988a8cbd3e6b7b41))
* fix definition for RNCameraProps.ratio ([4d1616c5](https://github.com/react-native-community/react-native-camera/commit/4d1616c57a059127db07f52ca18a8b092ba559ad))
* **android-camera:** revert to old camera api ([8d9c06ad](https://github.com/react-native-community/react-native-camera/commit/8d9c06ad903b40abc8bef67927d4621c494aeb3b))
#### 1.0.1 (2018-02-14)
##### New Features
* **release-script:** add script to package json ([b0503dc8](https://github.com/react-native-community/react-native-camera/commit/b0503dc8aefc1d2a992c1778e00c5d0f8dfd6901))
* **changelog:** add changelog script ([d2263937](https://github.com/react-native-community/react-native-camera/commit/d226393783748f973cc99032343fc55e45828717))
* **mirror:** add option to give "mirrorImage" flag to takePicture. ([0b6f0abd](https://github.com/react-native-community/react-native-camera/commit/0b6f0abda07b8a9ff3daa1722a254087f30eec08))
##### Bug Fixes
* **focusWarning:** fix focus depth warning being shown for ios. ([79698b81](https://github.com/react-native-community/react-native-camera/commit/79698b815b44507037a6e89fda40b5c505703c00))
* **imports:** delete some useless imports which may cause problems ([a5b9f7e7](https://github.com/react-native-community/react-native-camera/commit/a5b9f7e717bc11aad9a8e5d9e9a449ad7fd9c9fa))
### master
### 1.0.0
- RNCamera as main camera implementation for both iOS and Android (based on expo module)
- FaceDetector feature for both iOS and Android (based on expo module)
- RCTCamera deprecated
### 0.13.0
- added RNCamera implementation for android
- added FaceDetector for android

10
Dockerfile Normal file
@@ -0,0 +1,10 @@
FROM node:8.11.4
WORKDIR /app/website
EXPOSE 3000 35729
COPY ./docs /app/docs
COPY ./website /app/website
RUN yarn install
CMD ["yarn", "start"]
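
The image copies docs/ and website/ and serves the site on port 3000, with 35729 as the livereload port. A typical local run, assuming the build is made from the repo root and the image tag is arbitrary:

    # build the docs image and run it with both exposed ports mapped
    docker build -t rn-camera-docs .
    docker run --rm -p 3000:3000 -p 35729:35729 rn-camera-docs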

22
LICENSE Normal file
@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015 Loch Wansbrough
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

2
README.md Normal file
@@ -0,0 +1,2 @@
This is an edit of https://react-native-community.github.io/react-native-camera/ for individual purposes

246
THIRD-PARTY-LICENSES Normal file
@@ -0,0 +1,246 @@
===============================================================================
expo/expo
https://github.com/expo/expo
-------------------------------------------------------------------------------
BSD License
For Exponent software
Copyright (c) 2015-present, 650 Industries, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names 650 Industries, Exponent, nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
===============================================================================
google/cameraview
https://github.com/google/cameraview
-------------------------------------------------------------------------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

6
android/.classpath Normal file
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8/"/>
<classpathentry kind="con" path="org.eclipse.buildship.core.gradleclasspathcontainer"/>
<classpathentry kind="output" path="bin/default"/>
</classpath>

23
android/.project Normal file
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>react-native-camera</name>
<comment>Project react-native-camera created by Buildship.</comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.buildship.core.gradleprojectbuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.buildship.core.gradleprojectnature</nature>
</natures>
</projectDescription>

@@ -0,0 +1,2 @@
connection.project.dir=../../../android
eclipse.preferences.version=1

90
android/build.gradle Normal file
@@ -0,0 +1,90 @@
def safeExtGet(prop, fallback) {
rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback
}
buildscript {
// The Android Gradle plugin is only required when opening the android folder stand-alone.
// This avoids unnecessary downloads and potential conflicts when the library is included as a
// module dependency in an application project.
if (project == rootProject) {
repositories {
google()
jcenter()
}
dependencies {
//noinspection GradleDependency
classpath("com.android.tools.build:gradle:3.5.2")
}
}
}
apply plugin: 'com.android.library'
android {
compileSdkVersion safeExtGet('compileSdkVersion', 28)
buildToolsVersion safeExtGet('buildToolsVersion', '28.0.3')
defaultConfig {
minSdkVersion safeExtGet('minSdkVersion', 16)
targetSdkVersion safeExtGet('targetSdkVersion', 28)
}
flavorDimensions "react-native-camera"
productFlavors {
general {
dimension "react-native-camera"
}
mlkit {
dimension "react-native-camera"
}
}
sourceSets {
main {
java.srcDirs = ['src/main/java']
}
general {
java.srcDirs = ['src/general/java']
}
mlkit {
java.srcDirs = ['src/mlkit/java']
}
}
lintOptions {
abortOnError false
warning 'InvalidPackage'
}
packagingOptions {
exclude 'META-INF/androidx.exifinterface_exifinterface.version'
exclude 'META-INF/proguard/androidx-annotations.pro'
}
}
repositories {
google()
jcenter()
mavenCentral()
maven { url "https://jitpack.io" }
maven {
// All of React Native (JS, Obj-C sources, Android binaries) is installed from npm
url "$rootDir/../node_modules/react-native/android"
}
}
dependencies {
def googlePlayServicesVisionVersion = safeExtGet('googlePlayServicesVisionVersion', safeExtGet('googlePlayServicesVersion', '17.0.2'))
implementation 'com.facebook.react:react-native:+'
implementation "com.google.zxing:core:3.3.3"
implementation "com.drewnoakes:metadata-extractor:2.11.0"
generalImplementation "com.google.android.gms:play-services-vision:$googlePlayServicesVisionVersion"
implementation "androidx.exifinterface:exifinterface:1.0.0"
implementation "androidx.annotation:annotation:1.0.0"
implementation "androidx.legacy:legacy-support-v4:1.0.0"
mlkitImplementation "com.google.firebase:firebase-ml-vision:${safeExtGet('firebase-ml-vision', '19.0.3')}"
mlkitImplementation "com.google.firebase:firebase-ml-vision-face-model:${safeExtGet('firebase-ml-vision-face-model', '17.0.2')}"
}
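
Since the library declares two product flavors, Gradle generates per-flavor tasks; versions resolve through safeExtGet, so an app can override values such as compileSdkVersion via rootProject.ext. A sketch of building each flavor stand-alone (task names assume the default debug build type):

    # assemble the Play Services flavor of the library
    cd android
    ./gradlew assembleGeneralDebug
    # or the Firebase ML Kit flavor
    ./gradlew assembleMlkitDebug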

@@ -0,0 +1,2 @@
android.enableJetifier=true
android.useAndroidX=true

Binary file not shown.

6
android/gradle/wrapper/gradle-wrapper.properties vendored Normal file
@@ -0,0 +1,6 @@
#Wed Jan 23 23:35:17 CST 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.2-all.zip
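
Rather than editing the pinned distributionUrl by hand, the wrapper task can regenerate this file; a sketch:

    # re-pin the wrapper to a given Gradle distribution (the "all" variant includes sources/docs)
    ./gradlew wrapper --gradle-version 5.6.2 --distribution-type all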

160
android/gradlew vendored Normal file
@@ -0,0 +1,160 @@
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
echo "$*"
}
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS and GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"

90
android/gradlew.bat vendored Normal file
View File

@ -0,0 +1,90 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -0,0 +1,72 @@
package org.reactnative.barcodedetector;
import android.util.SparseArray;
import com.google.android.gms.vision.barcode.Barcode;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class BarcodeFormatUtils {
public static final SparseArray<String> FORMATS;
public static final Map<String, Integer> REVERSE_FORMATS;
private static final String UNKNOWN_FORMAT_STRING = "UNKNOWN_FORMAT";
private static final int UNKNOWN_FORMAT_INT = -1;
static {
// Initialize integer to string map
SparseArray<String> map = new SparseArray<>();
map.put(Barcode.CODE_128, "CODE_128");
map.put(Barcode.CODE_39, "CODE_39");
map.put(Barcode.CODE_93, "CODE_93");
map.put(Barcode.CODABAR, "CODABAR");
map.put(Barcode.DATA_MATRIX, "DATA_MATRIX");
map.put(Barcode.EAN_13, "EAN_13");
map.put(Barcode.EAN_8, "EAN_8");
map.put(Barcode.ITF, "ITF");
map.put(Barcode.QR_CODE, "QR_CODE");
map.put(Barcode.UPC_A, "UPC_A");
map.put(Barcode.UPC_E, "UPC_E");
map.put(Barcode.PDF417, "PDF417");
map.put(Barcode.AZTEC, "AZTEC");
map.put(Barcode.ALL_FORMATS, "ALL");
map.put(Barcode.CALENDAR_EVENT, "CALENDAR_EVENT");
map.put(Barcode.CONTACT_INFO, "CONTACT_INFO");
map.put(Barcode.DRIVER_LICENSE, "DRIVER_LICENSE");
map.put(Barcode.EMAIL, "EMAIL");
map.put(Barcode.GEO, "GEO");
map.put(Barcode.ISBN, "ISBN");
map.put(Barcode.PHONE, "PHONE");
map.put(Barcode.PRODUCT, "PRODUCT");
map.put(Barcode.SMS, "SMS");
map.put(Barcode.TEXT, "TEXT");
map.put(Barcode.URL, "URL");
map.put(Barcode.WIFI, "WIFI");
map.put(-1, "None");
FORMATS = map;
// Initialize string to integer map
Map<String, Integer> rmap = new HashMap<>();
for (int i = 0; i < map.size(); i++) {
rmap.put(map.valueAt(i), map.keyAt(i));
}
REVERSE_FORMATS = Collections.unmodifiableMap(rmap);
}
public static String get(int format) {
return FORMATS.get(format, UNKNOWN_FORMAT_STRING);
}
public static int get(String format) {
if (REVERSE_FORMATS.containsKey(format)) {
return REVERSE_FORMATS.get(format);
}
return UNKNOWN_FORMAT_INT;
}
}
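
A quick round trip through the two tables above, as a hypothetical snippet (values are illustrative; assumes the Play Services Vision Barcode class is on the classpath):

// Hypothetical lookup round trip via BarcodeFormatUtils.
String name = BarcodeFormatUtils.get(Barcode.QR_CODE);     // "QR_CODE"
int format = BarcodeFormatUtils.get(name);                 // == Barcode.QR_CODE
int unknown = BarcodeFormatUtils.get("NOT_A_REAL_FORMAT"); // -1 (UNKNOWN_FORMAT_INT)
String fallback = BarcodeFormatUtils.get(123456);          // "UNKNOWN_FORMAT"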

View File

@ -0,0 +1,80 @@
package org.reactnative.barcodedetector;
import android.content.Context;
import android.util.SparseArray;
import com.google.android.gms.vision.barcode.Barcode;
import com.google.android.gms.vision.barcode.BarcodeDetector;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.frame.RNFrame;
public class RNBarcodeDetector {
public static int NORMAL_MODE = 0;
public static int ALTERNATE_MODE = 1;
public static int INVERTED_MODE = 2;
public static int ALL_FORMATS = Barcode.ALL_FORMATS;
private BarcodeDetector mBarcodeDetector = null;
private ImageDimensions mPreviousDimensions;
private BarcodeDetector.Builder mBuilder;
private int mBarcodeType = Barcode.ALL_FORMATS;
public RNBarcodeDetector(Context context) {
mBuilder = new BarcodeDetector.Builder(context)
.setBarcodeFormats(mBarcodeType);
}
// Public API
public boolean isOperational() {
if (mBarcodeDetector == null) {
createBarcodeDetector();
}
return mBarcodeDetector.isOperational();
}
public SparseArray<Barcode> detect(RNFrame frame) {
// If the frame has different dimensions, create another barcode detector.
// Otherwise we will most likely get a nasty "inconsistent image dimensions" error
// from the detector and no barcode will be detected.
if (!frame.getDimensions().equals(mPreviousDimensions)) {
releaseBarcodeDetector();
}
if (mBarcodeDetector == null) {
createBarcodeDetector();
mPreviousDimensions = frame.getDimensions();
}
return mBarcodeDetector.detect(frame.getFrame());
}
public void setBarcodeType(int barcodeType) {
if (barcodeType != mBarcodeType) {
release();
mBuilder.setBarcodeFormats(barcodeType);
mBarcodeType = barcodeType;
}
}
public void release() {
releaseBarcodeDetector();
mPreviousDimensions = null;
}
// Lifecycle methods
private void releaseBarcodeDetector() {
if (mBarcodeDetector != null) {
mBarcodeDetector.release();
mBarcodeDetector = null;
}
}
private void createBarcodeDetector() {
mBarcodeDetector = mBuilder.build();
}
}
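
The detector is built lazily and recreated whenever frame dimensions change, so a caller only manages the outer lifecycle. A minimal sketch, assuming an Android Context and an RNFrame are already available:

RNBarcodeDetector detector = new RNBarcodeDetector(context);
detector.setBarcodeType(Barcode.QR_CODE | Barcode.EAN_13); // formats combine as a bitmask
if (detector.isOperational()) {
    // detect() silently rebuilds the native detector if frame dimensions changed
    SparseArray<Barcode> barcodes = detector.detect(frame);
}
detector.release(); // free the native detector when finished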

View File

@ -0,0 +1,128 @@
package org.reactnative.camera.tasks;
import android.graphics.Rect;
import android.util.SparseArray;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.google.android.gms.vision.barcode.Barcode;
import org.reactnative.barcodedetector.BarcodeFormatUtils;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.frame.RNFrame;
import org.reactnative.frame.RNFrameFactory;
import org.reactnative.barcodedetector.RNBarcodeDetector;
public class BarcodeDetectorAsyncTask extends android.os.AsyncTask<Void, Void, SparseArray<Barcode>> {
private byte[] mImageData;
private int mWidth;
private int mHeight;
private int mRotation;
private RNBarcodeDetector mBarcodeDetector;
private BarcodeDetectorAsyncTaskDelegate mDelegate;
private double mScaleX;
private double mScaleY;
private ImageDimensions mImageDimensions;
private int mPaddingLeft;
private int mPaddingTop;
public BarcodeDetectorAsyncTask(
BarcodeDetectorAsyncTaskDelegate delegate,
RNBarcodeDetector barcodeDetector,
byte[] imageData,
int width,
int height,
int rotation,
float density,
int facing,
int viewWidth,
int viewHeight,
int viewPaddingLeft,
int viewPaddingTop
) {
mImageData = imageData;
mWidth = width;
mHeight = height;
mRotation = rotation;
mDelegate = delegate;
mBarcodeDetector = barcodeDetector;
mImageDimensions = new ImageDimensions(width, height, rotation, facing);
mScaleX = (double) (viewWidth) / (mImageDimensions.getWidth() * density);
mScaleY = (double) (viewHeight) / (mImageDimensions.getHeight() * density);
mPaddingLeft = viewPaddingLeft;
mPaddingTop = viewPaddingTop;
}
@Override
protected SparseArray<Barcode> doInBackground(Void... ignored) {
if (isCancelled() || mDelegate == null || mBarcodeDetector == null || !mBarcodeDetector.isOperational()) {
return null;
}
RNFrame frame = RNFrameFactory.buildFrame(mImageData, mWidth, mHeight, mRotation);
return mBarcodeDetector.detect(frame);
}
@Override
protected void onPostExecute(SparseArray<Barcode> barcodes) {
super.onPostExecute(barcodes);
if (barcodes == null) {
mDelegate.onBarcodeDetectionError(mBarcodeDetector);
} else {
if (barcodes.size() > 0) {
mDelegate.onBarcodesDetected(serializeEventData(barcodes));
}
mDelegate.onBarcodeDetectingTaskCompleted();
}
}
private WritableArray serializeEventData(SparseArray<Barcode> barcodes) {
WritableArray barcodesList = Arguments.createArray();
for (int i = 0; i < barcodes.size(); i++) {
Barcode barcode = barcodes.valueAt(i);
WritableMap serializedBarcode = Arguments.createMap();
serializedBarcode.putString("data", barcode.displayValue);
serializedBarcode.putString("rawData", barcode.rawValue);
serializedBarcode.putString("type", BarcodeFormatUtils.get(barcode.format));
serializedBarcode.putMap("bounds", processBounds(barcode.getBoundingBox()));
barcodesList.pushMap(serializedBarcode);
}
return barcodesList;
}
private WritableMap processBounds(Rect frame) {
WritableMap origin = Arguments.createMap();
int x = frame.left;
int y = frame.top;
if (frame.left < mWidth / 2) {
x = x + mPaddingLeft / 2;
} else if (frame.left > mWidth / 2) {
x = x - mPaddingLeft / 2;
}
if (frame.top < mHeight / 2) {
y = y + mPaddingTop / 2;
} else if (frame.top > mHeight / 2) {
y = y - mPaddingTop / 2;
}
origin.putDouble("x", x * mScaleX);
origin.putDouble("y", y * mScaleY);
WritableMap size = Arguments.createMap();
size.putDouble("width", frame.width() * mScaleX);
size.putDouble("height", frame.height() * mScaleY);
WritableMap bounds = Arguments.createMap();
bounds.putMap("origin", origin);
bounds.putMap("size", size);
return bounds;
}
}
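
The scale factors computed in the constructor map detector output (image coordinates) into view coordinates. A worked example with purely illustrative numbers:

// Illustrative numbers only, not values taken from the library.
double density = 1.0;
int viewWidth = 480, imageWidth = 1440;
double scaleX = (double) viewWidth / (imageWidth * density); // = 1/3
double viewX = 600 * scaleX; // a barcode edge at image x = 600 lands at view x = 200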

View File

@ -0,0 +1,97 @@
package org.reactnative.camera.tasks;
import android.util.SparseArray;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.google.android.cameraview.CameraView;
import com.google.android.gms.vision.face.Face;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.facedetector.FaceDetectorUtils;
import org.reactnative.frame.RNFrame;
import org.reactnative.frame.RNFrameFactory;
import org.reactnative.facedetector.RNFaceDetector;
public class FaceDetectorAsyncTask extends android.os.AsyncTask<Void, Void, SparseArray<Face>> {
private byte[] mImageData;
private int mWidth;
private int mHeight;
private int mRotation;
private RNFaceDetector mFaceDetector;
private FaceDetectorAsyncTaskDelegate mDelegate;
private ImageDimensions mImageDimensions;
private double mScaleX;
private double mScaleY;
private int mPaddingLeft;
private int mPaddingTop;
public FaceDetectorAsyncTask(
FaceDetectorAsyncTaskDelegate delegate,
RNFaceDetector faceDetector,
byte[] imageData,
int width,
int height,
int rotation,
float density,
int facing,
int viewWidth,
int viewHeight,
int viewPaddingLeft,
int viewPaddingTop
) {
mImageData = imageData;
mWidth = width;
mHeight = height;
mRotation = rotation;
mDelegate = delegate;
mFaceDetector = faceDetector;
mImageDimensions = new ImageDimensions(width, height, rotation, facing);
mScaleX = (double) (viewWidth) / (mImageDimensions.getWidth() * density);
mScaleY = (double) (viewHeight) / (mImageDimensions.getHeight() * density);
mPaddingLeft = viewPaddingLeft;
mPaddingTop = viewPaddingTop;
}
@Override
protected SparseArray<Face> doInBackground(Void... ignored) {
if (isCancelled() || mDelegate == null || mFaceDetector == null || !mFaceDetector.isOperational()) {
return null;
}
RNFrame frame = RNFrameFactory.buildFrame(mImageData, mWidth, mHeight, mRotation);
return mFaceDetector.detect(frame);
}
@Override
protected void onPostExecute(SparseArray<Face> faces) {
super.onPostExecute(faces);
if (faces == null) {
mDelegate.onFaceDetectionError(mFaceDetector);
} else {
if (faces.size() > 0) {
mDelegate.onFacesDetected(serializeEventData(faces));
}
mDelegate.onFaceDetectingTaskCompleted();
}
}
private WritableArray serializeEventData(SparseArray<Face> faces) {
WritableArray facesList = Arguments.createArray();
for(int i = 0; i < faces.size(); i++) {
Face face = faces.valueAt(i);
WritableMap serializedFace = FaceDetectorUtils.serializeFace(face, mScaleX, mScaleY, mWidth, mHeight, mPaddingLeft, mPaddingTop);
if (mImageDimensions.getFacing() == CameraView.FACING_FRONT) {
serializedFace = FaceDetectorUtils.rotateFaceX(serializedFace, mImageDimensions.getWidth(), mScaleX);
} else {
serializedFace = FaceDetectorUtils.changeAnglesDirection(serializedFace);
}
facesList.pushMap(serializedFace);
}
return facesList;
}
}

View File

@ -0,0 +1,178 @@
package org.reactnative.camera.tasks;
import android.util.SparseArray;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.ThemedReactContext;
import com.google.android.cameraview.CameraView;
import com.google.android.gms.vision.text.Line;
import com.google.android.gms.vision.text.Text;
import com.google.android.gms.vision.text.TextBlock;
import com.google.android.gms.vision.text.TextRecognizer;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.facedetector.FaceDetectorUtils;
import org.reactnative.frame.RNFrame;
import org.reactnative.frame.RNFrameFactory;
public class TextRecognizerAsyncTask extends android.os.AsyncTask<Void, Void, SparseArray<TextBlock>> {
private TextRecognizerAsyncTaskDelegate mDelegate;
private ThemedReactContext mThemedReactContext;
private TextRecognizer mTextRecognizer;
private byte[] mImageData;
private int mWidth;
private int mHeight;
private int mRotation;
private ImageDimensions mImageDimensions;
private double mScaleX;
private double mScaleY;
private int mPaddingLeft;
private int mPaddingTop;
public TextRecognizerAsyncTask(
TextRecognizerAsyncTaskDelegate delegate,
ThemedReactContext themedReactContext,
byte[] imageData,
int width,
int height,
int rotation,
float density,
int facing,
int viewWidth,
int viewHeight,
int viewPaddingLeft,
int viewPaddingTop
) {
mDelegate = delegate;
mThemedReactContext = themedReactContext;
mImageData = imageData;
mWidth = width;
mHeight = height;
mRotation = rotation;
mImageDimensions = new ImageDimensions(width, height, rotation, facing);
mScaleX = (double) (viewWidth) / (mImageDimensions.getWidth() * density);
mScaleY = (double) (viewHeight) / (mImageDimensions.getHeight() * density);
mPaddingLeft = viewPaddingLeft;
mPaddingTop = viewPaddingTop;
}
@Override
protected SparseArray<TextBlock> doInBackground(Void... ignored) {
if (isCancelled() || mDelegate == null) {
return null;
}
mTextRecognizer = new TextRecognizer.Builder(mThemedReactContext).build();
RNFrame frame = RNFrameFactory.buildFrame(mImageData, mWidth, mHeight, mRotation);
return mTextRecognizer.detect(frame.getFrame());
}
@Override
protected void onPostExecute(SparseArray<TextBlock> textBlocks) {
super.onPostExecute(textBlocks);
if (mTextRecognizer != null) {
mTextRecognizer.release();
}
if (textBlocks != null) {
WritableArray textBlocksList = Arguments.createArray();
for (int i = 0; i < textBlocks.size(); ++i) {
TextBlock textBlock = textBlocks.valueAt(i);
WritableMap serializedTextBlock = serializeText(textBlock);
if (mImageDimensions.getFacing() == CameraView.FACING_FRONT) {
serializedTextBlock = rotateTextX(serializedTextBlock);
}
textBlocksList.pushMap(serializedTextBlock);
}
mDelegate.onTextRecognized(textBlocksList);
}
mDelegate.onTextRecognizerTaskCompleted();
}
private WritableMap serializeText(Text text) {
WritableMap encodedText = Arguments.createMap();
WritableArray components = Arguments.createArray();
for (Text component : text.getComponents()) {
components.pushMap(serializeText(component));
}
encodedText.putArray("components", components);
encodedText.putString("value", text.getValue());
int x = text.getBoundingBox().left;
int y = text.getBoundingBox().top;
if (text.getBoundingBox().left < mWidth / 2) {
  x = x + mPaddingLeft / 2;
} else if (text.getBoundingBox().left > mWidth / 2) {
  x = x - mPaddingLeft / 2;
}
// Compare the box's top edge against the view midpoint, mirroring the
// horizontal adjustment above.
if (text.getBoundingBox().top < mHeight / 2) {
  y = y + mPaddingTop / 2;
} else if (text.getBoundingBox().top > mHeight / 2) {
  y = y - mPaddingTop / 2;
}
WritableMap origin = Arguments.createMap();
origin.putDouble("x", x * this.mScaleX);
origin.putDouble("y", y * this.mScaleY);
WritableMap size = Arguments.createMap();
size.putDouble("width", text.getBoundingBox().width() * this.mScaleX);
size.putDouble("height", text.getBoundingBox().height() * this.mScaleY);
WritableMap bounds = Arguments.createMap();
bounds.putMap("origin", origin);
bounds.putMap("size", size);
encodedText.putMap("bounds", bounds);
String type_;
if (text instanceof TextBlock) {
type_ = "block";
} else if (text instanceof Line) {
type_ = "line";
} else /*if (text instanceof Element)*/ {
type_ = "element";
}
encodedText.putString("type", type_);
return encodedText;
}
private WritableMap rotateTextX(WritableMap text) {
ReadableMap faceBounds = text.getMap("bounds");
ReadableMap oldOrigin = faceBounds.getMap("origin");
WritableMap mirroredOrigin = FaceDetectorUtils.positionMirroredHorizontally(
oldOrigin, mImageDimensions.getWidth(), mScaleX);
double translateX = -faceBounds.getMap("size").getDouble("width");
WritableMap translatedMirroredOrigin = FaceDetectorUtils.positionTranslatedHorizontally(mirroredOrigin, translateX);
WritableMap newBounds = Arguments.createMap();
newBounds.merge(faceBounds);
newBounds.putMap("origin", translatedMirroredOrigin);
text.putMap("bounds", newBounds);
ReadableArray oldComponents = text.getArray("components");
WritableArray newComponents = Arguments.createArray();
for (int i = 0; i < oldComponents.size(); ++i) {
WritableMap component = Arguments.createMap();
component.merge(oldComponents.getMap(i));
rotateTextX(component);
newComponents.pushMap(component);
}
text.putArray("components", newComponents);
return text;
}
}
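
Because serializeText recurses through getComponents(), each block carries its lines and each line its elements. The emitted map has roughly this shape (field names from the code above; values are illustrative):

// {
//   "type": "block", "value": "HELLO WORLD",
//   "bounds": { "origin": { "x", "y" }, "size": { "width", "height" } },
//   "components": [
//     { "type": "line", "value": "HELLO WORLD", "bounds": { ... },
//       "components": [ { "type": "element", "value": "HELLO", ... } ] }
//   ]
// }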

View File

@ -0,0 +1,74 @@
package org.reactnative.facedetector;
import org.reactnative.facedetector.tasks.FileFaceDetectionAsyncTask;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
public class FaceDetectorModule extends ReactContextBaseJavaModule {
private static final String TAG = "RNFaceDetector";
// private ScopedContext mScopedContext;
private static ReactApplicationContext mScopedContext;
public FaceDetectorModule(ReactApplicationContext reactContext) {
super(reactContext);
mScopedContext = reactContext;
}
@Override
public String getName() {
return TAG;
}
@Nullable
@Override
public Map<String, Object> getConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("Mode", getFaceDetectionModeConstants());
put("Landmarks", getFaceDetectionLandmarksConstants());
put("Classifications", getFaceDetectionClassificationsConstants());
}
private Map<String, Object> getFaceDetectionModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("fast", RNFaceDetector.FAST_MODE);
put("accurate", RNFaceDetector.ACCURATE_MODE);
}
});
}
private Map<String, Object> getFaceDetectionClassificationsConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", RNFaceDetector.ALL_CLASSIFICATIONS);
put("none", RNFaceDetector.NO_CLASSIFICATIONS);
}
});
}
private Map<String, Object> getFaceDetectionLandmarksConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", RNFaceDetector.ALL_LANDMARKS);
put("none", RNFaceDetector.NO_LANDMARKS);
}
});
}
});
}
@ReactMethod
public void detectFaces(ReadableMap options, final Promise promise) {
new FileFaceDetectionAsyncTask(mScopedContext, options, promise).execute();
}
}

View File

@ -0,0 +1,146 @@
package org.reactnative.facedetector;
import android.graphics.PointF;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.Landmark;
public class FaceDetectorUtils {
// All the landmarks reported by Google Mobile Vision in constants' order.
// https://developers.google.com/android/reference/com/google/android/gms/vision/face/Landmark
private static final String[] landmarkNames = {
"bottomMouthPosition", "leftCheekPosition", "leftEarPosition", "leftEarTipPosition",
"leftEyePosition", "leftMouthPosition", "noseBasePosition", "rightCheekPosition",
"rightEarPosition", "rightEarTipPosition", "rightEyePosition", "rightMouthPosition"
};
public static WritableMap serializeFace(Face face) {
return serializeFace(face, 1, 1, 0, 0, 0, 0);
}
public static WritableMap serializeFace(Face face, double scaleX, double scaleY, int width, int height, int paddingLeft, int paddingTop) {
WritableMap encodedFace = Arguments.createMap();
encodedFace.putInt("faceID", face.getId());
encodedFace.putDouble("rollAngle", face.getEulerZ());
encodedFace.putDouble("yawAngle", face.getEulerY());
if (face.getIsSmilingProbability() >= 0) {
encodedFace.putDouble("smilingProbability", face.getIsSmilingProbability());
}
if (face.getIsLeftEyeOpenProbability() >= 0) {
encodedFace.putDouble("leftEyeOpenProbability", face.getIsLeftEyeOpenProbability());
}
if (face.getIsRightEyeOpenProbability() >= 0) {
encodedFace.putDouble("rightEyeOpenProbability", face.getIsRightEyeOpenProbability());
}
for(Landmark landmark : face.getLandmarks()) {
encodedFace.putMap(landmarkNames[landmark.getType()], mapFromPoint(landmark.getPosition(), scaleX, scaleY, width, height, paddingLeft, paddingTop));
}
WritableMap origin = Arguments.createMap();
Float x = face.getPosition().x;
Float y = face.getPosition().y;
if (face.getPosition().x < width / 2) {
x = x + paddingLeft / 2;
} else if (face.getPosition().x > width / 2) {
x = x - paddingLeft / 2;
}
if (face.getPosition().y < height / 2) {
y = y + paddingTop / 2;
} else if (face.getPosition().y > height / 2) {
y = y - paddingTop / 2;
}
origin.putDouble("x", x * scaleX);
origin.putDouble("y", y * scaleY);
WritableMap size = Arguments.createMap();
size.putDouble("width", face.getWidth() * scaleX);
size.putDouble("height", face.getHeight() * scaleY);
WritableMap bounds = Arguments.createMap();
bounds.putMap("origin", origin);
bounds.putMap("size", size);
encodedFace.putMap("bounds", bounds);
return encodedFace;
}
public static WritableMap rotateFaceX(WritableMap face, int sourceWidth, double scaleX) {
ReadableMap faceBounds = face.getMap("bounds");
ReadableMap oldOrigin = faceBounds.getMap("origin");
WritableMap mirroredOrigin = positionMirroredHorizontally(oldOrigin, sourceWidth, scaleX);
double translateX = -faceBounds.getMap("size").getDouble("width");
WritableMap translatedMirroredOrigin = positionTranslatedHorizontally(mirroredOrigin, translateX);
WritableMap newBounds = Arguments.createMap();
newBounds.merge(faceBounds);
newBounds.putMap("origin", translatedMirroredOrigin);
for (String landmarkName : landmarkNames) {
ReadableMap landmark = face.hasKey(landmarkName) ? face.getMap(landmarkName) : null;
if (landmark != null) {
WritableMap mirroredPosition = positionMirroredHorizontally(landmark, sourceWidth, scaleX);
face.putMap(landmarkName, mirroredPosition);
}
}
face.putMap("bounds", newBounds);
return face;
}
public static WritableMap changeAnglesDirection(WritableMap face) {
face.putDouble("rollAngle", (-face.getDouble("rollAngle") + 360) % 360);
face.putDouble("yawAngle", (-face.getDouble("yawAngle") + 360) % 360);
return face;
}
public static WritableMap mapFromPoint(PointF point, double scaleX, double scaleY, int width, int height, int paddingLeft, int paddingTop) {
WritableMap map = Arguments.createMap();
Float x = point.x;
Float y = point.y;
if (point.x < width / 2) {
x = (x + paddingLeft / 2);
} else if (point.x > width / 2) {
x = (x - paddingLeft / 2);
}
if (point.y < height / 2) {
y = (y + paddingTop / 2);
} else if (point.y > height / 2) {
y = (y - paddingTop / 2);
}
map.putDouble("x", point.x * scaleX);
map.putDouble("y", point.y * scaleY);
return map;
}
public static WritableMap positionTranslatedHorizontally(ReadableMap position, double translateX) {
WritableMap newPosition = Arguments.createMap();
newPosition.merge(position);
newPosition.putDouble("x", position.getDouble("x") + translateX);
return newPosition;
}
public static WritableMap positionMirroredHorizontally(ReadableMap position, int containerWidth, double scaleX) {
WritableMap newPosition = Arguments.createMap();
newPosition.merge(position);
newPosition.putDouble("x", valueMirroredHorizontally(position.getDouble("x"), containerWidth, scaleX));
return newPosition;
}
public static double valueMirroredHorizontally(double elementX, int containerWidth, double scaleX) {
double originalX = elementX / scaleX;
double mirroredX = containerWidth - originalX;
return mirroredX * scaleX;
}
}
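
valueMirroredHorizontally un-scales into image space, mirrors about the image width, and re-scales. A worked example with illustrative numbers:

// Illustrative numbers only.
double scaleX = 0.5;
int containerWidth = 1000;                     // source image width in px
double elementX = 200;                         // view-space x
double originalX = elementX / scaleX;          // 400 in image space
double mirroredX = containerWidth - originalX; // 600 after mirroring
double result = mirroredX * scaleX;            // 300 back in view space
// Equivalent to: FaceDetectorUtils.valueMirroredHorizontally(200, 1000, 0.5) == 300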

View File

@ -0,0 +1,108 @@
package org.reactnative.facedetector;
import android.content.Context;
import android.util.SparseArray;
import org.reactnative.camera.utils.ImageDimensions;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.FaceDetector;
import org.reactnative.frame.RNFrame;
public class RNFaceDetector {
public static int ALL_CLASSIFICATIONS = FaceDetector.ALL_CLASSIFICATIONS;
public static int NO_CLASSIFICATIONS = FaceDetector.NO_CLASSIFICATIONS;
public static int ALL_LANDMARKS = FaceDetector.ALL_LANDMARKS;
public static int NO_LANDMARKS = FaceDetector.NO_LANDMARKS;
public static int ACCURATE_MODE = FaceDetector.ACCURATE_MODE;
public static int FAST_MODE = FaceDetector.FAST_MODE;
private FaceDetector mFaceDetector = null;
private ImageDimensions mPreviousDimensions;
private FaceDetector.Builder mBuilder = null;
private int mClassificationType = NO_CLASSIFICATIONS;
private int mLandmarkType = NO_LANDMARKS;
private float mMinFaceSize = 0.15f;
private int mMode = FAST_MODE;
public RNFaceDetector(Context context) {
mBuilder = new FaceDetector.Builder(context);
mBuilder.setMinFaceSize(mMinFaceSize);
mBuilder.setMode(mMode);
mBuilder.setLandmarkType(mLandmarkType);
mBuilder.setClassificationType(mClassificationType);
}
// Public API
public boolean isOperational() {
if (mFaceDetector == null) {
createFaceDetector();
}
return mFaceDetector.isOperational();
}
public SparseArray<Face> detect(RNFrame frame) {
// If the frame has different dimensions, create another face detector.
// Otherwise we will get a nasty "inconsistent image dimensions" error
// from the detector and no face will be detected.
if (!frame.getDimensions().equals(mPreviousDimensions)) {
releaseFaceDetector();
}
if (mFaceDetector == null) {
createFaceDetector();
mPreviousDimensions = frame.getDimensions();
}
return mFaceDetector.detect(frame.getFrame());
}
public void setTracking(boolean trackingEnabled) {
release();
mBuilder.setTrackingEnabled(trackingEnabled);
}
public void setClassificationType(int classificationType) {
if (classificationType != mClassificationType) {
release();
mBuilder.setClassificationType(classificationType);
mClassificationType = classificationType;
}
}
public void setLandmarkType(int landmarkType) {
if (landmarkType != mLandmarkType) {
release();
mBuilder.setLandmarkType(landmarkType);
mLandmarkType = landmarkType;
}
}
public void setMode(int mode) {
if (mode != mMode) {
release();
mBuilder.setMode(mode);
mMode = mode;
}
}
public void release() {
releaseFaceDetector();
mPreviousDimensions = null;
}
// Lifecycle methods
private void releaseFaceDetector() {
if (mFaceDetector != null) {
mFaceDetector.release();
mFaceDetector = null;
}
}
private void createFaceDetector() {
mFaceDetector = mBuilder.build();
}
}
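
Every setter releases the current native detector before updating the builder, so changes take effect on the next build. A hypothetical configuration, assuming an Android Context:

RNFaceDetector detector = new RNFaceDetector(context);
detector.setMode(RNFaceDetector.ACCURATE_MODE);                     // slower but more precise
detector.setLandmarkType(RNFaceDetector.ALL_LANDMARKS);             // report eye/ear/mouth positions
detector.setClassificationType(RNFaceDetector.ALL_CLASSIFICATIONS); // smiling / eyes-open probabilities
// The native detector is only built on the first isOperational()/detect() call.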

View File

@ -0,0 +1,150 @@
package org.reactnative.facedetector.tasks;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import androidx.exifinterface.media.ExifInterface;
import android.net.Uri;
import android.os.AsyncTask;
import android.util.Log;
import android.util.SparseArray;
import org.reactnative.facedetector.RNFaceDetector;
import org.reactnative.frame.RNFrame;
import org.reactnative.frame.RNFrameFactory;
import org.reactnative.facedetector.FaceDetectorUtils;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.google.android.gms.vision.face.Face;
import java.io.File;
import java.io.IOException;
public class FileFaceDetectionAsyncTask extends AsyncTask<Void, Void, SparseArray<Face>> {
private static final String ERROR_TAG = "E_FACE_DETECTION_FAILED";
private static final String MODE_OPTION_KEY = "mode";
private static final String DETECT_LANDMARKS_OPTION_KEY = "detectLandmarks";
private static final String RUN_CLASSIFICATIONS_OPTION_KEY = "runClassifications";
private String mUri;
private String mPath;
private Promise mPromise;
private int mWidth = 0;
private int mHeight = 0;
private Context mContext;
private ReadableMap mOptions;
private int mOrientation = ExifInterface.ORIENTATION_UNDEFINED;
private RNFaceDetector mRNFaceDetector;
public FileFaceDetectionAsyncTask(Context context, ReadableMap options, Promise promise) {
mUri = options.getString("uri");
mPromise = promise;
mOptions = options;
mContext = context;
}
@Override
protected void onPreExecute() {
if (mUri == null) {
mPromise.reject(ERROR_TAG, "You have to provide an URI of an image.");
cancel(true);
return;
}
Uri uri = Uri.parse(mUri);
mPath = uri.getPath();
if (mPath == null) {
mPromise.reject(ERROR_TAG, "Invalid URI provided: `" + mUri + "`.");
cancel(true);
return;
}
// We have to check if the requested image is in a directory safely accessible by our app.
boolean fileIsInSafeDirectories =
mPath.startsWith(mContext.getCacheDir().getPath()) || mPath.startsWith(mContext.getFilesDir().getPath());
if (!fileIsInSafeDirectories) {
mPromise.reject(ERROR_TAG, "The image has to be in the local app's directories.");
cancel(true);
return;
}
if(!new File(mPath).exists()) {
mPromise.reject(ERROR_TAG, "The file does not exist. Given path: `" + mPath + "`.");
cancel(true);
}
}
@Override
protected SparseArray<Face> doInBackground(Void... voids) {
if (isCancelled()) {
return null;
}
mRNFaceDetector = detectorForOptions(mOptions, mContext);
Bitmap bitmap = BitmapFactory.decodeFile(mPath);
mWidth = bitmap.getWidth();
mHeight = bitmap.getHeight();
try {
ExifInterface exif = new ExifInterface(mPath);
mOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED);
} catch (IOException e) {
Log.e(ERROR_TAG, "Reading orientation from file `" + mPath + "` failed.", e);
}
RNFrame frame = RNFrameFactory.buildFrame(bitmap);
return mRNFaceDetector.detect(frame);
}
@Override
protected void onPostExecute(SparseArray<Face> faces) {
super.onPostExecute(faces);
WritableMap result = Arguments.createMap();
WritableArray facesArray = Arguments.createArray();
for(int i = 0; i < faces.size(); i++) {
Face face = faces.valueAt(i);
WritableMap encodedFace = FaceDetectorUtils.serializeFace(face);
encodedFace.putDouble("yawAngle", (-encodedFace.getDouble("yawAngle") + 360) % 360);
encodedFace.putDouble("rollAngle", (-encodedFace.getDouble("rollAngle") + 360) % 360);
facesArray.pushMap(encodedFace);
}
result.putArray("faces", facesArray);
WritableMap image = Arguments.createMap();
image.putInt("width", mWidth);
image.putInt("height", mHeight);
image.putInt("orientation", mOrientation);
image.putString("uri", mUri);
result.putMap("image", image);
mRNFaceDetector.release();
mPromise.resolve(result);
}
private static RNFaceDetector detectorForOptions(ReadableMap options, Context context) {
RNFaceDetector detector = new RNFaceDetector(context);
detector.setTracking(false);
if(options.hasKey(MODE_OPTION_KEY)) {
detector.setMode(options.getInt(MODE_OPTION_KEY));
}
if(options.hasKey(RUN_CLASSIFICATIONS_OPTION_KEY)) {
detector.setClassificationType(options.getInt(RUN_CLASSIFICATIONS_OPTION_KEY));
}
if(options.hasKey(DETECT_LANDMARKS_OPTION_KEY)) {
detector.setLandmarkType(options.getInt(DETECT_LANDMARKS_OPTION_KEY));
}
return detector;
}
}
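
A hypothetical native-side invocation, showing the option keys the task reads (the path and values are illustrative; the file must live under the app's cache or files directory):

WritableMap options = Arguments.createMap();
options.putString("uri", "file:///data/user/0/com.example/cache/photo.jpg"); // illustrative path
options.putInt("mode", RNFaceDetector.ACCURATE_MODE);
options.putInt("detectLandmarks", RNFaceDetector.ALL_LANDMARKS);
options.putInt("runClassifications", RNFaceDetector.ALL_CLASSIFICATIONS);
new FileFaceDetectionAsyncTask(context, options, promise).execute();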

View File

@ -0,0 +1,6 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="org.reactnative.camera">
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" android:required="false" />
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false" />
</manifest>

View File

@ -0,0 +1,189 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import android.os.Parcel;
import android.os.Parcelable;
import androidx.annotation.NonNull;
import androidx.collection.SparseArrayCompat;
/**
* Immutable class for describing proportional relationship between width and height.
*/
public class AspectRatio implements Comparable<AspectRatio>, Parcelable {
private final static SparseArrayCompat<SparseArrayCompat<AspectRatio>> sCache
= new SparseArrayCompat<>(16);
private final int mX;
private final int mY;
/**
* Returns an instance of {@link AspectRatio} specified by {@code x} and {@code y} values.
* The values {@code x} and {@code y} will be reduced by their greatest common divisor.
*
* @param x The width
* @param y The height
* @return An instance of {@link AspectRatio}
*/
public static AspectRatio of(int x, int y) {
int gcd = gcd(x, y);
x /= gcd;
y /= gcd;
SparseArrayCompat<AspectRatio> arrayX = sCache.get(x);
if (arrayX == null) {
AspectRatio ratio = new AspectRatio(x, y);
arrayX = new SparseArrayCompat<>();
arrayX.put(y, ratio);
sCache.put(x, arrayX);
return ratio;
} else {
AspectRatio ratio = arrayX.get(y);
if (ratio == null) {
ratio = new AspectRatio(x, y);
arrayX.put(y, ratio);
}
return ratio;
}
}
/**
* Parse an {@link AspectRatio} from a {@link String} formatted like "4:3".
*
* @param s The string representation of the aspect ratio
* @return The aspect ratio
* @throws IllegalArgumentException when the format is incorrect.
*/
public static AspectRatio parse(String s) {
int position = s.indexOf(':');
if (position == -1) {
throw new IllegalArgumentException("Malformed aspect ratio: " + s);
}
try {
int x = Integer.parseInt(s.substring(0, position));
int y = Integer.parseInt(s.substring(position + 1));
return AspectRatio.of(x, y);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Malformed aspect ratio: " + s, e);
}
}
private AspectRatio(int x, int y) {
mX = x;
mY = y;
}
public int getX() {
return mX;
}
public int getY() {
return mY;
}
public boolean matches(Size size) {
int gcd = gcd(size.getWidth(), size.getHeight());
int x = size.getWidth() / gcd;
int y = size.getHeight() / gcd;
return mX == x && mY == y;
}
@Override
public boolean equals(Object o) {
if (o == null) {
return false;
}
if (this == o) {
return true;
}
if (o instanceof AspectRatio) {
AspectRatio ratio = (AspectRatio) o;
return mX == ratio.mX && mY == ratio.mY;
}
return false;
}
@Override
public String toString() {
return mX + ":" + mY;
}
public float toFloat() {
return (float) mX / mY;
}
@Override
public int hashCode() {
// assuming most sizes are <2^16, doing a rotate will give us perfect hashing
return mY ^ ((mX << (Integer.SIZE / 2)) | (mX >>> (Integer.SIZE / 2)));
}
@Override
public int compareTo(@NonNull AspectRatio another) {
if (equals(another)) {
return 0;
} else if (toFloat() - another.toFloat() > 0) {
return 1;
}
return -1;
}
/**
* @return The inverse of this {@link AspectRatio}.
*/
public AspectRatio inverse() {
//noinspection SuspiciousNameCombination
return AspectRatio.of(mY, mX);
}
private static int gcd(int a, int b) {
while (b != 0) {
int c = b;
b = a % b;
a = c;
}
return a;
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(mX);
dest.writeInt(mY);
}
public static final Parcelable.Creator<AspectRatio> CREATOR
= new Parcelable.Creator<AspectRatio>() {
@Override
public AspectRatio createFromParcel(Parcel source) {
int x = source.readInt();
int y = source.readInt();
return AspectRatio.of(x, y);
}
@Override
public AspectRatio[] newArray(int size) {
return new AspectRatio[size];
}
};
}
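
Since of() reduces by the gcd and interns instances in a cache, equal ratios compare equal regardless of the input values. A minimal sketch:

AspectRatio a = AspectRatio.of(1920, 1080);    // reduced to 16:9 and cached
AspectRatio b = AspectRatio.parse("16:9");     // resolves to the same cached instance
boolean same = a.equals(b);                    // true
boolean fits = a.matches(new Size(1280, 720)); // true: 1280x720 also reduces to 16:9
AspectRatio inv = a.inverse();                 // 9:16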

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,47 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Handler;
@TargetApi(23)
class Camera2Api23 extends Camera2 {
Camera2Api23(Callback callback, PreviewImpl preview, Context context, Handler bgHandler) {
super(callback, preview, context, bgHandler);
}
@Override
protected void collectPictureSizes(SizeMap sizes, StreamConfigurationMap map) {
// Try to get hi-res output sizes
android.util.Size[] outputSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
if (outputSizes != null) {
for (android.util.Size size : outputSizes) {
sizes.add(new Size(size.getWidth(), size.getHeight()));
}
}
if (sizes.isEmpty()) {
super.collectPictureSizes(sizes, map);
}
}
}

View File

@ -0,0 +1,872 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import android.app.Activity;
import android.content.Context;
import android.graphics.Rect;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.os.Build;
import android.os.HandlerThread;
import android.os.Handler;
import android.os.Parcel;
import android.os.Parcelable;
import androidx.annotation.IntDef;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.os.ParcelableCompat;
import androidx.core.os.ParcelableCompatCreatorCallbacks;
import androidx.core.view.ViewCompat;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.widget.FrameLayout;
import android.graphics.SurfaceTexture;
import com.facebook.react.bridge.ReadableMap;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
public class CameraView extends FrameLayout {
/** The camera device faces the opposite direction as the device's screen. */
public static final int FACING_BACK = Constants.FACING_BACK;
/** The camera device faces the same direction as the device's screen. */
public static final int FACING_FRONT = Constants.FACING_FRONT;
/** Direction the camera faces relative to device screen. */
@IntDef({FACING_BACK, FACING_FRONT})
@Retention(RetentionPolicy.SOURCE)
public @interface Facing {
}
/** Flash will not be fired. */
public static final int FLASH_OFF = Constants.FLASH_OFF;
/** Flash will always be fired during snapshot. */
public static final int FLASH_ON = Constants.FLASH_ON;
/** Constant emission of light during preview, auto-focus and snapshot. */
public static final int FLASH_TORCH = Constants.FLASH_TORCH;
/** Flash will be fired automatically when required. */
public static final int FLASH_AUTO = Constants.FLASH_AUTO;
/** Flash will be fired in red-eye reduction mode. */
public static final int FLASH_RED_EYE = Constants.FLASH_RED_EYE;
/** The mode for the camera device's flash control */
@Retention(RetentionPolicy.SOURCE)
@IntDef({FLASH_OFF, FLASH_ON, FLASH_TORCH, FLASH_AUTO, FLASH_RED_EYE})
public @interface Flash {
}
CameraViewImpl mImpl;
private final CallbackBridge mCallbacks;
private boolean mAdjustViewBounds;
private Context mContext;
private final DisplayOrientationDetector mDisplayOrientationDetector;
protected HandlerThread mBgThread;
protected Handler mBgHandler;
public CameraView(Context context, boolean fallbackToOldApi) {
this(context, null, fallbackToOldApi);
}
public CameraView(Context context, AttributeSet attrs, boolean fallbackToOldApi) {
this(context, attrs, 0, fallbackToOldApi);
}
@SuppressWarnings("WrongConstant")
public CameraView(Context context, AttributeSet attrs, int defStyleAttr, boolean fallbackToOldApi) {
super(context, attrs, defStyleAttr);
// Background handler for non-UI heavy work
mBgThread = new HandlerThread("RNCamera-Handler-Thread");
mBgThread.start();
mBgHandler = new Handler(mBgThread.getLooper());
if (isInEditMode()){
mCallbacks = null;
mDisplayOrientationDetector = null;
return;
}
mAdjustViewBounds = true;
mContext = context;
// Internal setup
final PreviewImpl preview = createPreviewImpl(context);
mCallbacks = new CallbackBridge();
if (fallbackToOldApi || Build.VERSION.SDK_INT < 21 || Camera2.isLegacy(context)) {
mImpl = new Camera1(mCallbacks, preview, mBgHandler);
} else if (Build.VERSION.SDK_INT < 23) {
mImpl = new Camera2(mCallbacks, preview, context, mBgHandler);
} else {
mImpl = new Camera2Api23(mCallbacks, preview, context, mBgHandler);
}
// Display orientation detector
mDisplayOrientationDetector = new DisplayOrientationDetector(context) {
@Override
public void onDisplayOrientationChanged(int displayOrientation, int deviceOrientation) {
mImpl.setDisplayOrientation(displayOrientation);
mImpl.setDeviceOrientation(deviceOrientation);
}
};
}
public void cleanup(){
if(mBgThread != null){
if(Build.VERSION.SDK_INT < 18){
mBgThread.quit();
}
else{
mBgThread.quitSafely();
}
mBgThread = null;
}
}
@NonNull
private PreviewImpl createPreviewImpl(Context context) {
PreviewImpl preview;
if (Build.VERSION.SDK_INT < 14) {
preview = new SurfaceViewPreview(context, this);
} else {
preview = new TextureViewPreview(context, this);
}
return preview;
}
@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
if (!isInEditMode()) {
mDisplayOrientationDetector.enable(ViewCompat.getDisplay(this));
}
}
@Override
protected void onDetachedFromWindow() {
if (!isInEditMode()) {
mDisplayOrientationDetector.disable();
}
super.onDetachedFromWindow();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
if (isInEditMode()){
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
return;
}
// Handle android:adjustViewBounds
if (mAdjustViewBounds) {
if (!isCameraOpened()) {
mCallbacks.reserveRequestLayoutOnOpen();
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
return;
}
final int widthMode = MeasureSpec.getMode(widthMeasureSpec);
final int heightMode = MeasureSpec.getMode(heightMeasureSpec);
if (widthMode == MeasureSpec.EXACTLY && heightMode != MeasureSpec.EXACTLY) {
final AspectRatio ratio = getAspectRatio();
assert ratio != null;
int height = (int) (MeasureSpec.getSize(widthMeasureSpec) * ratio.toFloat());
if (heightMode == MeasureSpec.AT_MOST) {
height = Math.min(height, MeasureSpec.getSize(heightMeasureSpec));
}
super.onMeasure(widthMeasureSpec,
MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
} else if (widthMode != MeasureSpec.EXACTLY && heightMode == MeasureSpec.EXACTLY) {
final AspectRatio ratio = getAspectRatio();
assert ratio != null;
int width = (int) (MeasureSpec.getSize(heightMeasureSpec) * ratio.toFloat());
if (widthMode == MeasureSpec.AT_MOST) {
width = Math.min(width, MeasureSpec.getSize(widthMeasureSpec));
}
super.onMeasure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
heightMeasureSpec);
} else {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
} else {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
// Measure the TextureView
int width = getMeasuredWidth();
int height = getMeasuredHeight();
AspectRatio ratio = getAspectRatio();
if (mDisplayOrientationDetector.getLastKnownDisplayOrientation() % 180 == 0) {
ratio = ratio.inverse();
}
assert ratio != null;
if (height < width * ratio.getY() / ratio.getX()) {
mImpl.getView().measure(
MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
MeasureSpec.makeMeasureSpec(width * ratio.getY() / ratio.getX(),
MeasureSpec.EXACTLY));
} else {
mImpl.getView().measure(
MeasureSpec.makeMeasureSpec(height * ratio.getX() / ratio.getY(),
MeasureSpec.EXACTLY),
MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
}
}
@Override
protected Parcelable onSaveInstanceState() {
SavedState state = new SavedState(super.onSaveInstanceState());
state.facing = getFacing();
state.cameraId = getCameraId();
state.ratio = getAspectRatio();
state.autoFocus = getAutoFocus();
state.flash = getFlash();
state.exposure = getExposureCompensation();
state.focusDepth = getFocusDepth();
state.zoom = getZoom();
state.whiteBalance = getWhiteBalance();
state.playSoundOnCapture = getPlaySoundOnCapture();
state.scanning = getScanning();
state.pictureSize = getPictureSize();
return state;
}
@Override
protected void onRestoreInstanceState(Parcelable state) {
if (!(state instanceof SavedState)) {
super.onRestoreInstanceState(state);
return;
}
SavedState ss = (SavedState) state;
super.onRestoreInstanceState(ss.getSuperState());
setFacing(ss.facing);
setCameraId(ss.cameraId);
setAspectRatio(ss.ratio);
setAutoFocus(ss.autoFocus);
setFlash(ss.flash);
setExposureCompensation(ss.exposure);
setFocusDepth(ss.focusDepth);
setZoom(ss.zoom);
setWhiteBalance(ss.whiteBalance);
setPlaySoundOnCapture(ss.playSoundOnCapture);
setScanning(ss.scanning);
setPictureSize(ss.pictureSize);
}
public void setUsingCamera2Api(boolean useCamera2) {
if (Build.VERSION.SDK_INT < 21) {
return;
}
boolean wasOpened = isCameraOpened();
Parcelable state = onSaveInstanceState();
if (useCamera2 && !Camera2.isLegacy(mContext)) {
if (wasOpened) {
stop();
}
if (Build.VERSION.SDK_INT < 23) {
mImpl = new Camera2(mCallbacks, mImpl.mPreview, mContext, mBgHandler);
} else {
mImpl = new Camera2Api23(mCallbacks, mImpl.mPreview, mContext, mBgHandler);
}
onRestoreInstanceState(state);
} else {
if (mImpl instanceof Camera1) {
return;
}
if (wasOpened) {
stop();
}
mImpl = new Camera1(mCallbacks, mImpl.mPreview, mBgHandler);
}
if(wasOpened){
start();
}
}
/**
* Open a camera device and start showing camera preview. This is typically called from
* {@link Activity#onResume()}.
*/
public void start() {
mImpl.start();
// this fallback is no longer needed and was too buggy/slow
// if (!mImpl.start()) {
// if (mImpl.getView() != null) {
// this.removeView(mImpl.getView());
// }
// //store the state and restore this state after fall back to Camera1
// Parcelable state = onSaveInstanceState();
// // Camera2 uses legacy hardware layer; fall back to Camera1
// mImpl = new Camera1(mCallbacks, createPreviewImpl(getContext()), mBgHandler);
// onRestoreInstanceState(state);
// mImpl.start();
// }
}
/**
* Stop camera preview and close the device. This is typically called from
* {@link Activity#onPause()}.
*/
public void stop() {
mImpl.stop();
}
/**
* @return {@code true} if the camera is opened.
*/
public boolean isCameraOpened() {
return mImpl.isCameraOpened();
}
/**
* Add a new callback.
*
* @param callback The {@link Callback} to add.
* @see #removeCallback(Callback)
*/
public void addCallback(@NonNull Callback callback) {
mCallbacks.add(callback);
}
/**
* Remove a callback.
*
* @param callback The {@link Callback} to remove.
* @see #addCallback(Callback)
*/
public void removeCallback(@NonNull Callback callback) {
mCallbacks.remove(callback);
}
/**
* @param adjustViewBounds {@code true} if you want the CameraView to adjust its bounds to
* preserve the aspect ratio of the camera.
* @see #getAdjustViewBounds()
*/
public void setAdjustViewBounds(boolean adjustViewBounds) {
if (mAdjustViewBounds != adjustViewBounds) {
mAdjustViewBounds = adjustViewBounds;
requestLayout();
}
}
/**
* @return True when this CameraView is adjusting its bounds to preserve the aspect ratio of
* the camera.
* @see #setAdjustViewBounds(boolean)
*/
public boolean getAdjustViewBounds() {
return mAdjustViewBounds;
}
public View getView() {
if (mImpl != null) {
return mImpl.getView();
}
return null;
}
/**
* Chooses camera by the direction it faces.
*
* @param facing The camera facing. Must be either {@link #FACING_BACK} or
* {@link #FACING_FRONT}.
*/
public void setFacing(@Facing int facing) {
mImpl.setFacing(facing);
}
/**
* Gets the direction that the current camera faces.
*
* @return The camera facing.
*/
@Facing
public int getFacing() {
//noinspection WrongConstant
return mImpl.getFacing();
}
/**
* Chooses camera by its camera ID.
*
* @param id The camera ID
*/
public void setCameraId(String id) {
mImpl.setCameraId(id);
}
/**
* Gets the currently set camera ID
*
* @return The camera ID.
*/
public String getCameraId() {
return mImpl.getCameraId();
}
/**
* Gets all the aspect ratios supported by the current camera.
*/
public Set<AspectRatio> getSupportedAspectRatios() {
return mImpl.getSupportedAspectRatios();
}
/**
* Gets all the camera IDs supported by the phone, along with their properties.
*/
public List<Properties> getCameraIds() {
return mImpl.getCameraIds();
}
/**
* Sets the aspect ratio of the camera.
*
* @param ratio The {@link AspectRatio} to be set.
*/
public void setAspectRatio(@NonNull AspectRatio ratio) {
if (mImpl.setAspectRatio(ratio)) {
requestLayout();
}
}
/**
* Gets the current aspect ratio of the camera.
*
* @return The current {@link AspectRatio}. Can be {@code null} if no camera is opened yet.
*/
@Nullable
public AspectRatio getAspectRatio() {
return mImpl.getAspectRatio();
}
/**
* Gets all the picture sizes for a particular ratio supported by the current camera.
*
* @param ratio {@link AspectRatio} for which the available image sizes will be returned.
*/
public SortedSet<Size> getAvailablePictureSizes(@NonNull AspectRatio ratio) {
return mImpl.getAvailablePictureSizes(ratio);
}
/**
* Sets the size of taken pictures.
*
* @param size The {@link Size} to be set.
*/
public void setPictureSize(@NonNull Size size) {
mImpl.setPictureSize(size);
}
/**
* Gets the size of pictures that will be taken.
*/
public Size getPictureSize() {
return mImpl.getPictureSize();
}
/**
* Enables or disables the continuous auto-focus mode. When the current camera doesn't support
* auto-focus, calling this method will be ignored.
*
* @param autoFocus {@code true} to enable continuous auto-focus mode. {@code false} to
* disable it.
*/
public void setAutoFocus(boolean autoFocus) {
mImpl.setAutoFocus(autoFocus);
}
/**
* Returns whether the continuous auto-focus mode is enabled.
*
* @return {@code true} if the continuous auto-focus mode is enabled. {@code false} if it is
* disabled, or if it is not supported by the current camera.
*/
public boolean getAutoFocus() {
return mImpl.getAutoFocus();
}
/**
* Sets the flash mode.
*
* @param flash The desired flash mode.
*/
public void setFlash(@Flash int flash) {
mImpl.setFlash(flash);
}
/**
* Gets the current flash mode.
*
* @return The current flash mode.
*/
@Flash
public int getFlash() {
//noinspection WrongConstant
return mImpl.getFlash();
}
public void setExposureCompensation(float exposure) {
mImpl.setExposureCompensation(exposure);
}
public float getExposureCompensation() {
return mImpl.getExposureCompensation();
}
/**
* Gets the camera orientation relative to the device's native orientation.
*
* @return The orientation of the camera.
*/
public int getCameraOrientation() {
return mImpl.getCameraOrientation();
}
/**
* Sets the auto focus point.
*
* @param x sets the x coordinate for camera auto focus
* @param y sets the y coordinate for camera auto focus
*/
public void setAutoFocusPointOfInterest(float x, float y) {
mImpl.setFocusArea(x, y);
}
public void setFocusDepth(float value) {
mImpl.setFocusDepth(value);
}
public float getFocusDepth() { return mImpl.getFocusDepth(); }
public void setZoom(float zoom) {
mImpl.setZoom(zoom);
}
public float getZoom() {
return mImpl.getZoom();
}
public void setWhiteBalance(int whiteBalance) {
mImpl.setWhiteBalance(whiteBalance);
}
public int getWhiteBalance() {
return mImpl.getWhiteBalance();
}
public void setPlaySoundOnCapture(boolean playSoundOnCapture) {
mImpl.setPlaySoundOnCapture(playSoundOnCapture);
}
public boolean getPlaySoundOnCapture() {
return mImpl.getPlaySoundOnCapture();
}
public void setScanning(boolean isScanning) { mImpl.setScanning(isScanning); }
public boolean getScanning() { return mImpl.getScanning(); }
/**
* Take a picture. The result will be returned to
* {@link Callback#onPictureTaken(CameraView, byte[], int)}.
*/
public void takePicture(ReadableMap options) {
mImpl.takePicture(options);
}
/**
* Record a video and save it to file. The result will be returned to
* {@link Callback#onVideoRecorded(CameraView, String, int, int)}.
* @param path Path to file that video will be saved to.
* @param maxDuration Maximum duration of the recording, in seconds.
* @param maxFileSize Maximum recording file size, in bytes.
* @param recordAudio Whether to record audio along with the video.
* @param profile Quality profile of the recording.
* @param orientation Orientation for the recorded video.
*
* Fires {@link Callback#onRecordingStart(CameraView, String, int, int)} and {@link Callback#onRecordingEnd(CameraView)}.
*/
public boolean record(String path, int maxDuration, int maxFileSize,
boolean recordAudio, CamcorderProfile profile, int orientation) {
return mImpl.record(path, maxDuration, maxFileSize, recordAudio, profile, orientation);
}
public void stopRecording() {
mImpl.stopRecording();
}
public void resumePreview() {
mImpl.resumePreview();
}
public void pausePreview() {
mImpl.pausePreview();
}
public void setPreviewTexture(SurfaceTexture surfaceTexture) {
mImpl.setPreviewTexture(surfaceTexture);
}
public Size getPreviewSize() {
return mImpl.getPreviewSize();
}
private class CallbackBridge implements CameraViewImpl.Callback {
private final ArrayList<Callback> mCallbacks = new ArrayList<>();
private boolean mRequestLayoutOnOpen;
CallbackBridge() {
}
public void add(Callback callback) {
mCallbacks.add(callback);
}
public void remove(Callback callback) {
mCallbacks.remove(callback);
}
@Override
public void onCameraOpened() {
if (mRequestLayoutOnOpen) {
mRequestLayoutOnOpen = false;
requestLayout();
}
for (Callback callback : mCallbacks) {
callback.onCameraOpened(CameraView.this);
}
}
@Override
public void onCameraClosed() {
for (Callback callback : mCallbacks) {
callback.onCameraClosed(CameraView.this);
}
}
@Override
public void onPictureTaken(byte[] data, int deviceOrientation) {
for (Callback callback : mCallbacks) {
callback.onPictureTaken(CameraView.this, data, deviceOrientation);
}
}
@Override
public void onRecordingStart(String path, int videoOrientation, int deviceOrientation) {
for (Callback callback : mCallbacks) {
callback.onRecordingStart(CameraView.this, path, videoOrientation, deviceOrientation);
}
}
@Override
public void onRecordingEnd() {
for (Callback callback : mCallbacks) {
callback.onRecordingEnd(CameraView.this);
}
}
@Override
public void onVideoRecorded(String path, int videoOrientation, int deviceOrientation) {
for (Callback callback : mCallbacks) {
callback.onVideoRecorded(CameraView.this, path, videoOrientation, deviceOrientation);
}
}
@Override
public void onFramePreview(byte[] data, int width, int height, int orientation) {
for (Callback callback : mCallbacks) {
callback.onFramePreview(CameraView.this, data, width, height, orientation);
}
}
@Override
public void onMountError() {
for (Callback callback : mCallbacks) {
callback.onMountError(CameraView.this);
}
}
public void reserveRequestLayoutOnOpen() {
mRequestLayoutOnOpen = true;
}
}
protected static class SavedState extends BaseSavedState {
@Facing
int facing;
String cameraId;
AspectRatio ratio;
boolean autoFocus;
@Flash
int flash;
float exposure;
float focusDepth;
float zoom;
int whiteBalance;
boolean playSoundOnCapture;
boolean scanning;
Size pictureSize;
@SuppressWarnings("WrongConstant")
public SavedState(Parcel source, ClassLoader loader) {
super(source);
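// Fields must be read in exactly the order they are written in writeToParcel below.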
facing = source.readInt();
cameraId = source.readString();
ratio = source.readParcelable(loader);
autoFocus = source.readByte() != 0;
flash = source.readInt();
exposure = source.readFloat();
focusDepth = source.readFloat();
zoom = source.readFloat();
whiteBalance = source.readInt();
playSoundOnCapture = source.readByte() != 0;
scanning = source.readByte() != 0;
pictureSize = source.readParcelable(loader);
}
public SavedState(Parcelable superState) {
super(superState);
}
@Override
public void writeToParcel(Parcel out, int flags) {
super.writeToParcel(out, flags);
out.writeInt(facing);
out.writeString(cameraId);
out.writeParcelable(ratio, 0);
out.writeByte((byte) (autoFocus ? 1 : 0));
out.writeInt(flash);
out.writeFloat(exposure);
out.writeFloat(focusDepth);
out.writeFloat(zoom);
out.writeInt(whiteBalance);
out.writeByte((byte) (playSoundOnCapture ? 1 : 0));
out.writeByte((byte) (scanning ? 1 : 0));
out.writeParcelable(pictureSize, flags);
}
public static final Creator<SavedState> CREATOR
= ParcelableCompat.newCreator(new ParcelableCompatCreatorCallbacks<SavedState>() {
@Override
public SavedState createFromParcel(Parcel in, ClassLoader loader) {
return new SavedState(in, loader);
}
@Override
public SavedState[] newArray(int size) {
return new SavedState[size];
}
});
}
/**
* Callback for monitoring events about {@link CameraView}.
*/
@SuppressWarnings("UnusedParameters")
public abstract static class Callback {
/**
* Called when the camera is opened.
*
* @param cameraView The associated {@link CameraView}.
*/
public void onCameraOpened(CameraView cameraView) {}
/**
* Called when the camera is closed.
*
* @param cameraView The associated {@link CameraView}.
*/
public void onCameraClosed(CameraView cameraView) {}
/**
* Called when a picture is taken.
*
* @param cameraView The associated {@link CameraView}.
* @param data JPEG data.
* @param deviceOrientation The device orientation at capture time.
*/
public void onPictureTaken(CameraView cameraView, byte[] data, int deviceOrientation) {}
/**
* Called when a video recording starts.
*
* @param cameraView The associated {@link CameraView}.
* @param path Path to the recorded video file.
* @param videoOrientation The orientation of the recorded video.
* @param deviceOrientation The device orientation when recording started.
*/
public void onRecordingStart(CameraView cameraView, String path, int videoOrientation, int deviceOrientation) {}
/**
* Called when a video recording ends, but before video is saved/processed.
*
* @param cameraView The associated {@link CameraView}.
*/
public void onRecordingEnd(CameraView cameraView) {}
/**
* Called when a video is recorded.
*
* @param cameraView The associated {@link CameraView}.
* @param path Path to the recorded video file.
*/
public void onVideoRecorded(CameraView cameraView, String path, int videoOrientation, int deviceOrientation) {}
public void onFramePreview(CameraView cameraView, byte[] data, int width, int height, int orientation) {}
public void onMountError(CameraView cameraView) {}
}
}
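A minimal usage sketch of the public API above (illustrative only: `cameraView` is assumed to be an attached instance, `addCallback(Callback)` is assumed to exist based on the CallbackBridge above, and `options` is the ReadableMap supplied from the JS side):

cameraView.addCallback(new CameraView.Callback() {
@Override
public void onPictureTaken(CameraView view, byte[] data, int deviceOrientation) {
// data holds the JPEG bytes
}
});
cameraView.setAutoFocus(true);
cameraView.setFlash(Constants.FLASH_AUTO);
cameraView.takePicture(options); // options: ReadableMap from JS
// 30 s max, no size limit, with audio, orientation hint of 0
cameraView.record("/sdcard/clip.mp4", 30, 0, true,
CamcorderProfile.get(CamcorderProfile.QUALITY_720P), 0);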

View File

@ -0,0 +1,162 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import android.media.CamcorderProfile;
import android.view.View;
import android.graphics.SurfaceTexture;
import android.os.Handler;
import com.facebook.react.bridge.ReadableMap;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
abstract class CameraViewImpl {
protected final Callback mCallback;
protected final PreviewImpl mPreview;
// Background handler that the implementation can use to run heavy tasks in the
// background, on a thread/looper provided by the view.
// Most calls should not require this, since the view already schedules them on
// the background thread. However, the implementation might need to do some
// heavy work by itself.
protected final Handler mBgHandler;
CameraViewImpl(Callback callback, PreviewImpl preview, Handler bgHandler) {
mCallback = callback;
mPreview = preview;
mBgHandler = bgHandler;
}
View getView() {
return mPreview.getView();
}
/**
* @return {@code true} if the implementation was able to start the camera session.
*/
abstract boolean start();
abstract void stop();
abstract boolean isCameraOpened();
abstract void setFacing(int facing);
abstract int getFacing();
abstract void setCameraId(String id);
abstract String getCameraId();
abstract Set<AspectRatio> getSupportedAspectRatios();
abstract List<Properties> getCameraIds();
abstract SortedSet<Size> getAvailablePictureSizes(AspectRatio ratio);
abstract void setPictureSize(Size size);
abstract Size getPictureSize();
/**
* @return {@code true} if the aspect ratio was changed.
*/
abstract boolean setAspectRatio(AspectRatio ratio);
abstract AspectRatio getAspectRatio();
abstract void setAutoFocus(boolean autoFocus);
abstract boolean getAutoFocus();
abstract void setFlash(int flash);
abstract int getFlash();
abstract void setExposureCompensation(float exposure);
abstract float getExposureCompensation();
abstract void takePicture(ReadableMap options);
abstract boolean record(String path, int maxDuration, int maxFileSize,
boolean recordAudio, CamcorderProfile profile, int orientation);
abstract void stopRecording();
abstract int getCameraOrientation();
abstract void setDisplayOrientation(int displayOrientation);
abstract void setDeviceOrientation(int deviceOrientation);
abstract void setFocusArea(float x, float y);
abstract void setFocusDepth(float value);
abstract float getFocusDepth();
abstract void setZoom(float zoom);
abstract float getZoom();
abstract void setWhiteBalance(int whiteBalance);
abstract int getWhiteBalance();
abstract void setPlaySoundOnCapture(boolean playSoundOnCapture);
abstract boolean getPlaySoundOnCapture();
abstract void setScanning(boolean isScanning);
abstract boolean getScanning();
public abstract void resumePreview();
public abstract void pausePreview();
public abstract void setPreviewTexture(SurfaceTexture surfaceTexture);
public abstract Size getPreviewSize();
interface Callback {
void onCameraOpened();
void onCameraClosed();
void onPictureTaken(byte[] data, int deviceOrientation);
void onVideoRecorded(String path, int videoOrientation, int deviceOrientation);
void onRecordingStart(String path, int videoOrientation, int deviceOrientation);
void onRecordingEnd();
void onFramePreview(byte[] data, int width, int height, int orientation);
void onMountError();
}
}
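A sketch of how a concrete implementation might use the background handler described in the comment above (illustrative fragment, not a complete subclass; a real one must implement every abstract member):

// Inside a hypothetical CameraViewImpl subclass:
@Override
boolean start() {
mBgHandler.post(new Runnable() {
@Override
public void run() {
// heavy work (e.g. opening the camera device) runs on the
// thread/looper the view provided, off the UI thread
}
});
return true;
}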

View File

@ -0,0 +1,47 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
public interface Constants {
AspectRatio DEFAULT_ASPECT_RATIO = AspectRatio.of(4, 3);
int FACING_BACK = 0;
int FACING_FRONT = 1;
int FLASH_OFF = 0;
int FLASH_ON = 1;
int FLASH_TORCH = 2;
int FLASH_AUTO = 3;
int FLASH_RED_EYE = 4;
int LANDSCAPE_90 = 90;
int LANDSCAPE_270 = 270;
int WB_AUTO = 0;
int WB_CLOUDY = 1;
int WB_SUNNY = 2;
int WB_SHADOW = 3;
int WB_FLUORESCENT = 4;
int WB_INCANDESCENT = 5;
int ORIENTATION_AUTO = 0;
int ORIENTATION_UP = 1;
int ORIENTATION_DOWN = 2;
int ORIENTATION_LEFT = 3;
int ORIENTATION_RIGHT = 4;
}

View File

@ -0,0 +1,125 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import android.content.Context;
import android.util.SparseIntArray;
import android.view.Display;
import android.view.OrientationEventListener;
import android.view.Surface;
/**
* Monitors the value returned from {@link Display#getRotation()}.
*/
abstract class DisplayOrientationDetector {
private final OrientationEventListener mOrientationEventListener;
/** Mapping from Surface.ROTATION_* to degrees. */
static final SparseIntArray DISPLAY_ORIENTATIONS = new SparseIntArray();
static {
DISPLAY_ORIENTATIONS.put(Surface.ROTATION_0, 0);
DISPLAY_ORIENTATIONS.put(Surface.ROTATION_90, 90);
DISPLAY_ORIENTATIONS.put(Surface.ROTATION_180, 180);
DISPLAY_ORIENTATIONS.put(Surface.ROTATION_270, 270);
}
Display mDisplay;
private int mLastKnownDisplayOrientation = 0;
private int mLastKnownDeviceOrientation = 0;
public DisplayOrientationDetector(Context context) {
mOrientationEventListener = new OrientationEventListener(context) {
/** This is either Surface.ROTATION_0, _90, _180, _270, or -1 (invalid). */
private int mLastKnownRotation = -1;
@Override
public void onOrientationChanged(int orientation) {
if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN ||
mDisplay == null) {
return;
}
boolean hasChanged = false;
/** set device orientation */
final int deviceOrientation;
if (orientation > 315 || orientation < 45) {
deviceOrientation = 0;
} else if (orientation > 45 && orientation < 135) {
deviceOrientation = 90;
} else if (orientation > 135 && orientation < 225) {
deviceOrientation = 180;
} else if (orientation > 225 && orientation < 315) {
deviceOrientation = 270;
} else {
deviceOrientation = 0;
}
if (mLastKnownDeviceOrientation != deviceOrientation) {
mLastKnownDeviceOrientation = deviceOrientation;
hasChanged = true;
}
/** set screen orientation */
final int rotation = mDisplay.getRotation();
if (mLastKnownRotation != rotation) {
mLastKnownRotation = rotation;
hasChanged = true;
}
if (hasChanged) {
dispatchOnDisplayOrientationChanged(DISPLAY_ORIENTATIONS.get(rotation));
}
}
};
}
public void enable(Display display) {
mDisplay = display;
mOrientationEventListener.enable();
// Immediately dispatch the first callback
dispatchOnDisplayOrientationChanged(DISPLAY_ORIENTATIONS.get(display.getRotation()));
}
public void disable() {
mOrientationEventListener.disable();
mDisplay = null;
}
public int getLastKnownDisplayOrientation() {
return mLastKnownDisplayOrientation;
}
void dispatchOnDisplayOrientationChanged(int displayOrientation) {
mLastKnownDisplayOrientation = displayOrientation;
onDisplayOrientationChanged(displayOrientation, mLastKnownDeviceOrientation);
}
/**
* Called when display orientation is changed.
*
* @param displayOrientation One of 0, 90, 180, and 270.
* @param deviceOrientation One of 0, 90, 180, and 270.
*/
public abstract void onDisplayOrientationChanged(int displayOrientation, int deviceOrientation);
}
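An illustrative wiring of the detector (hypothetical call site, e.g. a view's attach/detach callbacks):

DisplayOrientationDetector detector = new DisplayOrientationDetector(context) {
@Override
public void onDisplayOrientationChanged(int displayOrientation, int deviceOrientation) {
// each value is one of 0, 90, 180, 270
}
};
// enable() immediately dispatches the current rotation
detector.enable(activity.getWindowManager().getDefaultDisplay());
// later, e.g. on detach:
detector.disable();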

View File

@ -0,0 +1,87 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.View;
/**
* Encapsulates all the operations related to camera preview in a backward-compatible manner.
*/
abstract class PreviewImpl {
interface Callback {
void onSurfaceChanged();
void onSurfaceDestroyed();
}
private Callback mCallback;
private int mWidth;
private int mHeight;
void setCallback(Callback callback) {
mCallback = callback;
}
abstract Surface getSurface();
abstract View getView();
abstract Class getOutputClass();
abstract void setDisplayOrientation(int displayOrientation);
abstract boolean isReady();
protected void dispatchSurfaceChanged() {
mCallback.onSurfaceChanged();
}
protected void dispatchSurfaceDestroyed() {
mCallback.onSurfaceDestroyed();
}
SurfaceHolder getSurfaceHolder() {
return null;
}
Object getSurfaceTexture() {
return null;
}
void setBufferSize(int width, int height) {
}
void setSize(int width, int height) {
mWidth = width;
mHeight = height;
}
int getWidth() {
return mWidth;
}
int getHeight() {
return mHeight;
}
}

View File

@ -0,0 +1,119 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import android.os.Parcel;
import android.os.Parcelable;
import androidx.annotation.NonNull;
/**
* Immutable class for describing width and height dimensions in pixels.
*/
public class Size implements Comparable<Size>, Parcelable {
private final int mWidth;
private final int mHeight;
/**
* Create a new immutable Size instance.
*
* @param width The width of the size, in pixels
* @param height The height of the size, in pixels
*/
public Size(int width, int height) {
mWidth = width;
mHeight = height;
}
public static Size parse(String s) {
int position = s.indexOf('x');
if (position == -1) {
throw new IllegalArgumentException("Malformed size: " + s);
}
try {
int width = Integer.parseInt(s.substring(0, position));
int height = Integer.parseInt(s.substring(position + 1));
return new Size(width, height);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Malformed size: " + s, e);
}
}
public int getWidth() {
return mWidth;
}
public int getHeight() {
return mHeight;
}
@Override
public boolean equals(Object o) {
if (o == null) {
return false;
}
if (this == o) {
return true;
}
if (o instanceof Size) {
Size size = (Size) o;
return mWidth == size.mWidth && mHeight == size.mHeight;
}
return false;
}
@Override
public String toString() {
return mWidth + "x" + mHeight;
}
@Override
public int hashCode() {
// assuming most sizes are <2^16, doing a rotate will give us perfect hashing
return mHeight ^ ((mWidth << (Integer.SIZE / 2)) | (mWidth >>> (Integer.SIZE / 2)));
}
@Override
public int compareTo(@NonNull Size another) {
return mWidth * mHeight - another.mWidth * another.mHeight;
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(mWidth);
dest.writeInt(mHeight);
}
public static final Parcelable.Creator<Size> CREATOR = new Parcelable.Creator<Size>() {
@Override
public Size createFromParcel(Parcel source) {
int width = source.readInt();
int height = source.readInt();
return new Size(width, height);
}
@Override
public Size[] newArray(int size) {
return new Size[size];
}
};
}
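A short example of the class in use (illustrative):

Size a = Size.parse("1920x1080");
Size b = new Size(1280, 720);
boolean aIsBigger = a.compareTo(b) > 0; // true: compares by area, 2073600 vs 921600 px
SortedSet<Size> ordered = new TreeSet<>(); // natural ordering is by area, per compareTo above
ordered.add(a);
ordered.add(b);
Size smallest = ordered.first(); // 1280x720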

View File

@ -0,0 +1,82 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import androidx.collection.ArrayMap;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* A collection class that automatically groups {@link Size}s by their {@link AspectRatio}s.
*/
class SizeMap {
private final ArrayMap<AspectRatio, SortedSet<Size>> mRatios = new ArrayMap<>();
/**
* Add a new {@link Size} to this collection.
*
* @param size The size to add.
* @return {@code true} if it is added, {@code false} if it already exists and is not added.
*/
public boolean add(Size size) {
for (AspectRatio ratio : mRatios.keySet()) {
if (ratio.matches(size)) {
final SortedSet<Size> sizes = mRatios.get(ratio);
if (sizes.contains(size)) {
return false;
} else {
sizes.add(size);
return true;
}
}
}
// None of the existing ratios matches the provided size; add a new key
SortedSet<Size> sizes = new TreeSet<>();
sizes.add(size);
mRatios.put(AspectRatio.of(size.getWidth(), size.getHeight()), sizes);
return true;
}
/**
* Removes the specified aspect ratio and all sizes associated with it.
*
* @param ratio The aspect ratio to be removed.
*/
public void remove(AspectRatio ratio) {
mRatios.remove(ratio);
}
Set<AspectRatio> ratios() {
return mRatios.keySet();
}
SortedSet<Size> sizes(AspectRatio ratio) {
return mRatios.get(ratio);
}
void clear() {
mRatios.clear();
}
boolean isEmpty() {
return mRatios.isEmpty();
}
}
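A short example of the grouping behavior (illustrative; assumes AspectRatio.of(16, 9) matches both sizes below, as their reduced ratios suggest):

SizeMap map = new SizeMap();
map.add(new Size(1920, 1080)); // creates a 16:9 bucket
map.add(new Size(1280, 720));  // joins the existing 16:9 bucket
map.add(new Size(1280, 720));  // returns false: already present
SortedSet<Size> hd = map.sizes(AspectRatio.of(16, 9)); // [1280x720, 1920x1080]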

View File

@ -0,0 +1,88 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import android.content.Context;
import androidx.core.view.ViewCompat;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import org.reactnative.camera.R;
class SurfaceViewPreview extends PreviewImpl {
final SurfaceView mSurfaceView;
SurfaceViewPreview(Context context, ViewGroup parent) {
final View view = View.inflate(context, R.layout.surface_view, parent);
mSurfaceView = (SurfaceView) view.findViewById(R.id.surface_view);
final SurfaceHolder holder = mSurfaceView.getHolder();
//noinspection deprecation
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
holder.addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder h) {
}
@Override
public void surfaceChanged(SurfaceHolder h, int format, int width, int height) {
setSize(width, height);
if (!ViewCompat.isInLayout(mSurfaceView)) {
dispatchSurfaceChanged();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder h) {
setSize(0, 0);
}
});
}
@Override
Surface getSurface() {
return getSurfaceHolder().getSurface();
}
@Override
SurfaceHolder getSurfaceHolder() {
return mSurfaceView.getHolder();
}
@Override
View getView() {
return mSurfaceView;
}
@Override
Class getOutputClass() {
return SurfaceHolder.class;
}
@Override
void setDisplayOrientation(int displayOrientation) {
}
@Override
boolean isReady() {
return getWidth() != 0 && getHeight() != 0;
}
}

View File

@ -0,0 +1,146 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import org.reactnative.camera.R;
@TargetApi(14)
class TextureViewPreview extends PreviewImpl {
private final TextureView mTextureView;
private int mDisplayOrientation;
TextureViewPreview(Context context, ViewGroup parent) {
final View view = View.inflate(context, R.layout.texture_view, parent);
mTextureView = (TextureView) view.findViewById(R.id.texture_view);
mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
setSize(width, height);
configureTransform();
dispatchSurfaceChanged();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
setSize(width, height);
configureTransform();
dispatchSurfaceChanged();
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
setSize(0, 0);
dispatchSurfaceDestroyed();
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
});
}
// This method is called only from Camera2.
@TargetApi(15)
@Override
void setBufferSize(int width, int height) {
mTextureView.getSurfaceTexture().setDefaultBufferSize(width, height);
}
@Override
Surface getSurface() {
return new Surface(mTextureView.getSurfaceTexture());
}
@Override
SurfaceTexture getSurfaceTexture() {
return mTextureView.getSurfaceTexture();
}
@Override
View getView() {
return mTextureView;
}
@Override
Class getOutputClass() {
return SurfaceTexture.class;
}
@Override
void setDisplayOrientation(int displayOrientation) {
mDisplayOrientation = displayOrientation;
configureTransform();
}
@Override
boolean isReady() {
return mTextureView.getSurfaceTexture() != null;
}
/**
* Configures the transform matrix for TextureView based on {@link #mDisplayOrientation} and
* the surface size.
*/
void configureTransform() {
Matrix matrix = new Matrix();
if (mDisplayOrientation % 180 == 90) {
final int width = getWidth();
final int height = getHeight();
// Rotate the camera preview when the screen is landscape.
matrix.setPolyToPoly(
new float[]{
0.f, 0.f, // top left
width, 0.f, // top right
0.f, height, // bottom left
width, height, // bottom right
}, 0,
mDisplayOrientation == 90 ?
// Clockwise
new float[]{
0.f, height, // top left
0.f, 0.f, // top right
width, height, // bottom left
width, 0.f, // bottom right
} : // mDisplayOrientation == 270
// Counter-clockwise
new float[]{
width, 0.f, // top left
width, height, // top right
0.f, 0.f, // bottom left
0.f, height, // bottom right
}, 0,
4);
} else if (mDisplayOrientation == 180) {
matrix.postRotate(180, getWidth() / 2, getHeight() / 2);
}
mTextureView.setTransform(matrix);
}
}
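A quick sanity check of the 90-degree branch above (illustrative; assumes a 400x300 view):

Matrix m = new Matrix();
m.setPolyToPoly(
new float[]{0, 0, 400, 0, 0, 300, 400, 300}, 0, // buffer corners
new float[]{0, 300, 0, 0, 400, 300, 400, 0}, 0, // where they land on screen
4);
float[] corner = {400, 0}; // buffer's top-right corner
m.mapPoints(corner);       // -> {0, 0}: rotated clockwise into the view's top-left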

View File

@ -0,0 +1,327 @@
package com.lwansbrough.RCTCamera;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import androidx.exifinterface.media.ExifInterface;
import android.util.Base64;
import android.util.Log;
import com.drew.imaging.ImageMetadataReader;
import com.drew.imaging.ImageProcessingException;
import com.drew.metadata.Directory;
import com.drew.metadata.Metadata;
import com.drew.metadata.MetadataException;
import com.drew.metadata.Tag;
import com.drew.metadata.exif.ExifIFD0Directory;
import com.drew.metadata.exif.ExifSubIFDDirectory;
import com.facebook.react.bridge.ReadableMap;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class MutableImage {
private static final String TAG = "RNCamera";
private final byte[] originalImageData;
private Bitmap currentRepresentation;
private Metadata originalImageMetaData;
private boolean hasBeenReoriented = false;
public MutableImage(byte[] originalImageData) {
this.originalImageData = originalImageData;
this.currentRepresentation = toBitmap(originalImageData);
}
public int getWidth() {
return this.currentRepresentation.getWidth();
}
public int getHeight() {
return this.currentRepresentation.getHeight();
}
public void mirrorImage() throws ImageMutationFailedException {
Matrix m = new Matrix();
m.preScale(-1, 1);
Bitmap bitmap = Bitmap.createBitmap(
currentRepresentation,
0,
0,
getWidth(),
getHeight(),
m,
false
);
if (bitmap == null)
throw new ImageMutationFailedException("failed to mirror");
this.currentRepresentation = bitmap;
}
public void fixOrientation() throws ImageMutationFailedException {
try {
Metadata metadata = originalImageMetaData();
ExifIFD0Directory exifIFD0Directory = metadata.getFirstDirectoryOfType(ExifIFD0Directory.class);
if (exifIFD0Directory == null) {
return;
} else if (exifIFD0Directory.containsTag(ExifIFD0Directory.TAG_ORIENTATION)) {
int exifOrientation = exifIFD0Directory.getInt(ExifIFD0Directory.TAG_ORIENTATION);
if (exifOrientation != 1) {
rotate(exifOrientation);
exifIFD0Directory.setInt(ExifIFD0Directory.TAG_ORIENTATION, 1);
}
}
} catch (ImageProcessingException | IOException | MetadataException e) {
throw new ImageMutationFailedException("failed to fix orientation", e);
}
}
public void cropToPreview(double previewRatio) throws IllegalArgumentException {
int pictureWidth = getWidth(), pictureHeight = getHeight();
int targetPictureWidth, targetPictureHeight;
if (previewRatio * pictureHeight > pictureWidth) {
targetPictureWidth = pictureWidth;
targetPictureHeight = (int) (pictureWidth / previewRatio);
} else {
targetPictureHeight = pictureHeight;
targetPictureWidth = (int) (pictureHeight * previewRatio);
}
this.currentRepresentation = Bitmap.createBitmap(
this.currentRepresentation,
(pictureWidth - targetPictureWidth) / 2,
(pictureHeight - targetPictureHeight) / 2,
targetPictureWidth,
targetPictureHeight);
}
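// Worked example (illustrative): a 4000x3000 picture shown in a 16:9 preview
// (previewRatio ~1.778): previewRatio * 3000 = 5333 > 4000, so the width is kept,
// targetPictureHeight = 4000 / 1.778 ~= 2250, and (3000 - 2250) / 2 = 375 rows
// are cropped from both the top and the bottom.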
//see http://www.impulseadventure.com/photo/exif-orientation.html
private void rotate(int exifOrientation) throws ImageMutationFailedException {
final Matrix bitmapMatrix = new Matrix();
switch (exifOrientation) {
case 1:
return; // no rotation required
case 2:
bitmapMatrix.postScale(-1, 1);
break;
case 3:
bitmapMatrix.postRotate(180);
break;
case 4:
bitmapMatrix.postRotate(180);
bitmapMatrix.postScale(-1, 1);
break;
case 5:
bitmapMatrix.postRotate(90);
bitmapMatrix.postScale(-1, 1);
break;
case 6:
bitmapMatrix.postRotate(90);
break;
case 7:
bitmapMatrix.postRotate(270);
bitmapMatrix.postScale(-1, 1);
break;
case 8:
bitmapMatrix.postRotate(270);
break;
default:
break;
}
Bitmap transformedBitmap = Bitmap.createBitmap(
currentRepresentation,
0,
0,
getWidth(),
getHeight(),
bitmapMatrix,
false
);
if (transformedBitmap == null)
throw new ImageMutationFailedException("failed to rotate");
this.currentRepresentation = transformedBitmap;
this.hasBeenReoriented = true;
}
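// Example (illustrative): EXIF orientation 6 means "rotate 90 CW" per the
// reference above, so the switch applies postRotate(90) and the stored pixels
// then display upright.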
private static Bitmap toBitmap(byte[] data) {
try {
ByteArrayInputStream inputStream = new ByteArrayInputStream(data);
Bitmap photo = BitmapFactory.decodeStream(inputStream);
inputStream.close();
return photo;
} catch (IOException e) {
throw new IllegalStateException("Will not happen", e);
}
}
public String toBase64(int jpegQualityPercent) {
return Base64.encodeToString(toJpeg(currentRepresentation, jpegQualityPercent), Base64.NO_WRAP);
}
public void writeDataToFile(File file, ReadableMap options, int jpegQualityPercent) throws IOException {
FileOutputStream fos = new FileOutputStream(file);
fos.write(toJpeg(currentRepresentation, jpegQualityPercent));
fos.close();
try {
ExifInterface exif = new ExifInterface(file.getAbsolutePath());
// copy original exif data to the output exif...
for (Directory directory : originalImageMetaData().getDirectories()) {
for (Tag tag : directory.getTags()) {
int tagType = tag.getTagType();
Object object = directory.getObject(tagType);
exif.setAttribute(tag.getTagName(), object.toString());
}
}
// Add missing exif data from a sub directory
ExifSubIFDDirectory directory = originalImageMetaData()
.getFirstDirectoryOfType(ExifSubIFDDirectory.class);
for (Tag tag : directory.getTags()) {
int tagType = tag.getTagType();
// As some of the exif data does not follow the naming of ExifInterface, the tag
// names need to be transformed into upper camel case format.
String tagName = tag.getTagName().replaceAll(" ", "");
Object object = directory.getObject(tagType);
if (tagName.equals(ExifInterface.TAG_EXPOSURE_TIME)) {
exif.setAttribute(tagName, convertExposureTimeToDoubleFormat(object.toString()));
} else {
exif.setAttribute(tagName, object.toString());
}
}
writeLocationExifData(options, exif);
if (hasBeenReoriented)
rewriteOrientation(exif);
exif.saveAttributes();
} catch (ImageProcessingException | IOException e) {
Log.e(TAG, "failed to save exif data", e);
}
}
// Reformats an exposure time value to match the ExifInterface format, e.g. 1/11 -> 0.0909.
// Even though the value is formatted as a double, it is returned as a String because
// exif.setAttribute requires one.
private String convertExposureTimeToDoubleFormat(String exposureTime) {
if (!exposureTime.contains("/"))
return "";
String[] exposureFractions = exposureTime.split("/");
double divider = Double.parseDouble(exposureFractions[1]);
double exposureTimeAsDouble = 1.0 / divider;
return Double.toString(exposureTimeAsDouble);
}
private void rewriteOrientation(ExifInterface exif) {
exif.setAttribute(ExifInterface.TAG_ORIENTATION, String.valueOf(ExifInterface.ORIENTATION_NORMAL));
}
private void writeLocationExifData(ReadableMap options, ExifInterface exif) {
if (!options.hasKey("metadata"))
return;
ReadableMap metadata = options.getMap("metadata");
if (!metadata.hasKey("location"))
return;
ReadableMap location = metadata.getMap("location");
if (!location.hasKey("coords"))
return;
try {
ReadableMap coords = location.getMap("coords");
double latitude = coords.getDouble("latitude");
double longitude = coords.getDouble("longitude");
GPS.writeExifData(latitude, longitude, exif);
} catch (IOException e) {
Log.e(TAG, "Couldn't write location data", e);
}
}
private Metadata originalImageMetaData() throws ImageProcessingException, IOException {
if (this.originalImageMetaData == null) { // this is expensive, don't do it more than once
originalImageMetaData = ImageMetadataReader.readMetadata(
new BufferedInputStream(new ByteArrayInputStream(originalImageData)),
originalImageData.length
);
}
return originalImageMetaData;
}
private static byte[] toJpeg(Bitmap bitmap, int quality) throws OutOfMemoryError {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, quality, outputStream);
try {
return outputStream.toByteArray();
} finally {
try {
outputStream.close();
} catch (IOException e) {
Log.e(TAG, "problem compressing jpeg", e);
}
}
}
public static class ImageMutationFailedException extends Exception {
public ImageMutationFailedException(String detailMessage, Throwable throwable) {
super(detailMessage, throwable);
}
public ImageMutationFailedException(String detailMessage) {
super(detailMessage);
}
}
private static class GPS {
public static void writeExifData(double latitude, double longitude, ExifInterface exif) throws IOException {
exif.setAttribute(ExifInterface.TAG_GPS_LATITUDE, toDegreeMinuteSeconds(latitude));
exif.setAttribute(ExifInterface.TAG_GPS_LATITUDE_REF, latitudeRef(latitude));
exif.setAttribute(ExifInterface.TAG_GPS_LONGITUDE, toDegreeMinuteSeconds(longitude));
exif.setAttribute(ExifInterface.TAG_GPS_LONGITUDE_REF, longitudeRef(longitude));
}
private static String latitudeRef(double latitude) {
return latitude < 0.0d ? "S" : "N";
}
private static String longitudeRef(double longitude) {
return longitude < 0.0d ? "W" : "E";
}
private static String toDegreeMinuteSeconds(double coordinate) {
coordinate = Math.abs(coordinate);
int degree = (int) coordinate;
coordinate *= 60;
coordinate -= (degree * 60.0d);
int minute = (int) coordinate;
coordinate *= 60;
coordinate -= (minute * 60.0d);
int second = (int) (coordinate * 1000.0d);
StringBuilder sb = new StringBuilder();
sb.append(degree);
sb.append("/1,");
sb.append(minute);
sb.append("/1,");
sb.append(second);
sb.append("/1000,");
return sb.toString();
}
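// Worked example (illustrative): 37.7749 -> degree 37; 0.7749 * 60 = 46.494 ->
// minute 46; 0.494 * 60 = 29.64 -> second 29640 (thousandths), so the result is
// "37/1,46/1,29640/1000,".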
}
}

View File

@ -0,0 +1,536 @@
/**
* Created by Fabrice Armisen (farmisen@gmail.com) on 1/4/16.
*/
package com.lwansbrough.RCTCamera;
import android.graphics.drawable.GradientDrawable;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.util.Log;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class RCTCamera {
private static RCTCamera ourInstance;
private final HashMap<Integer, CameraInfoWrapper> _cameraInfos;
private final HashMap<Integer, Integer> _cameraTypeToIndex;
private final Map<Number, Camera> _cameras;
private static final Resolution RESOLUTION_480P = new Resolution(853, 480); // 480p shoots for a 16:9 HD aspect ratio, but can otherwise fall back/down to any other supported camera sizes, such as 800x480 or 720x480, if (any) present. See getSupportedPictureSizes/getSupportedVideoSizes below.
private static final Resolution RESOLUTION_720P = new Resolution(1280, 720);
private static final Resolution RESOLUTION_1080P = new Resolution(1920, 1080);
private boolean _barcodeScannerEnabled = false;
private List<String> _barCodeTypes = null;
private int _orientation = -1;
private int _actualDeviceOrientation = 0;
private int _adjustedDeviceOrientation = 0;
public static RCTCamera getInstance() {
return ourInstance;
}
public static void createInstance(int deviceOrientation) {
ourInstance = new RCTCamera(deviceOrientation);
}
public synchronized Camera acquireCameraInstance(int type) {
if (null == _cameras.get(type) && null != _cameraTypeToIndex.get(type)) {
try {
Camera camera = Camera.open(_cameraTypeToIndex.get(type));
_cameras.put(type, camera);
adjustPreviewLayout(type);
} catch (Exception e) {
Log.e("RCTCamera", "acquireCameraInstance failed", e);
}
}
return _cameras.get(type);
}
public void releaseCameraInstance(int type) {
// Release seems async and creates race conditions. Remove from map first before releasing.
Camera releasingCamera = _cameras.get(type);
if (null != releasingCamera) {
_cameras.remove(type);
releasingCamera.release();
}
}
public int getPreviewWidth(int type) {
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
if (null == cameraInfo) {
return 0;
}
return cameraInfo.previewWidth;
}
public int getPreviewHeight(int type) {
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
if (null == cameraInfo) {
return 0;
}
return cameraInfo.previewHeight;
}
public int getPreviewVisibleHeight(int type) {
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
if (null == cameraInfo) {
return 0;
}
return cameraInfo.previewVisibleHeight;
}
public int getPreviewVisibleWidth(int type) {
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
if (null == cameraInfo) {
return 0;
}
return cameraInfo.previewVisibleWidth;
}
public Camera.Size getBestSize(List<Camera.Size> supportedSizes, int maxWidth, int maxHeight) {
Camera.Size bestSize = null;
for (Camera.Size size : supportedSizes) {
if (size.width > maxWidth || size.height > maxHeight) {
continue;
}
if (bestSize == null) {
bestSize = size;
continue;
}
int resultArea = bestSize.width * bestSize.height;
int newArea = size.width * size.height;
if (newArea > resultArea) {
bestSize = size;
}
}
return bestSize;
}
private Camera.Size getSmallestSize(List<Camera.Size> supportedSizes) {
Camera.Size smallestSize = null;
for (Camera.Size size : supportedSizes) {
if (smallestSize == null) {
smallestSize = size;
continue;
}
int resultArea = smallestSize.width * smallestSize.height;
int newArea = size.width * size.height;
if (newArea < resultArea) {
smallestSize = size;
}
}
return smallestSize;
}
private Camera.Size getClosestSize(List<Camera.Size> supportedSizes, int matchWidth, int matchHeight) {
Camera.Size closestSize = null;
for (Camera.Size size : supportedSizes) {
if (closestSize == null) {
closestSize = size;
continue;
}
double currentDelta = Math.sqrt(Math.pow(closestSize.width - matchWidth,2) + Math.pow(closestSize.height - matchHeight,2));
double newDelta = Math.sqrt(Math.pow(size.width - matchWidth,2) + Math.pow(size.height - matchHeight,2));
if (newDelta < currentDelta) {
closestSize = size;
}
}
return closestSize;
}
protected List<Camera.Size> getSupportedVideoSizes(Camera camera) {
Camera.Parameters params = camera.getParameters();
// Prefer params.getSupportedVideoSizes() when available; otherwise fall back to the
// supported preview sizes (see http://bit.ly/1rxOsq0).
List<Camera.Size> sizes = params.getSupportedVideoSizes();
if (sizes != null) {
return sizes;
}
// Video sizes may be null, which indicates that all the supported
// preview sizes are supported for video recording.
return params.getSupportedPreviewSizes();
}
public int getOrientation() {
return _orientation;
}
public void setOrientation(int orientation) {
if (_orientation == orientation) {
return;
}
_orientation = orientation;
adjustPreviewLayout(RCTCameraModule.RCT_CAMERA_TYPE_FRONT);
adjustPreviewLayout(RCTCameraModule.RCT_CAMERA_TYPE_BACK);
}
public boolean isBarcodeScannerEnabled() {
return _barcodeScannerEnabled;
}
public void setBarcodeScannerEnabled(boolean barcodeScannerEnabled) {
_barcodeScannerEnabled = barcodeScannerEnabled;
}
public List<String> getBarCodeTypes() {
return _barCodeTypes;
}
public void setBarCodeTypes(List<String> barCodeTypes) {
_barCodeTypes = barCodeTypes;
}
public int getActualDeviceOrientation() {
return _actualDeviceOrientation;
}
public void setAdjustedDeviceOrientation(int orientation) {
_adjustedDeviceOrientation = orientation;
}
public int getAdjustedDeviceOrientation() {
return _adjustedDeviceOrientation;
}
public void setActualDeviceOrientation(int actualDeviceOrientation) {
_actualDeviceOrientation = actualDeviceOrientation;
adjustPreviewLayout(RCTCameraModule.RCT_CAMERA_TYPE_FRONT);
adjustPreviewLayout(RCTCameraModule.RCT_CAMERA_TYPE_BACK);
}
public void setCaptureMode(final int cameraType, final int captureMode) {
Camera camera = _cameras.get(cameraType);
if (camera == null) {
return;
}
// Set (video) recording hint based on camera type. For video recording, setting
// this hint can help reduce the time it takes to start recording.
Camera.Parameters parameters = camera.getParameters();
parameters.setRecordingHint(captureMode == RCTCameraModule.RCT_CAMERA_CAPTURE_MODE_VIDEO);
try {
camera.setParameters(parameters);
} catch (RuntimeException e) {
Log.e("RCTCamera", "setParameters failed", e);
}
}
public void setCaptureQuality(int cameraType, String captureQuality) {
Camera camera = this.acquireCameraInstance(cameraType);
if (camera == null) {
return;
}
Camera.Parameters parameters = camera.getParameters();
Camera.Size pictureSize = null;
List<Camera.Size> supportedSizes = parameters.getSupportedPictureSizes();
switch (captureQuality) {
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_LOW:
pictureSize = getSmallestSize(supportedSizes);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_MEDIUM:
pictureSize = supportedSizes.get(supportedSizes.size() / 2);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_HIGH:
pictureSize = getBestSize(parameters.getSupportedPictureSizes(), Integer.MAX_VALUE, Integer.MAX_VALUE);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_PREVIEW:
Camera.Size optimalPreviewSize = getBestSize(parameters.getSupportedPreviewSizes(), Integer.MAX_VALUE, Integer.MAX_VALUE);
pictureSize = getClosestSize(parameters.getSupportedPictureSizes(), optimalPreviewSize.width, optimalPreviewSize.height);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_480P:
pictureSize = getBestSize(supportedSizes, RESOLUTION_480P.width, RESOLUTION_480P.height);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_720P:
pictureSize = getBestSize(supportedSizes, RESOLUTION_720P.width, RESOLUTION_720P.height);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_1080P:
pictureSize = getBestSize(supportedSizes, RESOLUTION_1080P.width, RESOLUTION_1080P.height);
break;
}
if (pictureSize != null) {
parameters.setPictureSize(pictureSize.width, pictureSize.height);
try {
camera.setParameters(parameters);
} catch (RuntimeException e) {
Log.e("RCTCamera", "setParameters failed", e);
}
}
}
public CamcorderProfile setCaptureVideoQuality(int cameraType, String captureQuality) {
Camera camera = this.acquireCameraInstance(cameraType);
if (camera == null) {
return null;
}
Camera.Size videoSize = null;
List<Camera.Size> supportedSizes = getSupportedVideoSizes(camera);
CamcorderProfile cm = null;
switch (captureQuality) {
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_LOW:
videoSize = getSmallestSize(supportedSizes);
cm = CamcorderProfile.get(_cameraTypeToIndex.get(cameraType), CamcorderProfile.QUALITY_480P);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_MEDIUM:
videoSize = supportedSizes.get(supportedSizes.size() / 2);
cm = CamcorderProfile.get(_cameraTypeToIndex.get(cameraType), CamcorderProfile.QUALITY_720P);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_HIGH:
videoSize = getBestSize(supportedSizes, Integer.MAX_VALUE, Integer.MAX_VALUE);
cm = CamcorderProfile.get(_cameraTypeToIndex.get(cameraType), CamcorderProfile.QUALITY_HIGH);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_480P:
videoSize = getBestSize(supportedSizes, RESOLUTION_480P.width, RESOLUTION_480P.height);
cm = CamcorderProfile.get(_cameraTypeToIndex.get(cameraType), CamcorderProfile.QUALITY_480P);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_720P:
videoSize = getBestSize(supportedSizes, RESOLUTION_720P.width, RESOLUTION_720P.height);
cm = CamcorderProfile.get(_cameraTypeToIndex.get(cameraType), CamcorderProfile.QUALITY_720P);
break;
case RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_1080P:
videoSize = getBestSize(supportedSizes, RESOLUTION_1080P.width, RESOLUTION_1080P.height);
cm = CamcorderProfile.get(_cameraTypeToIndex.get(cameraType), CamcorderProfile.QUALITY_1080P);
break;
}
if (cm == null) {
return null;
}
if (videoSize != null) {
cm.videoFrameHeight = videoSize.height;
cm.videoFrameWidth = videoSize.width;
}
return cm;
}
public void setTorchMode(int cameraType, int torchMode) {
Camera camera = this.acquireCameraInstance(cameraType);
if (null == camera) {
return;
}
Camera.Parameters parameters = camera.getParameters();
String value = parameters.getFlashMode();
switch (torchMode) {
case RCTCameraModule.RCT_CAMERA_TORCH_MODE_ON:
value = Camera.Parameters.FLASH_MODE_TORCH;
break;
case RCTCameraModule.RCT_CAMERA_TORCH_MODE_OFF:
value = Camera.Parameters.FLASH_MODE_OFF;
break;
}
List<String> flashModes = parameters.getSupportedFlashModes();
if (flashModes != null && flashModes.contains(value)) {
parameters.setFlashMode(value);
try {
camera.setParameters(parameters);
} catch (RuntimeException e) {
Log.e("RCTCamera", "setParameters failed", e);
}
}
}
public void setFlashMode(int cameraType, int flashMode) {
Camera camera = this.acquireCameraInstance(cameraType);
if (null == camera) {
return;
}
Camera.Parameters parameters = camera.getParameters();
String value = parameters.getFlashMode();
switch (flashMode) {
case RCTCameraModule.RCT_CAMERA_FLASH_MODE_AUTO:
value = Camera.Parameters.FLASH_MODE_AUTO;
break;
case RCTCameraModule.RCT_CAMERA_FLASH_MODE_ON:
value = Camera.Parameters.FLASH_MODE_ON;
break;
case RCTCameraModule.RCT_CAMERA_FLASH_MODE_OFF:
value = Camera.Parameters.FLASH_MODE_OFF;
break;
}
List<String> flashModes = parameters.getSupportedFlashModes();
if (flashModes != null && flashModes.contains(value)) {
parameters.setFlashMode(value);
try {
camera.setParameters(parameters);
} catch (RuntimeException e) {
Log.e("RCTCamera", "setParameters failed", e);
}
}
}
public void setZoom(int cameraType, int zoom) {
Camera camera = this.acquireCameraInstance(cameraType);
if (null == camera) {
return;
}
Camera.Parameters parameters = camera.getParameters();
int maxZoom = parameters.getMaxZoom();
if (parameters.isZoomSupported()) {
if (zoom >= 0 && zoom <= maxZoom) { // Android documents the valid range as 0..getMaxZoom(), inclusive
parameters.setZoom(zoom);
try {
camera.setParameters(parameters);
} catch (RuntimeException e) {
Log.e("RCTCamera", "setParameters failed", e);
}
}
}
}
public void adjustCameraRotationToDeviceOrientation(int type, int deviceOrientation) {
Camera camera = _cameras.get(type);
if (null == camera) {
return;
}
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
int rotation;
int orientation = cameraInfo.info.orientation;
if (cameraInfo.info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
rotation = (orientation + deviceOrientation * 90) % 360;
} else {
rotation = (orientation - deviceOrientation * 90 + 360) % 360;
}
cameraInfo.rotation = rotation;
Camera.Parameters parameters = camera.getParameters();
parameters.setRotation(cameraInfo.rotation);
try {
camera.setParameters(parameters);
} catch (Exception e) {
e.printStackTrace();
}
}
public void adjustPreviewLayout(int type) {
Camera camera = _cameras.get(type);
if (null == camera) {
return;
}
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
int displayRotation;
int rotation;
int orientation = cameraInfo.info.orientation;
if (cameraInfo.info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
rotation = (orientation + _actualDeviceOrientation * 90) % 360;
displayRotation = (720 - orientation - _actualDeviceOrientation * 90) % 360;
} else {
rotation = (orientation - _actualDeviceOrientation * 90 + 360) % 360;
displayRotation = rotation;
}
cameraInfo.rotation = rotation;
// TODO: take in account the _orientation prop
setAdjustedDeviceOrientation(rotation);
camera.setDisplayOrientation(displayRotation);
Camera.Parameters parameters = camera.getParameters();
parameters.setRotation(cameraInfo.rotation);
// set preview size
// defaults to highest resolution available
Camera.Size optimalPreviewSize = getBestSize(parameters.getSupportedPreviewSizes(), Integer.MAX_VALUE, Integer.MAX_VALUE);
int width = optimalPreviewSize.width;
int height = optimalPreviewSize.height;
parameters.setPreviewSize(width, height);
try {
camera.setParameters(parameters);
} catch (Exception e) {
e.printStackTrace();
}
if (cameraInfo.rotation == 0 || cameraInfo.rotation == 180) {
cameraInfo.previewWidth = width;
cameraInfo.previewHeight = height;
} else {
cameraInfo.previewWidth = height;
cameraInfo.previewHeight = width;
}
}
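// Worked example (illustrative): back camera with info.orientation = 90 and
// _actualDeviceOrientation = 0 (portrait): rotation = (90 - 0 + 360) % 360 = 90,
// displayRotation = 90, and a 1920x1080 preview is reported as 1080x1920.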
public void setPreviewVisibleSize(int type, int width, int height) {
CameraInfoWrapper cameraInfo = _cameraInfos.get(type);
if (cameraInfo == null) {
return;
}
cameraInfo.previewVisibleWidth = width;
cameraInfo.previewVisibleHeight = height;
}
private RCTCamera(int deviceOrientation) {
_cameras = new HashMap<>();
_cameraInfos = new HashMap<>();
_cameraTypeToIndex = new HashMap<>();
_actualDeviceOrientation = deviceOrientation;
// map camera types to camera indexes and collect cameras properties
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(i, info);
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT && _cameraInfos.get(RCTCameraModule.RCT_CAMERA_TYPE_FRONT) == null) {
_cameraInfos.put(RCTCameraModule.RCT_CAMERA_TYPE_FRONT, new CameraInfoWrapper(info));
_cameraTypeToIndex.put(RCTCameraModule.RCT_CAMERA_TYPE_FRONT, i);
acquireCameraInstance(RCTCameraModule.RCT_CAMERA_TYPE_FRONT);
releaseCameraInstance(RCTCameraModule.RCT_CAMERA_TYPE_FRONT);
} else if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK && _cameraInfos.get(RCTCameraModule.RCT_CAMERA_TYPE_BACK) == null) {
_cameraInfos.put(RCTCameraModule.RCT_CAMERA_TYPE_BACK, new CameraInfoWrapper(info));
_cameraTypeToIndex.put(RCTCameraModule.RCT_CAMERA_TYPE_BACK, i);
acquireCameraInstance(RCTCameraModule.RCT_CAMERA_TYPE_BACK);
releaseCameraInstance(RCTCameraModule.RCT_CAMERA_TYPE_BACK);
}
}
}
private class CameraInfoWrapper {
public final Camera.CameraInfo info;
public int rotation = 0;
public int previewWidth = -1;
public int previewHeight = -1;
public int previewVisibleWidth = -1;
public int previewVisibleHeight = -1;
public CameraInfoWrapper(Camera.CameraInfo info) {
this.info = info;
}
}
private static class Resolution {
public int width;
public int height;
public Resolution(final int width, final int height) {
this.width = width;
this.height = height;
}
}
}
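An illustrative lifecycle of the singleton above (the constants come from RCTCameraModule, defined in the next file; `deviceOrientation` is assumed to be the current device orientation):

RCTCamera.createInstance(deviceOrientation); // once, at startup
RCTCamera rct = RCTCamera.getInstance();
Camera cam = rct.acquireCameraInstance(RCTCameraModule.RCT_CAMERA_TYPE_BACK);
rct.setCaptureQuality(RCTCameraModule.RCT_CAMERA_TYPE_BACK,
RCTCameraModule.RCT_CAMERA_CAPTURE_QUALITY_720P);
rct.releaseCameraInstance(RCTCameraModule.RCT_CAMERA_TYPE_BACK);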

View File

@ -0,0 +1,871 @@
/**
* Created by Fabrice Armisen (farmisen@gmail.com) on 1/4/16.
* Android video recording support by Marc Johnson (me@marc.mn) 4/2016
*/
package com.lwansbrough.RCTCamera;
import android.content.ContentValues;
import android.content.res.Configuration;
import android.hardware.Camera;
import android.media.*;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Base64;
import android.util.Log;
import android.view.Surface;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.bridge.WritableNativeMap;
import java.io.*;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
public class RCTCameraModule extends ReactContextBaseJavaModule
implements MediaRecorder.OnInfoListener, MediaRecorder.OnErrorListener, LifecycleEventListener {
private static final String TAG = "RCTCameraModule";
public static final int RCT_CAMERA_ASPECT_FILL = 0;
public static final int RCT_CAMERA_ASPECT_FIT = 1;
public static final int RCT_CAMERA_ASPECT_STRETCH = 2;
public static final int RCT_CAMERA_CAPTURE_MODE_STILL = 0;
public static final int RCT_CAMERA_CAPTURE_MODE_VIDEO = 1;
public static final int RCT_CAMERA_CAPTURE_TARGET_MEMORY = 0;
public static final int RCT_CAMERA_CAPTURE_TARGET_DISK = 1;
public static final int RCT_CAMERA_CAPTURE_TARGET_CAMERA_ROLL = 2;
public static final int RCT_CAMERA_CAPTURE_TARGET_TEMP = 3;
public static final int RCT_CAMERA_ORIENTATION_AUTO = Integer.MAX_VALUE;
public static final int RCT_CAMERA_ORIENTATION_PORTRAIT = Surface.ROTATION_0;
public static final int RCT_CAMERA_ORIENTATION_PORTRAIT_UPSIDE_DOWN = Surface.ROTATION_180;
public static final int RCT_CAMERA_ORIENTATION_LANDSCAPE_LEFT = Surface.ROTATION_90;
public static final int RCT_CAMERA_ORIENTATION_LANDSCAPE_RIGHT = Surface.ROTATION_270;
public static final int RCT_CAMERA_TYPE_FRONT = 1;
public static final int RCT_CAMERA_TYPE_BACK = 2;
public static final int RCT_CAMERA_FLASH_MODE_OFF = 0;
public static final int RCT_CAMERA_FLASH_MODE_ON = 1;
public static final int RCT_CAMERA_FLASH_MODE_AUTO = 2;
public static final int RCT_CAMERA_TORCH_MODE_OFF = 0;
public static final int RCT_CAMERA_TORCH_MODE_ON = 1;
public static final int RCT_CAMERA_TORCH_MODE_AUTO = 2;
public static final String RCT_CAMERA_CAPTURE_QUALITY_PREVIEW = "preview";
public static final String RCT_CAMERA_CAPTURE_QUALITY_HIGH = "high";
public static final String RCT_CAMERA_CAPTURE_QUALITY_MEDIUM = "medium";
public static final String RCT_CAMERA_CAPTURE_QUALITY_LOW = "low";
public static final String RCT_CAMERA_CAPTURE_QUALITY_1080P = "1080p";
public static final String RCT_CAMERA_CAPTURE_QUALITY_720P = "720p";
public static final String RCT_CAMERA_CAPTURE_QUALITY_480P = "480p";
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
private static ReactApplicationContext _reactContext;
private RCTSensorOrientationChecker _sensorOrientationChecker;
private MediaRecorder mMediaRecorder;
private long MRStartTime;
private File mVideoFile;
private Camera mCamera = null;
private Promise mRecordingPromise = null;
private ReadableMap mRecordingOptions;
private Boolean mSafeToCapture = true;
public RCTCameraModule(ReactApplicationContext reactContext) {
super(reactContext);
_reactContext = reactContext;
_sensorOrientationChecker = new RCTSensorOrientationChecker(_reactContext);
_reactContext.addLifecycleEventListener(this);
}
public static ReactApplicationContext getReactContextSingleton() {
return _reactContext;
}
/**
* Callback invoked on new MediaRecorder info.
*
* See https://developer.android.com/reference/android/media/MediaRecorder.OnInfoListener.html
* for more information.
*
* @param mr MediaRecorder instance for which this callback is being invoked.
* @param what Type of info we have received.
* @param extra Extra code, specific to the info type.
*/
public void onInfo(MediaRecorder mr, int what, int extra) {
if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED ||
what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
if (mRecordingPromise != null) {
releaseMediaRecorder(); // release the MediaRecorder object and resolve promise
}
}
}
/**
* Callback invoked when a MediaRecorder instance encounters an error while recording.
*
* See https://developer.android.com/reference/android/media/MediaRecorder.OnErrorListener.html
* for more information.
*
* @param mr MediaRecorder instance for which this callback is being invoked.
* @param what Type of error that has occurred.
* @param extra Extra code, specific to the error type.
*/
public void onError(MediaRecorder mr, int what, int extra) {
// On any error, release the MediaRecorder object and resolve promise. In particular, this
// prevents leaving the camera in an unrecoverable state if we crash in the middle of
// recording.
if (mRecordingPromise != null) {
releaseMediaRecorder();
}
}
@Override
public String getName() {
return "RCTCameraModule";
}
@Nullable
@Override
public Map<String, Object> getConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("Aspect", getAspectConstants());
put("BarCodeType", getBarCodeConstants());
put("Type", getTypeConstants());
put("CaptureQuality", getCaptureQualityConstants());
put("CaptureMode", getCaptureModeConstants());
put("CaptureTarget", getCaptureTargetConstants());
put("Orientation", getOrientationConstants());
put("FlashMode", getFlashModeConstants());
put("TorchMode", getTorchModeConstants());
}
private Map<String, Object> getAspectConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("stretch", RCT_CAMERA_ASPECT_STRETCH);
put("fit", RCT_CAMERA_ASPECT_FIT);
put("fill", RCT_CAMERA_ASPECT_FILL);
}
});
}
private Map<String, Object> getBarCodeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
// @TODO add barcode types
}
});
}
private Map<String, Object> getTypeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("front", RCT_CAMERA_TYPE_FRONT);
put("back", RCT_CAMERA_TYPE_BACK);
}
});
}
private Map<String, Object> getCaptureQualityConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("low", RCT_CAMERA_CAPTURE_QUALITY_LOW);
put("medium", RCT_CAMERA_CAPTURE_QUALITY_MEDIUM);
put("high", RCT_CAMERA_CAPTURE_QUALITY_HIGH);
put("photo", RCT_CAMERA_CAPTURE_QUALITY_HIGH);
put("preview", RCT_CAMERA_CAPTURE_QUALITY_PREVIEW);
put("480p", RCT_CAMERA_CAPTURE_QUALITY_480P);
put("720p", RCT_CAMERA_CAPTURE_QUALITY_720P);
put("1080p", RCT_CAMERA_CAPTURE_QUALITY_1080P);
}
});
}
private Map<String, Object> getCaptureModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("still", RCT_CAMERA_CAPTURE_MODE_STILL);
put("video", RCT_CAMERA_CAPTURE_MODE_VIDEO);
}
});
}
private Map<String, Object> getCaptureTargetConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("memory", RCT_CAMERA_CAPTURE_TARGET_MEMORY);
put("disk", RCT_CAMERA_CAPTURE_TARGET_DISK);
put("cameraRoll", RCT_CAMERA_CAPTURE_TARGET_CAMERA_ROLL);
put("temp", RCT_CAMERA_CAPTURE_TARGET_TEMP);
}
});
}
private Map<String, Object> getOrientationConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("auto", RCT_CAMERA_ORIENTATION_AUTO);
put("landscapeLeft", RCT_CAMERA_ORIENTATION_LANDSCAPE_LEFT);
put("landscapeRight", RCT_CAMERA_ORIENTATION_LANDSCAPE_RIGHT);
put("portrait", RCT_CAMERA_ORIENTATION_PORTRAIT);
put("portraitUpsideDown", RCT_CAMERA_ORIENTATION_PORTRAIT_UPSIDE_DOWN);
}
});
}
private Map<String, Object> getFlashModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("off", RCT_CAMERA_FLASH_MODE_OFF);
put("on", RCT_CAMERA_FLASH_MODE_ON);
put("auto", RCT_CAMERA_FLASH_MODE_AUTO);
}
});
}
private Map<String, Object> getTorchModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("off", RCT_CAMERA_TORCH_MODE_OFF);
put("on", RCT_CAMERA_TORCH_MODE_ON);
put("auto", RCT_CAMERA_TORCH_MODE_AUTO);
}
});
}
});
}
/**
* Prepare media recorder for video capture.
*
* See "Capturing Videos" at https://developer.android.com/guide/topics/media/camera.html for
* a guideline of steps and more information in general.
*
* @param options Options.
* @return Throwable; null if no errors.
*/
private Throwable prepareMediaRecorder(ReadableMap options, int deviceOrientation) {
// Prepare CamcorderProfile instance, setting essential options.
CamcorderProfile cm = RCTCamera.getInstance().setCaptureVideoQuality(options.getInt("type"), options.getString("quality"));
if (cm == null) {
return new RuntimeException("CamcorderProfile not found in prepareMediaRecorder.");
}
// Unlock camera to make available for MediaRecorder. Note that this statement must be
// executed before calling setCamera when configuring the MediaRecorder instance.
mCamera.unlock();
// Create new MediaRecorder instance.
mMediaRecorder = new MediaRecorder();
// Attach callback to handle maxDuration (@see onInfo method in this file).
mMediaRecorder.setOnInfoListener(this);
// Attach error listener (@see onError method in this file).
mMediaRecorder.setOnErrorListener(this);
// Set camera.
mMediaRecorder.setCamera(mCamera);
// Set AV sources.
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
// Adjust for orientation.
// mMediaRecorder.setOrientationHint(RCTCamera.getInstance().getAdjustedDeviceOrientation());
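// Map the Surface.ROTATION_* value (0..3) reported for the device to the clockwise rotation
// hint, in degrees, that MediaRecorder writes into the video metadata. The mapping below
// assumes the typical landscape-mounted sensor: ROTATION_0 -> 90, ROTATION_90 -> 0,
// ROTATION_180 -> 270, ROTATION_270 -> 180.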
switch (deviceOrientation) {
case 0: mMediaRecorder.setOrientationHint(90);
break;
case 1: mMediaRecorder.setOrientationHint(0);
break;
case 2: mMediaRecorder.setOrientationHint(270);
break;
case 3: mMediaRecorder.setOrientationHint(180);
break;
}
// Set video output format and encoding using CamcorderProfile.
cm.fileFormat = MediaRecorder.OutputFormat.MPEG_4;
mMediaRecorder.setProfile(cm);
// Set video output file.
mVideoFile = null;
switch (options.getInt("target")) {
case RCT_CAMERA_CAPTURE_TARGET_MEMORY:
mVideoFile = getTempMediaFile(MEDIA_TYPE_VIDEO); // temporarily
break;
case RCT_CAMERA_CAPTURE_TARGET_CAMERA_ROLL:
mVideoFile = getOutputCameraRollFile(MEDIA_TYPE_VIDEO);
break;
case RCT_CAMERA_CAPTURE_TARGET_TEMP:
mVideoFile = getTempMediaFile(MEDIA_TYPE_VIDEO);
break;
default:
case RCT_CAMERA_CAPTURE_TARGET_DISK:
mVideoFile = getOutputMediaFile(MEDIA_TYPE_VIDEO);
break;
}
if (mVideoFile == null) {
return new RuntimeException("Error while preparing output file in prepareMediaRecorder.");
}
mMediaRecorder.setOutputFile(mVideoFile.getPath());
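// Honor optional recording limits: totalSeconds is given in seconds (setMaxDuration expects
// milliseconds, hence the * 1000), while maxFileSize is given in bytes.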
if (options.hasKey("totalSeconds")) {
int totalSeconds = options.getInt("totalSeconds");
mMediaRecorder.setMaxDuration(totalSeconds * 1000);
}
if (options.hasKey("maxFileSize")) {
int maxFileSize = options.getInt("maxFileSize");
mMediaRecorder.setMaxFileSize(maxFileSize);
}
// Prepare the MediaRecorder instance with the provided configuration settings.
try {
mMediaRecorder.prepare();
} catch (Exception ex) {
Log.e(TAG, "Media recorder prepare error.", ex);
releaseMediaRecorder();
return ex;
}
return null;
}
private void record(final ReadableMap options, final Promise promise, final int deviceOrientation) {
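// A non-null mRecordingPromise doubles as the 'recording in progress' flag; bail out early
// if a recording session is already running.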
if (mRecordingPromise != null) {
return;
}
mCamera = RCTCamera.getInstance().acquireCameraInstance(options.getInt("type"));
if (mCamera == null) {
promise.reject(new RuntimeException("No camera found."));
return;
}
Throwable prepareError = prepareMediaRecorder(options, deviceOrientation);
if (prepareError != null) {
promise.reject(prepareError);
return;
}
try {
mMediaRecorder.start();
MRStartTime = System.currentTimeMillis();
mRecordingOptions = options;
mRecordingPromise = promise; // only got here if mediaRecorder started
} catch (Exception ex) {
Log.e(TAG, "Media recorder start error.", ex);
promise.reject(ex);
}
}
/**
* Release media recorder following video capture (or failure to start recording session).
*
* See "Capturing Videos" at https://developer.android.com/guide/topics/media/camera.html for
* a guideline of steps and more information in general.
*/
private void releaseMediaRecorder() {
// Must record for at least ~1.5 seconds, or MediaRecorder throws exceptions on some platforms.
long duration = System.currentTimeMillis() - MRStartTime;
if (duration < 1500) {
try {
Thread.sleep(1500 - duration);
} catch(InterruptedException ex) {
Log.e(TAG, "releaseMediaRecorder thread sleep error.", ex);
}
}
// Release actual MediaRecorder instance.
if (mMediaRecorder != null) {
// Stop recording video.
try {
mMediaRecorder.stop(); // stop the recording
} catch (RuntimeException ex) {
Log.e(TAG, "Media recorder stop error.", ex);
}
// Optionally, remove the configuration settings from the recorder.
mMediaRecorder.reset();
// Release the MediaRecorder.
mMediaRecorder.release();
// Reset variable.
mMediaRecorder = null;
}
// Lock the camera so that future MediaRecorder sessions can use it by calling
// Camera.lock(). Note this is not required on Android 4.0+ unless the
// MediaRecorder.prepare() call fails.
if (mCamera != null) {
mCamera.lock();
}
if (mRecordingPromise == null) {
return;
}
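// Resolve the pending recording promise according to the capture target that was requested
// when recording started.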
File f = new File(mVideoFile.getPath());
if (!f.exists()) {
mRecordingPromise.reject(new RuntimeException("There is nothing recorded."));
mRecordingPromise = null;
return;
}
f.setReadable(true, false); // so mediaplayer can play it
f.setWritable(true, false); // so can clean it up
WritableMap response = new WritableNativeMap();
switch (mRecordingOptions.getInt("target")) {
case RCT_CAMERA_CAPTURE_TARGET_MEMORY:
byte[] encoded = convertFileToByteArray(mVideoFile);
response.putString("data", Base64.encodeToString(encoded, Base64.NO_WRAP));
mRecordingPromise.resolve(response);
f.delete();
break;
case RCT_CAMERA_CAPTURE_TARGET_CAMERA_ROLL:
ContentValues values = new ContentValues();
values.put(MediaStore.Video.Media.DATA, mVideoFile.getPath());
values.put(MediaStore.Video.Media.TITLE, mRecordingOptions.hasKey("title") ? mRecordingOptions.getString("title") : "video");
if (mRecordingOptions.hasKey("description")) {
values.put(MediaStore.Video.Media.DESCRIPTION, mRecordingOptions.getString("description"));
}
if (mRecordingOptions.hasKey("latitude")) {
values.put(MediaStore.Video.Media.LATITUDE, mRecordingOptions.getString("latitude"));
}
if (mRecordingOptions.hasKey("longitude")) {
values.put(MediaStore.Video.Media.LONGITUDE, mRecordingOptions.getString("longitude"));
}
values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
_reactContext.getContentResolver().insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
addToMediaStore(mVideoFile.getAbsolutePath());
response.putString("path", Uri.fromFile(mVideoFile).toString());
mRecordingPromise.resolve(response);
break;
case RCT_CAMERA_CAPTURE_TARGET_TEMP:
case RCT_CAMERA_CAPTURE_TARGET_DISK:
response.putString("path", Uri.fromFile(mVideoFile).toString());
mRecordingPromise.resolve(response);
}
mRecordingPromise = null;
}
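/**
* Read the given file fully into memory. Note that this loads the entire recording into a
* single byte array, so memory-target captures should be kept short.
*/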
public static byte[] convertFileToByteArray(File f) {
byte[] byteArray = null;
// try-with-resources ensures the stream is closed even if a read fails
try (InputStream inputStream = new FileInputStream(f)) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
byte[] b = new byte[1024 * 8];
int bytesRead;
while ((bytesRead = inputStream.read(b)) != -1) {
bos.write(b, 0, bytesRead);
}
byteArray = bos.toByteArray();
} catch (IOException e) {
e.printStackTrace();
}
return byteArray;
}
@ReactMethod
public void capture(final ReadableMap options, final Promise promise) {
if (RCTCamera.getInstance() == null) {
promise.reject("Camera is not ready yet.");
return;
}
int orientation = options.hasKey("orientation") ? options.getInt("orientation") : RCTCamera.getInstance().getOrientation();
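// For AUTO orientation, sample the accelerometer once via the sensor checker and capture with
// the detected device orientation; otherwise capture immediately with the configured one.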
if (orientation == RCT_CAMERA_ORIENTATION_AUTO) {
_sensorOrientationChecker.onResume();
_sensorOrientationChecker.registerOrientationListener(new RCTSensorOrientationListener() {
@Override
public void orientationEvent() {
int deviceOrientation = _sensorOrientationChecker.getOrientation();
_sensorOrientationChecker.unregisterOrientationListener();
_sensorOrientationChecker.onPause();
captureWithOrientation(options, promise, deviceOrientation);
}
});
} else {
captureWithOrientation(options, promise, orientation);
}
}
private void captureWithOrientation(final ReadableMap options, final Promise promise, int deviceOrientation) {
final Camera camera = RCTCamera.getInstance().acquireCameraInstance(options.getInt("type"));
if (null == camera) {
promise.reject("No camera found.");
return;
}
if (options.getInt("mode") == RCT_CAMERA_CAPTURE_MODE_VIDEO) {
record(options, promise, deviceOrientation);
return;
}
if (options.hasKey("playSoundOnCapture") && options.getBoolean("playSoundOnCapture")) {
MediaActionSound sound = new MediaActionSound();
sound.play(MediaActionSound.SHUTTER_CLICK);
}
if (options.hasKey("quality")) {
RCTCamera.getInstance().setCaptureQuality(options.getInt("type"), options.getString("quality"));
}
RCTCamera.getInstance().adjustCameraRotationToDeviceOrientation(options.getInt("type"), deviceOrientation);
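// Temporarily stop forwarding preview frames (used for barcode scanning) while the still is
// being taken.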
camera.setPreviewCallback(null);
Camera.PictureCallback captureCallback = new Camera.PictureCallback() {
@Override
public void onPictureTaken(final byte[] data, Camera camera) {
camera.stopPreview();
camera.startPreview();
AsyncTask.execute(new Runnable() {
@Override
public void run() {
processImage(new MutableImage(data), options, promise);
}
});
mSafeToCapture = true;
}
};
Camera.ShutterCallback shutterCallback = new Camera.ShutterCallback() {
@Override
public void onShutter() {
try {
camera.setPreviewCallback(null);
camera.setPreviewTexture(null);
} catch (Exception e) {
e.printStackTrace();
}
}
};
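// mSafeToCapture guards against overlapping takePicture calls; it is re-armed in the picture
// callback once the previous capture has completed.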
if (mSafeToCapture) {
try {
camera.takePicture(shutterCallback, null, captureCallback);
mSafeToCapture = false;
} catch (RuntimeException ex) {
Log.e(TAG, "Couldn't capture photo.", ex);
}
}
}
/**
* Synchronized to prevent a user from crashing the app by taking many photos in quick
* succession and having them all processed concurrently, which would exhaust memory
* (especially on smaller devices) and slow everything down.
*/
private synchronized void processImage(MutableImage mutableImage, ReadableMap options, Promise promise) {
boolean shouldFixOrientation = options.hasKey("fixOrientation") && options.getBoolean("fixOrientation");
if (shouldFixOrientation) {
try {
mutableImage.fixOrientation();
} catch (MutableImage.ImageMutationFailedException e) {
promise.reject("Error fixing image orientation", e);
}
}
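// Determine whether the captured picture's aspect (landscape vs portrait) disagrees with the
// visible preview's; if so, the preview ratio must be inverted when cropping, and the reported
// width/height must be swapped.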
boolean needsReorient = false;
double previewRatio, pictureRatio = (double) mutableImage.getWidth() / (double) mutableImage.getHeight();
try {
int type = options.getInt("type");
previewRatio = (double) RCTCamera.getInstance().getPreviewVisibleWidth(type) / (double) RCTCamera.getInstance().getPreviewVisibleHeight(type);
needsReorient = (previewRatio > 1) != (pictureRatio > 1);
} catch (IllegalArgumentException e) {
previewRatio = pictureRatio;
}
boolean shouldCropToPreview = options.hasKey("cropToPreview") && options.getBoolean("cropToPreview");
if (shouldCropToPreview) {
try {
mutableImage.cropToPreview(needsReorient ? 1.0 / previewRatio : previewRatio);
} catch (IllegalArgumentException e) {
promise.reject("Error cropping image to preview", e);
}
}
boolean shouldMirror = options.hasKey("mirrorImage") && options.getBoolean("mirrorImage");
if (shouldMirror) {
try {
mutableImage.mirrorImage();
} catch (MutableImage.ImageMutationFailedException e) {
promise.reject("Error mirroring image", e);
}
}
int jpegQualityPercent = 80;
if (options.hasKey("jpegQuality")) {
jpegQualityPercent = options.getInt("jpegQuality");
}
int imgWidth = (needsReorient) ? mutableImage.getHeight() : mutableImage.getWidth();
int imgHeight = (needsReorient) ? mutableImage.getWidth() : mutableImage.getHeight();
switch (options.getInt("target")) {
case RCT_CAMERA_CAPTURE_TARGET_MEMORY:
String encoded = mutableImage.toBase64(jpegQualityPercent);
WritableMap response = new WritableNativeMap();
response.putString("data", encoded);
response.putInt("width", imgWidth);
response.putInt("height", imgHeight);
promise.resolve(response);
break;
case RCT_CAMERA_CAPTURE_TARGET_CAMERA_ROLL: {
File cameraRollFile = getOutputCameraRollFile(MEDIA_TYPE_IMAGE);
if (cameraRollFile == null) {
promise.reject("Error creating media file.");
return;
}
try {
mutableImage.writeDataToFile(cameraRollFile, options, jpegQualityPercent);
} catch (IOException | NullPointerException e) {
promise.reject("failed to save image file", e);
return;
}
addToMediaStore(cameraRollFile.getAbsolutePath());
resolveImage(cameraRollFile, imgWidth, imgHeight, promise, true);
break;
}
case RCT_CAMERA_CAPTURE_TARGET_DISK: {
File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
if (pictureFile == null) {
promise.reject("Error creating media file.");
return;
}
try {
mutableImage.writeDataToFile(pictureFile, options, jpegQualityPercent);
} catch (IOException e) {
promise.reject("failed to save image file", e);
return;
}
resolveImage(pictureFile, imgWidth, imgHeight, promise, false);
break;
}
case RCT_CAMERA_CAPTURE_TARGET_TEMP: {
File tempFile = getTempMediaFile(MEDIA_TYPE_IMAGE);
if (tempFile == null) {
promise.reject("Error creating media file.");
return;
}
try {
mutableImage.writeDataToFile(tempFile, options, jpegQualityPercent);
} catch (IOException e) {
promise.reject("failed to save image file", e);
return;
}
resolveImage(tempFile, imgWidth, imgHeight, promise, false);
break;
}
}
}
@ReactMethod
public void stopCapture(final Promise promise) {
if (mRecordingPromise != null) {
releaseMediaRecorder(); // release the MediaRecorder object
promise.resolve("Finished recording.");
} else {
promise.resolve("Not recording.");
}
}
@ReactMethod
public void hasFlash(ReadableMap options, final Promise promise) {
Camera camera = RCTCamera.getInstance().acquireCameraInstance(options.getInt("type"));
if (null == camera) {
promise.reject("No camera found.");
return;
}
List<String> flashModes = camera.getParameters().getSupportedFlashModes();
promise.resolve(null != flashModes && !flashModes.isEmpty());
}
@ReactMethod
public void setZoom(ReadableMap options, int zoom) {
RCTCamera instance = RCTCamera.getInstance();
if (instance == null) return;
Camera camera = instance.acquireCameraInstance(options.getInt("type"));
if (camera == null) return;
Camera.Parameters parameters = camera.getParameters();
int maxZoom = parameters.getMaxZoom();
if (parameters.isZoomSupported()) {
if (zoom >= 0 && zoom < maxZoom) {
parameters.setZoom(zoom);
try {
camera.setParameters(parameters);
} catch (RuntimeException e) {
Log.e("RCTCameraModule", "setParameters failed", e);
}
}
}
}
}
private File getOutputMediaFile(int type) {
// Get environment directory type id from requested media type.
String environmentDirectoryType;
if (type == MEDIA_TYPE_IMAGE) {
environmentDirectoryType = Environment.DIRECTORY_PICTURES;
} else if (type == MEDIA_TYPE_VIDEO) {
environmentDirectoryType = Environment.DIRECTORY_MOVIES;
} else {
Log.e(TAG, "Unsupported media type:" + type);
return null;
}
return getOutputFile(
type,
Environment.getExternalStoragePublicDirectory(environmentDirectoryType)
);
}
private File getOutputCameraRollFile(int type) {
return getOutputFile(
type,
Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM)
);
}
private File getOutputFile(int type, File storageDir) {
// Create the storage directory if it does not exist
if (!storageDir.exists()) {
if (!storageDir.mkdirs()) {
Log.e(TAG, "failed to create directory:" + storageDir.getAbsolutePath());
return null;
}
}
// Create a media file name
String fileName = String.format("%s", new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()));
if (type == MEDIA_TYPE_IMAGE) {
fileName = String.format("IMG_%s.jpg", fileName);
} else if (type == MEDIA_TYPE_VIDEO) {
fileName = String.format("VID_%s.mp4", fileName);
} else {
Log.e(TAG, "Unsupported media type:" + type);
return null;
}
return new File(String.format("%s%s%s", storageDir.getPath(), File.separator, fileName));
}
private File getTempMediaFile(int type) {
try {
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
File outputDir = _reactContext.getCacheDir();
File outputFile;
if (type == MEDIA_TYPE_IMAGE) {
outputFile = File.createTempFile("IMG_" + timeStamp, ".jpg", outputDir);
} else if (type == MEDIA_TYPE_VIDEO) {
outputFile = File.createTempFile("VID_" + timeStamp, ".mp4", outputDir);
} else {
Log.e(TAG, "Unsupported media type:" + type);
return null;
}
return outputFile;
} catch (Exception e) {
Log.e(TAG, "Failed to create temp media file.", e);
return null;
}
}
private void addToMediaStore(String path) {
MediaScannerConnection.scanFile(_reactContext, new String[] { path }, null, null);
}
/**
* LifecycleEventListener overrides
*/
@Override
public void onHostResume() {
mSafeToCapture = true;
}
@Override
public void onHostPause() {
// On pause, we stop any pending recording session
if (mRecordingPromise != null) {
releaseMediaRecorder();
}
}
@Override
public void onHostDestroy() {
// ... do nothing
}
private void resolveImage(final File imageFile, final int imgWidth, final int imgHeight, final Promise promise, boolean addToMediaStore) {
final WritableMap response = new WritableNativeMap();
response.putString("path", Uri.fromFile(imageFile).toString());
response.putInt("width", imgWidth);
response.putInt("height", imgHeight);
if (addToMediaStore) {
// borrowed from react-native CameraRollManager, it finds and returns the 'internal'
// representation of the image uri that was just saved.
// e.g. content://media/external/images/media/123
MediaScannerConnection.scanFile(
_reactContext,
new String[]{imageFile.getAbsolutePath()},
null,
new MediaScannerConnection.OnScanCompletedListener() {
@Override
public void onScanCompleted(String path, Uri uri) {
if (uri != null) {
response.putString("mediaUri", uri.toString());
}
promise.resolve(response);
}
});
} else {
promise.resolve(response);
}
}
}

View File

@ -0,0 +1,72 @@
package com.lwansbrough.RCTCamera;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.Camera;
import android.view.MotionEvent;
public class RCTCameraUtils {
private static final int FOCUS_AREA_MOTION_EVENT_EDGE_LENGTH = 100;
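// Camera.Area weights must fall within Android's allowed range of 1..1000; only one focus
// area is ever submitted here, so the maximum weight is used.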
private static final int FOCUS_AREA_WEIGHT = 1000;
/**
* Computes a Camera.Area corresponding to the new focus area to focus the camera on. This is
* done by deriving a square around the center of a MotionEvent pointer (with side length equal
* to FOCUS_AREA_MOTION_EVENT_EDGE_LENGTH), then transforming this rectangle's/square's
* coordinates into the (-1000, 1000) coordinate system used for camera focus areas.
*
* Also note that we operate on RectF instances for the most part, to avoid any integer
* division rounding errors going forward. We only round at the very end for playing into
* the final focus areas list.
*
* @throws RuntimeException if unable to compute valid intersection between MotionEvent region
* and SurfaceTexture region.
*/
protected static Camera.Area computeFocusAreaFromMotionEvent(final MotionEvent event, final int surfaceTextureWidth, final int surfaceTextureHeight) {
// Get position of first touch pointer.
final int pointerId = event.getPointerId(0);
final int pointerIndex = event.findPointerIndex(pointerId);
final float centerX = event.getX(pointerIndex);
final float centerY = event.getY(pointerIndex);
// Build event rect. Note that coordinates increase right and down, such that left <= right
// and top <= bottom.
final RectF eventRect = new RectF(
centerX - FOCUS_AREA_MOTION_EVENT_EDGE_LENGTH, // left
centerY - FOCUS_AREA_MOTION_EVENT_EDGE_LENGTH, // top
centerX + FOCUS_AREA_MOTION_EVENT_EDGE_LENGTH, // right
centerY + FOCUS_AREA_MOTION_EVENT_EDGE_LENGTH // bottom
);
// Intersect this rect with the rect corresponding to the full area of the parent surface
// texture, making sure we are not placing any amount of the eventRect outside the parent
// surface's area.
final RectF surfaceTextureRect = new RectF(
(float) 0, // left
(float) 0, // top
(float) surfaceTextureWidth, // right
(float) surfaceTextureHeight // bottom
);
final boolean intersectSuccess = eventRect.intersect(surfaceTextureRect);
if (!intersectSuccess) {
throw new RuntimeException(
"MotionEvent rect does not intersect with SurfaceTexture rect; unable to " +
"compute focus area"
);
}
// Transform into (-1000, 1000) focus area coordinate system. See
// https://developer.android.com/reference/android/hardware/Camera.Area.html.
// Note that if this is ever changed to a Rect instead of RectF, be cautious of integer
// division rounding!
final RectF focusAreaRect = new RectF(
(eventRect.left / surfaceTextureWidth) * 2000 - 1000, // left
(eventRect.top / surfaceTextureHeight) * 2000 - 1000, // top
(eventRect.right / surfaceTextureWidth) * 2000 - 1000, // right
(eventRect.bottom / surfaceTextureHeight) * 2000 - 1000 // bottom
);
Rect focusAreaRectRounded = new Rect();
focusAreaRect.round(focusAreaRectRounded);
return new Camera.Area(focusAreaRectRounded, FOCUS_AREA_WEIGHT);
}
}

View File

@ -0,0 +1,218 @@
/**
* Created by Fabrice Armisen (farmisen@gmail.com) on 1/3/16.
*/
package com.lwansbrough.RCTCamera;
import android.content.Context;
import android.hardware.SensorManager;
import android.view.OrientationEventListener;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.View;
import java.util.List;
public class RCTCameraView extends ViewGroup {
private final OrientationEventListener _orientationListener;
private final Context _context;
private RCTCameraViewFinder _viewFinder = null;
private int _actualDeviceOrientation = -1;
private int _aspect = RCTCameraModule.RCT_CAMERA_ASPECT_FIT;
private int _captureMode = RCTCameraModule.RCT_CAMERA_CAPTURE_MODE_STILL;
private String _captureQuality = "high";
private int _torchMode = -1;
private int _flashMode = -1;
private int _zoom = 0;
private boolean _clearWindowBackground = false;
public RCTCameraView(Context context) {
super(context);
this._context = context;
RCTCamera.createInstance(getDeviceOrientation(context));
_orientationListener = new OrientationEventListener(context, SensorManager.SENSOR_DELAY_NORMAL) {
@Override
public void onOrientationChanged(int orientation) {
if (setActualDeviceOrientation(_context)) {
layoutViewFinder();
}
}
};
if (_orientationListener.canDetectOrientation()) {
_orientationListener.enable();
} else {
_orientationListener.disable();
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
layoutViewFinder(left, top, right, bottom);
}
@Override
public void onViewAdded(View child) {
if (this._viewFinder == child) return;
// Remove and re-add the viewfinder to make sure it stays at the back of the z-order.
// @TODO figure out why there was a z-order issue in the first place and fix accordingly.
this.removeView(this._viewFinder);
this.addView(this._viewFinder, 0);
}
public void setAspect(int aspect) {
this._aspect = aspect;
layoutViewFinder();
}
public void setCameraType(final int type) {
if (null != this._viewFinder) {
this._viewFinder.setCameraType(type);
RCTCamera.getInstance().adjustPreviewLayout(type);
} else {
_viewFinder = new RCTCameraViewFinder(_context, type);
if (-1 != this._flashMode) {
_viewFinder.setFlashMode(this._flashMode);
}
if (-1 != this._torchMode) {
_viewFinder.setTorchMode(this._torchMode);
}
if (0 != this._zoom) {
_viewFinder.setZoom(this._zoom);
}
_viewFinder.setClearWindowBackground(this._clearWindowBackground);
addView(_viewFinder);
}
}
public void setCaptureMode(final int captureMode) {
this._captureMode = captureMode;
if (this._viewFinder != null) {
this._viewFinder.setCaptureMode(captureMode);
}
}
public void setCaptureQuality(String captureQuality) {
this._captureQuality = captureQuality;
if (this._viewFinder != null) {
this._viewFinder.setCaptureQuality(captureQuality);
}
}
public void setTorchMode(int torchMode) {
this._torchMode = torchMode;
if (this._viewFinder != null) {
this._viewFinder.setTorchMode(torchMode);
}
}
public void setFlashMode(int flashMode) {
this._flashMode = flashMode;
if (this._viewFinder != null) {
this._viewFinder.setFlashMode(flashMode);
}
}
public void setZoom(int zoom) {
this._zoom = zoom;
if (this._viewFinder != null) {
this._viewFinder.setZoom(zoom);
}
}
public void setOrientation(int orientation) {
RCTCamera.getInstance().setOrientation(orientation);
if (this._viewFinder != null) {
layoutViewFinder();
}
}
public void setBarcodeScannerEnabled(boolean barcodeScannerEnabled) {
RCTCamera.getInstance().setBarcodeScannerEnabled(barcodeScannerEnabled);
}
public void setBarCodeTypes(List<String> types) {
RCTCamera.getInstance().setBarCodeTypes(types);
}
public void setClearWindowBackground(boolean clearWindowBackground) {
this._clearWindowBackground = clearWindowBackground;
if (this._viewFinder != null) {
this._viewFinder.setClearWindowBackground(clearWindowBackground);
}
}
public void stopPreview() {
if (_viewFinder == null) return;
_viewFinder.stopPreview();
}
public void startPreview() {
if (_viewFinder == null) return;
_viewFinder.startPreview();
}
private boolean setActualDeviceOrientation(Context context) {
int actualDeviceOrientation = getDeviceOrientation(context);
if (_actualDeviceOrientation != actualDeviceOrientation) {
_actualDeviceOrientation = actualDeviceOrientation;
RCTCamera.getInstance().setActualDeviceOrientation(_actualDeviceOrientation);
return true;
} else {
return false;
}
}
private int getDeviceOrientation(Context context) {
return ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay().getOrientation();
}
private void layoutViewFinder() {
layoutViewFinder(this.getLeft(), this.getTop(), this.getRight(), this.getBottom());
}
private void layoutViewFinder(int left, int top, int right, int bottom) {
if (null == _viewFinder) {
return;
}
float width = right - left;
float height = bottom - top;
int viewfinderWidth;
int viewfinderHeight;
double ratio;
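// FIT letterboxes the preview inside the view (the whole frame stays visible), while FILL
// scales the preview up until it covers the view (edges may be cropped); in both cases the
// viewfinder is centered via the padding computed below.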
switch (this._aspect) {
case RCTCameraModule.RCT_CAMERA_ASPECT_FIT:
ratio = this._viewFinder.getRatio();
if (ratio * height > width) {
viewfinderHeight = (int) (width / ratio);
viewfinderWidth = (int) width;
} else {
viewfinderWidth = (int) (ratio * height);
viewfinderHeight = (int) height;
}
break;
case RCTCameraModule.RCT_CAMERA_ASPECT_FILL:
ratio = this._viewFinder.getRatio();
if (ratio * height < width) {
viewfinderHeight = (int) (width / ratio);
viewfinderWidth = (int) width;
} else {
viewfinderWidth = (int) (ratio * height);
viewfinderHeight = (int) height;
}
break;
default:
viewfinderWidth = (int) width;
viewfinderHeight = (int) height;
}
int viewFinderPaddingX = (int) ((width - viewfinderWidth) / 2);
int viewFinderPaddingY = (int) ((height - viewfinderHeight) / 2);
RCTCamera.getInstance().setPreviewVisibleSize(_viewFinder.getCameraType(), (int) width, (int) height);
this._viewFinder.layout(viewFinderPaddingX, viewFinderPaddingY, viewFinderPaddingX + viewfinderWidth, viewFinderPaddingY + viewfinderHeight);
this.postInvalidate(this.getLeft(), this.getTop(), this.getRight(), this.getBottom());
}
}

View File

@ -0,0 +1,566 @@
/**
* Created by Fabrice Armisen (farmisen@gmail.com) on 1/3/16.
*/
package com.lwansbrough.RCTCamera;
import android.app.Activity;
import android.content.Context;
import android.content.ContextWrapper;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.view.MotionEvent;
import android.view.TextureView;
import android.os.AsyncTask;
import android.util.Log;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import java.util.ArrayList;
import java.util.List;
import java.util.EnumMap;
import java.util.EnumSet;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.DecodeHintType;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.google.zxing.Result;
import com.google.zxing.ResultPoint;
import com.google.zxing.common.HybridBinarizer;
class RCTCameraViewFinder extends TextureView implements TextureView.SurfaceTextureListener, Camera.PreviewCallback {
private int _cameraType;
private int _captureMode;
private SurfaceTexture _surfaceTexture;
private int _surfaceTextureWidth;
private int _surfaceTextureHeight;
private boolean _isStarting;
private boolean _isStopping;
private Camera _camera;
private boolean _clearWindowBackground = false;
private float mFingerSpacing;
// concurrency lock for barcode scanner to avoid flooding the runtime
public static volatile boolean barcodeScannerTaskLock = false;
// reader instance for the barcode scanner
private final MultiFormatReader _multiFormatReader = new MultiFormatReader();
public RCTCameraViewFinder(Context context, int type) {
super(context);
this.setSurfaceTextureListener(this);
this._cameraType = type;
this.initBarcodeReader(RCTCamera.getInstance().getBarCodeTypes());
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
_surfaceTexture = surface;
_surfaceTextureWidth = width;
_surfaceTextureHeight = height;
startCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
_surfaceTextureWidth = width;
_surfaceTextureHeight = height;
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
_surfaceTexture = null;
_surfaceTextureWidth = 0;
_surfaceTextureHeight = 0;
stopCamera();
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
public int getCameraType() {
return _cameraType;
}
public double getRatio() {
int width = RCTCamera.getInstance().getPreviewWidth(this._cameraType);
int height = RCTCamera.getInstance().getPreviewHeight(this._cameraType);
return ((float) width) / ((float) height);
}
public void setCameraType(final int type) {
if (this._cameraType == type) {
return;
}
new Thread(new Runnable() {
@Override
public void run() {
stopPreview();
_cameraType = type;
startPreview();
}
}).start();
}
public void setCaptureMode(final int captureMode) {
RCTCamera.getInstance().setCaptureMode(_cameraType, captureMode);
this._captureMode = captureMode;
}
public void setCaptureQuality(String captureQuality) {
RCTCamera.getInstance().setCaptureQuality(_cameraType, captureQuality);
}
public void setTorchMode(int torchMode) {
RCTCamera.getInstance().setTorchMode(_cameraType, torchMode);
}
public void setFlashMode(int flashMode) {
RCTCamera.getInstance().setFlashMode(_cameraType, flashMode);
}
public void setClearWindowBackground(boolean clearWindowBackground) {
this._clearWindowBackground = clearWindowBackground;
}
public void setZoom(int zoom) {
RCTCamera.getInstance().setZoom(_cameraType, zoom);
}
public void startPreview() {
if (_surfaceTexture != null) {
startCamera();
}
}
public void stopPreview() {
if (_camera != null) {
stopCamera();
}
}
synchronized private void startCamera() {
if (!_isStarting) {
_isStarting = true;
try {
_camera = RCTCamera.getInstance().acquireCameraInstance(_cameraType);
Camera.Parameters parameters = _camera.getParameters();
final boolean isCaptureModeStill = (_captureMode == RCTCameraModule.RCT_CAMERA_CAPTURE_MODE_STILL);
final boolean isCaptureModeVideo = (_captureMode == RCTCameraModule.RCT_CAMERA_CAPTURE_MODE_VIDEO);
if (!isCaptureModeStill && !isCaptureModeVideo) {
throw new RuntimeException("Unsupported capture mode:" + _captureMode);
}
// Set auto-focus. Try to set to continuous picture/video, and fall back to general
// auto if available.
List<String> focusModes = parameters.getSupportedFocusModes();
if (isCaptureModeStill && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
} else if (isCaptureModeVideo && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
}
// set picture size
// defaults to max available size
List<Camera.Size> supportedSizes;
if (isCaptureModeStill) {
supportedSizes = parameters.getSupportedPictureSizes();
} else if (isCaptureModeVideo) {
supportedSizes = RCTCamera.getInstance().getSupportedVideoSizes(_camera);
} else {
throw new RuntimeException("Unsupported capture mode:" + _captureMode);
}
Camera.Size optimalPictureSize = RCTCamera.getInstance().getBestSize(
supportedSizes,
Integer.MAX_VALUE,
Integer.MAX_VALUE
);
parameters.setPictureSize(optimalPictureSize.width, optimalPictureSize.height);
try {
_camera.setParameters(parameters);
} catch (RuntimeException e) {
Log.e("RCTCameraViewFinder", "setParameters failed", e);
}
_camera.setPreviewTexture(_surfaceTexture);
_camera.startPreview();
// clear window background if needed
if (_clearWindowBackground) {
Activity activity = getActivity();
if (activity != null) {
activity.getWindow().setBackgroundDrawable(null);
}
}
// send previews to `onPreviewFrame`
_camera.setPreviewCallback(this);
} catch (NullPointerException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
stopCamera();
} finally {
_isStarting = false;
}
}
}
synchronized private void stopCamera() {
if (!_isStopping) {
_isStopping = true;
try {
if (_camera != null) {
_camera.stopPreview();
// stop sending previews to `onPreviewFrame`
_camera.setPreviewCallback(null);
RCTCamera.getInstance().releaseCameraInstance(_cameraType);
_camera = null;
}
} catch (Exception e) {
e.printStackTrace();
} finally {
_isStopping = false;
}
}
}
private Activity getActivity() {
Context context = getContext();
while (context instanceof ContextWrapper) {
if (context instanceof Activity) {
return (Activity)context;
}
context = ((ContextWrapper)context).getBaseContext();
}
return null;
}
/**
* Parse barcodes as BarcodeFormat constants.
*
* Supports all iOS codes except [code39mod43, itf14]
*
* Additionally supports [codabar, maxicode, rss14, rssexpanded, upca, upceanextension]
*/
private BarcodeFormat parseBarCodeString(String c) {
if ("aztec".equals(c)) {
return BarcodeFormat.AZTEC;
} else if ("ean13".equals(c)) {
return BarcodeFormat.EAN_13;
} else if ("ean8".equals(c)) {
return BarcodeFormat.EAN_8;
} else if ("qr".equals(c)) {
return BarcodeFormat.QR_CODE;
} else if ("pdf417".equals(c)) {
return BarcodeFormat.PDF_417;
} else if ("upce".equals(c)) {
return BarcodeFormat.UPC_E;
} else if ("datamatrix".equals(c)) {
return BarcodeFormat.DATA_MATRIX;
} else if ("code39".equals(c)) {
return BarcodeFormat.CODE_39;
} else if ("code93".equals(c)) {
return BarcodeFormat.CODE_93;
} else if ("interleaved2of5".equals(c)) {
return BarcodeFormat.ITF;
} else if ("codabar".equals(c)) {
return BarcodeFormat.CODABAR;
} else if ("code128".equals(c)) {
return BarcodeFormat.CODE_128;
} else if ("maxicode".equals(c)) {
return BarcodeFormat.MAXICODE;
} else if ("rss14".equals(c)) {
return BarcodeFormat.RSS_14;
} else if ("rssexpanded".equals(c)) {
return BarcodeFormat.RSS_EXPANDED;
} else if ("upca".equals(c)) {
return BarcodeFormat.UPC_A;
} else if ("upceanextension".equals(c)) {
return BarcodeFormat.UPC_EAN_EXTENSION;
} else {
android.util.Log.v("RCTCamera", "Unsupported barcode type: [" + c + "]");
return null;
}
}
/**
* Initialize the barcode decoder.
*/
private void initBarcodeReader(List<String> barCodeTypes) {
EnumMap<DecodeHintType, Object> hints = new EnumMap<>(DecodeHintType.class);
EnumSet<BarcodeFormat> decodeFormats = EnumSet.noneOf(BarcodeFormat.class);
if (barCodeTypes != null) {
for (String code : barCodeTypes) {
BarcodeFormat format = parseBarCodeString(code);
if (format != null) {
decodeFormats.add(format);
}
}
}
hints.put(DecodeHintType.POSSIBLE_FORMATS, decodeFormats);
_multiFormatReader.setHints(hints);
}
/**
* Spawn a barcode reader task if
* - the barcode scanner is enabled (has a onBarCodeRead function)
* - one isn't already running
*
* See {Camera.PreviewCallback}
*/
public void onPreviewFrame(byte[] data, Camera camera) {
if (RCTCamera.getInstance().isBarcodeScannerEnabled() && !RCTCameraViewFinder.barcodeScannerTaskLock) {
RCTCameraViewFinder.barcodeScannerTaskLock = true;
new ReaderAsyncTask(camera, data).execute();
}
}
private class ReaderAsyncTask extends AsyncTask<Void, Void, Void> {
private byte[] imageData;
private final Camera camera;
ReaderAsyncTask(Camera camera, byte[] imageData) {
this.camera = camera;
this.imageData = imageData;
}
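/**
* Attempt a single ZXing decode of the current frame, optionally on the inverted luminance
* image (which helps with white-on-black barcodes). Returns null when nothing was decoded.
*/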
private Result getBarcode(int width, int height, boolean inverse) {
try {
PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(imageData, width, height, 0, 0, width, height, false);
BinaryBitmap bitmap;
if (inverse) {
bitmap = new BinaryBitmap(new HybridBinarizer(source.invert()));
} else {
bitmap = new BinaryBitmap(new HybridBinarizer(source));
}
return _multiFormatReader.decodeWithState(bitmap);
} catch (Throwable t) {
// decode failed; no barcode in this frame
} finally {
_multiFormatReader.reset();
}
return null;
}
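/**
* Try decoding the frame as-is, then inverted, then rotated 90 degrees (plus its inverse),
* since one-dimensional formats in particular are orientation-sensitive.
*/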
private Result getBarcodeAnyOrientation() {
Camera.Size size = camera.getParameters().getPreviewSize();
int width = size.width;
int height = size.height;
Result result = getBarcode(width, height, false);
if (result != null) {
return result;
}
// inverse
result = getBarcode(width, height, true);
if (result != null) {
return result;
}
// rotate
rotateImage(width, height);
width = size.height;
height = size.width;
result = getBarcode(width, height, false);
if (result != null) {
return result;
}
return getBarcode(width, height, true);
}
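// Rotate the luminance (Y) plane of the preview frame 90 degrees; the chroma bytes are left
// zeroed in the rotated buffer, which is acceptable because the ZXing luminance source above
// never reads them.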
private void rotateImage(int width, int height) {
byte[] rotated = new byte[imageData.length];
for (int y = 0; y < width; y++) {
for (int x = 0; x < height; x++) {
int sourceIx = x + y * height;
int destIx = x * width + width - y - 1;
if (sourceIx >= 0 && sourceIx < imageData.length && destIx >= 0 && destIx < imageData.length) {
rotated[destIx] = imageData[sourceIx];
}
}
}
imageData = rotated;
}
@Override
protected Void doInBackground(Void... ignored) {
if (isCancelled()) {
return null;
}
try {
// rotate for zxing if orientation is portrait
Result result = getBarcodeAnyOrientation();
if (result == null) {
// nothing decoded in any orientation; bail out via the catch below
throw new Exception();
}
ReactContext reactContext = RCTCameraModule.getReactContextSingleton();
WritableMap event = Arguments.createMap();
WritableArray resultPoints = Arguments.createArray();
ResultPoint[] points = result.getResultPoints();
if (points != null) {
for (ResultPoint point : points) {
WritableMap newPoint = Arguments.createMap();
newPoint.putString("x", String.valueOf(point.getX()));
newPoint.putString("y", String.valueOf(point.getY()));
resultPoints.pushMap(newPoint);
}
}
event.putArray("bounds", resultPoints);
event.putString("data", result.getText());
event.putString("type", result.getBarcodeFormat().toString());
reactContext.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class).emit("CameraBarCodeReadAndroid", event);
} catch (Throwable t) {
// decode failed or no barcode was present in this frame; ignore
} finally {
_multiFormatReader.reset();
RCTCameraViewFinder.barcodeScannerTaskLock = false;
}
return null;
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
// Fast swiping and touching while component is being loaded can cause _camera to be null.
if (_camera == null) {
return false;
}
// Get the pointer ID
Camera.Parameters params = _camera.getParameters();
int action = event.getAction();
if (event.getPointerCount() > 1) {
// handle multi-touch events
if (action == MotionEvent.ACTION_POINTER_DOWN) {
mFingerSpacing = getFingerSpacing(event);
} else if (action == MotionEvent.ACTION_MOVE && params.isZoomSupported()) {
_camera.cancelAutoFocus();
handleZoom(event, params);
}
} else {
// handle single touch events
if (action == MotionEvent.ACTION_UP) {
handleFocus(event, params);
}
}
return true;
}
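// Step the zoom level by one per ACTION_MOVE, depending on whether the pinch spread grew or
// shrank since the previous event.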
private void handleZoom(MotionEvent event, Camera.Parameters params) {
int maxZoom = params.getMaxZoom();
int zoom = params.getZoom();
float newDist = getFingerSpacing(event);
if (newDist > mFingerSpacing) {
// zoom in
if (zoom < maxZoom) {
zoom++;
}
} else if (newDist < mFingerSpacing) {
// zoom out
if (zoom > 0) {
zoom--;
}
}
mFingerSpacing = newDist;
params.setZoom(zoom);
try {
_camera.setParameters(params);
} catch (RuntimeException e) {
Log.e("RCTCameraViewFinder", "setParameters failed", e);
}
}
/**
* Handles setting focus to the location of the event.
*
* Note that this will override the focus mode on the camera to FOCUS_MODE_AUTO if available,
* even if this was previously something else (such as FOCUS_MODE_CONTINUOUS_*; see also
* {@link #startCamera()}). However, this makes sense: after the user has initiated a
* specific focus intent, we shouldn't refocus and override their request.
*/
public void handleFocus(MotionEvent event, Camera.Parameters params) {
List<String> supportedFocusModes = params.getSupportedFocusModes();
if (supportedFocusModes != null && supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
// Ensure focus areas are enabled. If max num focus areas is 0, then focus area is not
// supported, so we cannot do anything here.
if (params.getMaxNumFocusAreas() == 0) {
return;
}
// Cancel any previous focus actions.
_camera.cancelAutoFocus();
// Compute focus area rect.
Camera.Area focusAreaFromMotionEvent;
try {
focusAreaFromMotionEvent = RCTCameraUtils.computeFocusAreaFromMotionEvent(event, _surfaceTextureWidth, _surfaceTextureHeight);
} catch (final RuntimeException e) {
e.printStackTrace();
return;
}
// Set focus mode to auto.
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
// Set focus area.
final ArrayList<Camera.Area> focusAreas = new ArrayList<Camera.Area>();
focusAreas.add(focusAreaFromMotionEvent);
params.setFocusAreas(focusAreas);
// Also set metering area if enabled. If max num metering areas is 0, then metering area
// is not supported. We can usually safely omit this anyway, though.
if (params.getMaxNumMeteringAreas() > 0) {
params.setMeteringAreas(focusAreas);
}
// Set parameters before starting auto-focus.
try {
_camera.setParameters(params);
} catch (RuntimeException e) {
Log.e("RCTCameraViewFinder", "setParameters failed", e);
}
// Start auto-focus now that focus area has been set. If successful, then can cancel
// it afterwards. Wrap in try-catch to avoid crashing on merely autoFocus fails.
try {
_camera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
if (success) {
camera.cancelAutoFocus();
}
}
});
} catch (Exception e) {
e.printStackTrace();
}
}
}
/** Determine the space between the first two fingers */
private float getFingerSpacing(MotionEvent event) {
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return (float) Math.sqrt(x * x + y * y);
}
}

View File

@ -0,0 +1,133 @@
package com.lwansbrough.RCTCamera;
import androidx.annotation.Nullable;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.uimanager.*;
import com.facebook.react.uimanager.annotations.ReactProp;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
public class RCTCameraViewManager extends ViewGroupManager<RCTCameraView> {
private static final String REACT_CLASS = "RCTCamera";
public static final int COMMAND_STOP_PREVIEW = 1;
public static final int COMMAND_START_PREVIEW = 2;
@Override
public String getName() {
return REACT_CLASS;
}
@Override
public RCTCameraView createViewInstance(ThemedReactContext context) {
return new RCTCameraView(context);
}
@Override
public Map<String, Integer> getCommandsMap() {
return MapBuilder.of(
"stopPreview",
COMMAND_STOP_PREVIEW,
"startPreview",
COMMAND_START_PREVIEW);
}
@Override
public void receiveCommand(RCTCameraView view, int commandType, @Nullable ReadableArray args) {
if (view == null) {
throw new AssertionError();
}
switch (commandType) {
case COMMAND_STOP_PREVIEW: {
view.stopPreview();
return;
}
case COMMAND_START_PREVIEW: {
view.startPreview();
return;
}
default:
throw new IllegalArgumentException(
String.format("Unsupported command %d received by %s.", commandType, getClass().getSimpleName()));
}
}
@ReactProp(name = "aspect")
public void setAspect(RCTCameraView view, int aspect) {
view.setAspect(aspect);
}
@ReactProp(name = "captureMode")
public void setCaptureMode(RCTCameraView view, final int captureMode) {
// Note that in practice this only performs any additional setup necessary for each mode;
// whether a still is captured or a video is recorded when capture() is called is still
// ultimately decided by what is in the options sent to capture().
view.setCaptureMode(captureMode);
}
@ReactProp(name = "captureTarget")
public void setCaptureTarget(RCTCameraView view, int captureTarget) {
// No reason to handle this prop's value here, since it is passed again to the RCTCameraModule capture method.
}
@ReactProp(name = "type")
public void setType(RCTCameraView view, int type) {
view.setCameraType(type);
}
@ReactProp(name = "captureQuality")
public void setCaptureQuality(RCTCameraView view, String captureQuality) {
view.setCaptureQuality(captureQuality);
}
@ReactProp(name = "torchMode")
public void setTorchMode(RCTCameraView view, int torchMode) {
view.setTorchMode(torchMode);
}
@ReactProp(name = "flashMode")
public void setFlashMode(RCTCameraView view, int flashMode) {
view.setFlashMode(flashMode);
}
@ReactProp(name = "zoom")
public void setZoom(RCTCameraView view, int zoom) {
view.setZoom(zoom);
}
@ReactProp(name = "orientation")
public void setOrientation(RCTCameraView view, int orientation) {
view.setOrientation(orientation);
}
@ReactProp(name = "captureAudio")
public void setCaptureAudio(RCTCameraView view, boolean captureAudio) {
// TODO - implement video mode
}
@ReactProp(name = "barcodeScannerEnabled")
public void setBarcodeScannerEnabled(RCTCameraView view, boolean barcodeScannerEnabled) {
view.setBarcodeScannerEnabled(barcodeScannerEnabled);
}
@ReactProp(name = "barCodeTypes")
public void setBarCodeTypes(RCTCameraView view, ReadableArray barCodeTypes) {
if (barCodeTypes == null) {
return;
}
List<String> result = new ArrayList<String>(barCodeTypes.size());
for (int i = 0; i < barCodeTypes.size(); i++) {
result.add(barCodeTypes.getString(i));
}
view.setBarCodeTypes(result);
}
@ReactProp(name = "clearWindowBackground")
public void setClearWindowBackground(RCTCameraView view, boolean clearWindowBackground) {
view.setClearWindowBackground(clearWindowBackground);
}
}

View File

@ -0,0 +1,85 @@
/**
* Created by rpopovici on 23/03/16.
*/
package com.lwansbrough.RCTCamera;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.view.Surface;
import com.facebook.react.bridge.ReactApplicationContext;
interface RCTSensorOrientationListener {
void orientationEvent();
}
public class RCTSensorOrientationChecker {
int mOrientation = 0;
private SensorEventListener mSensorEventListener;
private SensorManager mSensorManager;
private RCTSensorOrientationListener mListener = null;
public RCTSensorOrientationChecker(ReactApplicationContext reactContext) {
mSensorEventListener = new Listener();
mSensorManager = (SensorManager) reactContext.getSystemService(Context.SENSOR_SERVICE);
}
/**
* Call on activity onResume()
*/
public void onResume() {
mSensorManager.registerListener(mSensorEventListener, mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_NORMAL);
}
/**
* Call on activity onPause()
*/
public void onPause() {
mSensorManager.unregisterListener(mSensorEventListener);
}
private class Listener implements SensorEventListener {
@Override
public void onSensorChanged(SensorEvent event) {
float x = event.values[0];
float y = event.values[1];
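// Classify the orientation from gravity along the device's X/Y axes (roughly +/-9.8 m/s^2 on
// the axis pointing down); the +/-5 thresholds leave a dead zone around diagonal positions so
// the reading only flips once the device is clearly rotated.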
if (x < 5 && x > -5 && y > 5)
mOrientation = Surface.ROTATION_0; // portrait
else if (x < -5 && y < 5 && y > -5)
mOrientation = Surface.ROTATION_270; // landscape right
else if (x < 5 && x > -5 && y < -5)
mOrientation = Surface.ROTATION_180; // portrait upside down
else if (x > 5 && y < 5 && y > -5)
mOrientation = Surface.ROTATION_90; // landscape left
if (mListener != null) {
mListener.orientationEvent();
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
}
}
public int getOrientation() {
return mOrientation;
}
public void registerOrientationListener(RCTSensorOrientationListener listener) {
this.mListener = listener;
}
public void unregisterOrientationListener() {
mListener = null;
}
}

View File

@ -0,0 +1,425 @@
package org.reactnative.camera;
import android.Manifest;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.util.Log;
import android.widget.Toast;
import com.facebook.react.bridge.*;
import com.facebook.react.common.build.ReactBuildConfig;
import com.facebook.react.uimanager.NativeViewHierarchyManager;
import com.facebook.react.uimanager.UIBlock;
import com.facebook.react.uimanager.UIManagerModule;
import com.google.android.cameraview.AspectRatio;
import com.google.zxing.BarcodeFormat;
import org.reactnative.barcodedetector.BarcodeFormatUtils;
import org.reactnative.camera.utils.ScopedContext;
import org.reactnative.facedetector.RNFaceDetector;
import com.google.android.cameraview.Size;
import javax.annotation.Nullable;
import java.io.File;
import java.util.Collections;
import java.util.Properties;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
import java.util.SortedSet;
public class CameraModule extends ReactContextBaseJavaModule {
private static final String TAG = "CameraModule";
private ScopedContext mScopedContext;
static final int VIDEO_2160P = 0;
static final int VIDEO_1080P = 1;
static final int VIDEO_720P = 2;
static final int VIDEO_480P = 3;
static final int VIDEO_4x3 = 4;
static final int GOOGLE_VISION_BARCODE_MODE_NORMAL = 0;
static final int GOOGLE_VISION_BARCODE_MODE_ALTERNATE = 1;
static final int GOOGLE_VISION_BARCODE_MODE_INVERTED = 2;
public static final Map<String, Object> VALID_BARCODE_TYPES =
Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("aztec", BarcodeFormat.AZTEC.toString());
put("ean13", BarcodeFormat.EAN_13.toString());
put("ean8", BarcodeFormat.EAN_8.toString());
put("qr", BarcodeFormat.QR_CODE.toString());
put("pdf417", BarcodeFormat.PDF_417.toString());
put("upc_e", BarcodeFormat.UPC_E.toString());
put("datamatrix", BarcodeFormat.DATA_MATRIX.toString());
put("code39", BarcodeFormat.CODE_39.toString());
put("code93", BarcodeFormat.CODE_93.toString());
put("interleaved2of5", BarcodeFormat.ITF.toString());
put("codabar", BarcodeFormat.CODABAR.toString());
put("code128", BarcodeFormat.CODE_128.toString());
put("maxicode", BarcodeFormat.MAXICODE.toString());
put("rss14", BarcodeFormat.RSS_14.toString());
put("rssexpanded", BarcodeFormat.RSS_EXPANDED.toString());
put("upc_a", BarcodeFormat.UPC_A.toString());
put("upc_ean", BarcodeFormat.UPC_EAN_EXTENSION.toString());
}
});
public CameraModule(ReactApplicationContext reactContext) {
super(reactContext);
mScopedContext = new ScopedContext(reactContext);
}
public ScopedContext getScopedContext() {
return mScopedContext;
}
@Override
public String getName() {
return "RNCameraModule";
}
@Nullable
@Override
public Map<String, Object> getConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("Type", getTypeConstants());
put("FlashMode", getFlashModeConstants());
put("AutoFocus", getAutoFocusConstants());
put("WhiteBalance", getWhiteBalanceConstants());
put("VideoQuality", getVideoQualityConstants());
put("BarCodeType", getBarCodeConstants());
put("FaceDetection", Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("Mode", getFaceDetectionModeConstants());
put("Landmarks", getFaceDetectionLandmarksConstants());
put("Classifications", getFaceDetectionClassificationsConstants());
}
private Map<String, Object> getFaceDetectionModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("fast", RNFaceDetector.FAST_MODE);
put("accurate", RNFaceDetector.ACCURATE_MODE);
}
});
}
private Map<String, Object> getFaceDetectionClassificationsConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", RNFaceDetector.ALL_CLASSIFICATIONS);
put("none", RNFaceDetector.NO_CLASSIFICATIONS);
}
});
}
private Map<String, Object> getFaceDetectionLandmarksConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("all", RNFaceDetector.ALL_LANDMARKS);
put("none", RNFaceDetector.NO_LANDMARKS);
}
});
}
}));
put("GoogleVisionBarcodeDetection", Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("BarcodeType", BarcodeFormatUtils.REVERSE_FORMATS);
put("BarcodeMode", getGoogleVisionBarcodeModeConstants());
}
}));
put("Orientation", Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("auto", Constants.ORIENTATION_AUTO);
put("portrait", Constants.ORIENTATION_UP);
put("portraitUpsideDown", Constants.ORIENTATION_DOWN);
put("landscapeLeft", Constants.ORIENTATION_LEFT);
put("landscapeRight", Constants.ORIENTATION_RIGHT);
}
}));
}
private Map<String, Object> getTypeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("front", Constants.FACING_FRONT);
put("back", Constants.FACING_BACK);
}
});
}
private Map<String, Object> getFlashModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("off", Constants.FLASH_OFF);
put("on", Constants.FLASH_ON);
put("auto", Constants.FLASH_AUTO);
put("torch", Constants.FLASH_TORCH);
}
});
}
private Map<String, Object> getAutoFocusConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("on", true);
put("off", false);
}
});
}
private Map<String, Object> getWhiteBalanceConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("auto", Constants.WB_AUTO);
put("cloudy", Constants.WB_CLOUDY);
put("sunny", Constants.WB_SUNNY);
put("shadow", Constants.WB_SHADOW);
put("fluorescent", Constants.WB_FLUORESCENT);
put("incandescent", Constants.WB_INCANDESCENT);
}
});
}
private Map<String, Object> getVideoQualityConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("2160p", VIDEO_2160P);
put("1080p", VIDEO_1080P);
put("720p", VIDEO_720P);
put("480p", VIDEO_480P);
put("4:3", VIDEO_4x3);
}
});
}
private Map<String, Object> getGoogleVisionBarcodeModeConstants() {
return Collections.unmodifiableMap(new HashMap<String, Object>() {
{
put("NORMAL", GOOGLE_VISION_BARCODE_MODE_NORMAL);
put("ALTERNATE", GOOGLE_VISION_BARCODE_MODE_ALTERNATE);
put("INVERTED", GOOGLE_VISION_BARCODE_MODE_INVERTED);
}
});
}
private Map<String, Object> getBarCodeConstants() {
return VALID_BARCODE_TYPES;
}
});
}
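// Every @ReactMethod below resolves the RNCameraView from its React view tag
// inside a UIBlock (UI-thread safe) before forwarding the call and its Promise.
// JS-side sketch for reading the constants exported above (illustrative only):
//   import { NativeModules } from 'react-native';
//   const { FlashMode } = NativeModules.RNCameraModule; // { off: 0, on: 1, auto: 3, torch: 2 }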
@ReactMethod
public void pausePreview(final int viewTag) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
if (cameraView.isCameraOpened()) {
cameraView.pausePreview();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
@ReactMethod
public void resumePreview(final int viewTag) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
if (cameraView.isCameraOpened()) {
cameraView.resumePreview();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
@ReactMethod
public void takePicture(final ReadableMap options, final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
final File cacheDirectory = mScopedContext.getCacheDirectory();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
RNCameraView cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
try {
if (cameraView.isCameraOpened()) {
cameraView.takePicture(options, promise, cacheDirectory);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
}
catch (Exception e) {
promise.reject("E_TAKE_PICTURE_FAILED", e.getMessage());
}
}
});
}
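// JS-side sketch (names per the usual RNCamera wrapper; illustrative only):
//   const data = await camera.takePictureAsync({ quality: 0.8, fastMode: true });
// With fastMode the promise resolves immediately (see onPictureTaken below) and
// the final result arrives through the onPictureSaved event instead.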
@ReactMethod
public void record(final ReadableMap options, final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
final File cacheDirectory = mScopedContext.getCacheDirectory();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
if (cameraView.isCameraOpened()) {
cameraView.record(options, promise, cacheDirectory);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} catch (Exception e) {
promise.reject("E_CAPTURE_FAILED", e.getMessage());
}
}
});
}
@ReactMethod
public void stopRecording(final int viewTag) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
if (cameraView.isCameraOpened()) {
cameraView.stopRecording();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
@ReactMethod
public void getSupportedRatios(final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
WritableArray result = Arguments.createArray();
if (cameraView.isCameraOpened()) {
Set<AspectRatio> ratios = cameraView.getSupportedAspectRatios();
for (AspectRatio ratio : ratios) {
result.pushString(ratio.toString());
}
promise.resolve(result);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} catch (Exception e) {
e.printStackTrace();
promise.reject("E_CAMERA_BAD_VIEWTAG", "getSupportedRatiosAsync: Expected a Camera component");
}
}
});
}
@ReactMethod
public void getCameraIds(final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
WritableArray result = Arguments.createArray();
List<Properties> ids = cameraView.getCameraIds();
for (Properties p : ids) {
WritableMap m = new WritableNativeMap();
m.putString("id", p.getProperty("id"));
m.putInt("type", Integer.valueOf(p.getProperty("type")));
result.pushMap(m);
}
promise.resolve(result);
} catch (Exception e) {
e.printStackTrace();
promise.reject("E_CAMERA_FAILED", e.getMessage());
}
}
});
}
@ReactMethod
public void getAvailablePictureSizes(final String ratio, final int viewTag, final Promise promise) {
final ReactApplicationContext context = getReactApplicationContext();
UIManagerModule uiManager = context.getNativeModule(UIManagerModule.class);
uiManager.addUIBlock(new UIBlock() {
@Override
public void execute(NativeViewHierarchyManager nativeViewHierarchyManager) {
final RNCameraView cameraView;
try {
cameraView = (RNCameraView) nativeViewHierarchyManager.resolveView(viewTag);
WritableArray result = Arguments.createArray();
if (cameraView.isCameraOpened()) {
SortedSet<Size> sizes = cameraView.getAvailablePictureSizes(AspectRatio.parse(ratio));
for (Size size : sizes) {
result.pushString(size.toString());
}
promise.resolve(result);
} else {
promise.reject("E_CAMERA_UNAVAILABLE", "Camera is not running");
}
} catch (Exception e) {
promise.reject("E_CAMERA_BAD_VIEWTAG", "getAvailablePictureSizesAsync: Expected a Camera component");
}
}
});
}
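// Scans the manifest's requested permissions so JS can warn before attempting
// an audio recording that would fail without RECORD_AUDIO declared.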
@ReactMethod
public void checkIfRecordAudioPermissionsAreDefined(final Promise promise) {
try {
PackageInfo info = getCurrentActivity().getPackageManager().getPackageInfo(getReactApplicationContext().getPackageName(), PackageManager.GET_PERMISSIONS);
if (info.requestedPermissions != null) {
for (String p : info.requestedPermissions) {
if (p.equals(Manifest.permission.RECORD_AUDIO)) {
promise.resolve(true);
return;
}
}
}
} catch (Exception e) {
e.printStackTrace();
}
promise.resolve(false);
}
}

View File

@ -0,0 +1,226 @@
package org.reactnative.camera;
import androidx.annotation.Nullable;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.google.android.cameraview.AspectRatio;
import com.google.android.cameraview.Size;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class CameraViewManager extends ViewGroupManager<RNCameraView> {
public enum Events {
EVENT_CAMERA_READY("onCameraReady"),
EVENT_PREVIEW_COLOR("onPreviewColor"),
EVENT_ON_MOUNT_ERROR("onMountError"),
EVENT_ON_BAR_CODE_READ("onBarCodeRead"),
EVENT_ON_FACES_DETECTED("onFacesDetected"),
EVENT_ON_BARCODES_DETECTED("onGoogleVisionBarcodesDetected"),
EVENT_ON_FACE_DETECTION_ERROR("onFaceDetectionError"),
EVENT_ON_BARCODE_DETECTION_ERROR("onGoogleVisionBarcodeDetectionError"),
EVENT_ON_TEXT_RECOGNIZED("onTextRecognized"),
EVENT_ON_PICTURE_TAKEN("onPictureTaken"),
EVENT_ON_PICTURE_SAVED("onPictureSaved"),
EVENT_ON_RECORDING_START("onRecordingStart"),
EVENT_ON_RECORDING_END("onRecordingEnd");
private final String mName;
Events(final String name) {
mName = name;
}
@Override
public String toString() {
return mName;
}
}
private static final String REACT_CLASS = "RNCamera";
@Override
public void onDropViewInstance(RNCameraView view) {
view.onHostDestroy();
super.onDropViewInstance(view);
}
@Override
public String getName() {
return REACT_CLASS;
}
@Override
protected RNCameraView createViewInstance(ThemedReactContext themedReactContext) {
return new RNCameraView(themedReactContext);
}
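// Exports every Events entry as a direct event, so a JS prop with the same
// name (e.g. onBarCodeRead) receives it without extra mapping.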
@Override
@Nullable
public Map<String, Object> getExportedCustomDirectEventTypeConstants() {
MapBuilder.Builder<String, Object> builder = MapBuilder.builder();
for (Events event : Events.values()) {
builder.put(event.toString(), MapBuilder.of("registrationName", event.toString()));
}
return builder.build();
}
@ReactProp(name = "type")
public void setType(RNCameraView view, int type) {
view.setFacing(type);
}
@ReactProp(name = "cameraId")
public void setCameraId(RNCameraView view, String id) {
view.setCameraId(id);
}
@ReactProp(name = "ratio")
public void setRatio(RNCameraView view, String ratio) {
view.setAspectRatio(AspectRatio.parse(ratio));
}
@ReactProp(name = "flashMode")
public void setFlashMode(RNCameraView view, int torchMode) {
view.setFlash(torchMode);
}
@ReactProp(name = "exposure")
public void setExposureCompensation(RNCameraView view, float exposure) {
view.setExposureCompensation(exposure);
}
@ReactProp(name = "autoFocus")
public void setAutoFocus(RNCameraView view, boolean autoFocus) {
view.setAutoFocus(autoFocus);
}
@ReactProp(name = "focusDepth")
public void setFocusDepth(RNCameraView view, float depth) {
view.setFocusDepth(depth);
}
@ReactProp(name = "autoFocusPointOfInterest")
public void setAutoFocusPointOfInterest(RNCameraView view, ReadableMap coordinates) {
if (coordinates != null) {
float x = (float) coordinates.getDouble("x");
float y = (float) coordinates.getDouble("y");
view.setAutoFocusPointOfInterest(x, y);
}
}
@ReactProp(name = "zoom")
public void setZoom(RNCameraView view, float zoom) {
view.setZoom(zoom);
}
@ReactProp(name = "whiteBalance")
public void setWhiteBalance(RNCameraView view, int whiteBalance) {
view.setWhiteBalance(whiteBalance);
}
@ReactProp(name = "pictureSize")
public void setPictureSize(RNCameraView view, String size) {
view.setPictureSize(size.equals("None") ? null : Size.parse(size));
}
@ReactProp(name = "playSoundOnCapture")
public void setPlaySoundOnCapture(RNCameraView view, boolean playSoundOnCapture) {
view.setPlaySoundOnCapture(playSoundOnCapture);
}
@ReactProp(name = "barCodeTypes")
public void setBarCodeTypes(RNCameraView view, ReadableArray barCodeTypes) {
if (barCodeTypes == null) {
return;
}
List<String> result = new ArrayList<>(barCodeTypes.size());
for (int i = 0; i < barCodeTypes.size(); i++) {
result.add(barCodeTypes.getString(i));
}
view.setBarCodeTypes(result);
}
@ReactProp(name = "barCodeScannerEnabled")
public void setBarCodeScanning(RNCameraView view, boolean barCodeScannerEnabled) {
view.setShouldScanBarCodes(barCodeScannerEnabled);
}
@ReactProp(name = "useCamera2Api")
public void setUseCamera2Api(RNCameraView view, boolean useCamera2Api) {
view.setUsingCamera2Api(useCamera2Api);
}
@ReactProp(name = "faceDetectorEnabled")
public void setFaceDetecting(RNCameraView view, boolean faceDetectorEnabled) {
view.setShouldDetectFaces(faceDetectorEnabled);
}
@ReactProp(name = "faceDetectionMode")
public void setFaceDetectionMode(RNCameraView view, int mode) {
view.setFaceDetectionMode(mode);
}
@ReactProp(name = "faceDetectionLandmarks")
public void setFaceDetectionLandmarks(RNCameraView view, int landmarks) {
view.setFaceDetectionLandmarks(landmarks);
}
@ReactProp(name = "faceDetectionClassifications")
public void setFaceDetectionClassifications(RNCameraView view, int classifications) {
view.setFaceDetectionClassifications(classifications);
}
@ReactProp(name = "trackingEnabled")
public void setTracking(RNCameraView view, boolean trackingEnabled) {
view.setTracking(trackingEnabled);
}
@ReactProp(name = "googleVisionBarcodeDetectorEnabled")
public void setGoogleVisionBarcodeDetecting(RNCameraView view, boolean googleBarcodeDetectorEnabled) {
view.setShouldGoogleDetectBarcodes(googleBarcodeDetectorEnabled);
}
@ReactProp(name = "googleVisionBarcodeType")
public void setGoogleVisionBarcodeType(RNCameraView view, int barcodeType) {
view.setGoogleVisionBarcodeType(barcodeType);
}
@ReactProp(name = "googleVisionBarcodeMode")
public void setGoogleVisionBarcodeMode(RNCameraView view, int barcodeMode) {
view.setGoogleVisionBarcodeMode(barcodeMode);
}
@ReactProp(name = "textRecognizerEnabled")
public void setTextRecognizing(RNCameraView view, boolean textRecognizerEnabled) {
view.setShouldRecognizeText(textRecognizerEnabled);
}
/**---limit scan area addition---**/
@ReactProp(name = "rectOfInterest")
public void setRectOfInterest(RNCameraView view, ReadableMap coordinates) {
if (coordinates != null) {
float x = (float) coordinates.getDouble("x");
float y = (float) coordinates.getDouble("y");
float width = (float) coordinates.getDouble("width");
float height = (float) coordinates.getDouble("height");
view.setRectOfInterest(x, y, width, height);
}
}
@ReactProp(name = "cameraViewDimensions")
public void setCameraViewDimensions(RNCameraView view, ReadableMap dimensions) {
if (dimensions != null) {
int cameraViewWidth = (int) dimensions.getDouble("width");
int cameraViewHeight = (int) dimensions.getDouble("height");
view.setCameraViewDimensions(cameraViewWidth, cameraViewHeight);
}
}
/**---limit scan area addition---**/
}

View File

@ -0,0 +1,48 @@
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.reactnative.camera;
import com.google.android.cameraview.AspectRatio;
public interface Constants {
AspectRatio DEFAULT_ASPECT_RATIO = AspectRatio.of(4, 3);
int FACING_BACK = 0;
int FACING_FRONT = 1;
int FLASH_OFF = 0;
int FLASH_ON = 1;
int FLASH_TORCH = 2;
int FLASH_AUTO = 3;
int FLASH_RED_EYE = 4;
int LANDSCAPE_90 = 90;
int LANDSCAPE_270 = 270;
int WB_AUTO = 0;
int WB_CLOUDY = 1;
int WB_SUNNY = 2;
int WB_SHADOW = 3;
int WB_FLUORESCENT = 4;
int WB_INCANDESCENT = 5;
int ORIENTATION_AUTO = 0;
int ORIENTATION_UP = 1;
int ORIENTATION_DOWN = 2;
int ORIENTATION_LEFT = 3;
int ORIENTATION_RIGHT = 4;
}

View File

@ -0,0 +1,43 @@
package org.reactnative.camera;
import com.facebook.react.ReactPackage;
import com.facebook.react.bridge.JavaScriptModule;
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.uimanager.ViewManager;
import com.lwansbrough.RCTCamera.RCTCameraModule;
import com.lwansbrough.RCTCamera.RCTCameraViewManager;
import org.reactnative.facedetector.FaceDetectorModule;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Created by jgfidelis on 02/02/18.
*/
public class RNCameraPackage implements ReactPackage {
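// Registers the legacy RCTCamera module/view manager alongside the newer
// RNCamera implementation so both APIs stay available from one package.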
@Override
public List<NativeModule> createNativeModules(ReactApplicationContext reactApplicationContext) {
return Arrays.<NativeModule>asList(
new RCTCameraModule(reactApplicationContext),
new CameraModule(reactApplicationContext),
new FaceDetectorModule(reactApplicationContext)
);
}
// Deprecated in RN 0.47
public List<Class<? extends JavaScriptModule>> createJSModules() {
return Collections.emptyList();
}
@Override
public List<ViewManager> createViewManagers(ReactApplicationContext reactApplicationContext) {
return Arrays.<ViewManager>asList(
new RCTCameraViewManager(),
new CameraViewManager()
);
}
}

View File

@ -0,0 +1,614 @@
package org.reactnative.camera;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.CamcorderProfile;
import android.os.Build;
import androidx.core.content.ContextCompat;
import android.util.Log;
import android.view.View;
import android.os.AsyncTask;
import com.facebook.react.bridge.*;
import com.facebook.react.uimanager.ThemedReactContext;
import com.google.android.cameraview.CameraView;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.DecodeHintType;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.Result;
import org.reactnative.barcodedetector.RNBarcodeDetector;
import org.reactnative.camera.tasks.*;
import org.reactnative.camera.utils.RNFileUtils;
import org.reactnative.facedetector.RNFaceDetector;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
public class RNCameraView extends CameraView implements LifecycleEventListener, BarCodeScannerAsyncTaskDelegate, FaceDetectorAsyncTaskDelegate,
BarcodeDetectorAsyncTaskDelegate, TextRecognizerAsyncTaskDelegate, PictureSavedDelegate {
private ThemedReactContext mThemedReactContext;
private Queue<Promise> mPictureTakenPromises = new ConcurrentLinkedQueue<>();
private Map<Promise, ReadableMap> mPictureTakenOptions = new ConcurrentHashMap<>();
private Map<Promise, File> mPictureTakenDirectories = new ConcurrentHashMap<>();
private Promise mVideoRecordedPromise;
private List<String> mBarCodeTypes = null;
private boolean mIsPaused = false;
private boolean mIsNew = true;
private boolean invertImageData = false;
private boolean mIsRecording = false;
private boolean mIsRecordingInterrupted = false;
// Concurrency lock for scanners to avoid flooding the runtime
public volatile boolean barCodeScannerTaskLock = false;
public volatile boolean faceDetectorTaskLock = false;
public volatile boolean googleBarcodeDetectorTaskLock = false;
public volatile boolean textRecognizerTaskLock = false;
// Scanning-related properties
private MultiFormatReader mMultiFormatReader;
private RNFaceDetector mFaceDetector;
private RNBarcodeDetector mGoogleBarcodeDetector;
private boolean mShouldDetectFaces = false;
private boolean mShouldGoogleDetectBarcodes = false;
private boolean mShouldScanBarCodes = false;
private boolean mShouldRecognizeText = false;
private int mFaceDetectorMode = RNFaceDetector.FAST_MODE;
private int mFaceDetectionLandmarks = RNFaceDetector.NO_LANDMARKS;
private int mFaceDetectionClassifications = RNFaceDetector.NO_CLASSIFICATIONS;
private int mGoogleVisionBarCodeType = RNBarcodeDetector.ALL_FORMATS;
private int mGoogleVisionBarCodeMode = RNBarcodeDetector.NORMAL_MODE;
private boolean mTrackingEnabled = true;
private int mPaddingX;
private int mPaddingY;
// Limit Android Scan Area
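// (The values below are presumably fractions of the camera view, as set from
// JS via rectOfInterest; see BarCodeScannerAsyncTask for how they are applied.)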
private boolean mLimitScanArea = false;
private float mScanAreaX = 0.0f;
private float mScanAreaY = 0.0f;
private float mScanAreaWidth = 0.0f;
private float mScanAreaHeight = 0.0f;
private int mCameraViewWidth = 0;
private int mCameraViewHeight = 0;
public RNCameraView(ThemedReactContext themedReactContext) {
super(themedReactContext, true);
mThemedReactContext = themedReactContext;
themedReactContext.addLifecycleEventListener(this);
addCallback(new Callback() {
@Override
public void onCameraOpened(CameraView cameraView) {
RNCameraViewHelper.emitCameraReadyEvent(cameraView);
}
@Override
public void onMountError(CameraView cameraView) {
RNCameraViewHelper.emitMountErrorEvent(cameraView, "Camera view threw an error - component could not be rendered.");
}
@Override
public void onPictureTaken(CameraView cameraView, final byte[] data, int deviceOrientation) {
Promise promise = mPictureTakenPromises.poll();
ReadableMap options = mPictureTakenOptions.remove(promise);
if (options.hasKey("fastMode") && options.getBoolean("fastMode")) {
promise.resolve(null);
}
final File cacheDirectory = mPictureTakenDirectories.remove(promise);
// API 11+ is guaranteed on any React Native target, so this effectively always
// runs on the shared thread pool; the pre-Honeycomb branch is kept for safety.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
new ResolveTakenPictureAsyncTask(data, promise, options, cacheDirectory, deviceOrientation, RNCameraView.this)
.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
} else {
new ResolveTakenPictureAsyncTask(data, promise, options, cacheDirectory, deviceOrientation, RNCameraView.this)
.execute();
}
RNCameraViewHelper.emitPictureTakenEvent(cameraView);
}
@Override
public void onRecordingStart(CameraView cameraView, String path, int videoOrientation, int deviceOrientation) {
WritableMap result = Arguments.createMap();
result.putInt("videoOrientation", videoOrientation);
result.putInt("deviceOrientation", deviceOrientation);
result.putString("uri", RNFileUtils.uriFromFile(new File(path)).toString());
RNCameraViewHelper.emitRecordingStartEvent(cameraView, result);
}
@Override
public void onRecordingEnd(CameraView cameraView) {
RNCameraViewHelper.emitRecordingEndEvent(cameraView);
}
@Override
public void onVideoRecorded(CameraView cameraView, String path, int videoOrientation, int deviceOrientation) {
if (mVideoRecordedPromise != null) {
if (path != null) {
WritableMap result = Arguments.createMap();
result.putBoolean("isRecordingInterrupted", mIsRecordingInterrupted);
result.putInt("videoOrientation", videoOrientation);
result.putInt("deviceOrientation", deviceOrientation);
result.putString("uri", RNFileUtils.uriFromFile(new File(path)).toString());
mVideoRecordedPromise.resolve(result);
} else {
mVideoRecordedPromise.reject("E_RECORDING", "Couldn't stop recording - there is none in progress");
}
mIsRecording = false;
mIsRecordingInterrupted = false;
mVideoRecordedPromise = null;
}
}
@Override
public void onFramePreview(CameraView cameraView, byte[] data, int width, int height, int rotation) {
int correctRotation = RNCameraViewHelper.getCorrectCameraRotation(rotation, getFacing(), getCameraOrientation());
boolean willCallBarCodeTask = mShouldScanBarCodes && !barCodeScannerTaskLock && cameraView instanceof BarCodeScannerAsyncTaskDelegate;
boolean willCallFaceTask = mShouldDetectFaces && !faceDetectorTaskLock && cameraView instanceof FaceDetectorAsyncTaskDelegate;
boolean willCallGoogleBarcodeTask = mShouldGoogleDetectBarcodes && !googleBarcodeDetectorTaskLock && cameraView instanceof BarcodeDetectorAsyncTaskDelegate;
boolean willCallTextTask = mShouldRecognizeText && !textRecognizerTaskLock && cameraView instanceof TextRecognizerAsyncTaskDelegate;
boolean willCallPreviewColor = true; // preview-color sampling is always enabled; no prop gates it
if (!willCallBarCodeTask && !willCallFaceTask && !willCallGoogleBarcodeTask && !willCallTextTask && !willCallPreviewColor) {
return;
}
// NV21 preview frames carry 12 bits (1.5 bytes) per pixel; skip truncated frames.
if (data.length < (1.5 * width * height)) {
return;
}
if (willCallPreviewColor) {
// Sample a 3x3 pixel block at the frame center: decode the NV21 frame into a
// bitmap via JPEG, average the block's RGB channels, and emit the result to JS.
YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, width, height, null);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0, 0, width, height), 80, byteArrayOutputStream);
byte[] byteArray = byteArrayOutputStream.toByteArray();
Bitmap decodeByteArray = BitmapFactory.decodeByteArray(byteArray, 0, byteArray.length);
int w = decodeByteArray.getWidth() / 2;
int h = decodeByteArray.getHeight() / 2;
int count = 0;
int r = 0;
int g = 0;
int b = 0;
for (int i = w - 1; i <= w + 1; i++) {
for (int j = h - 1; j <= h + 1; j++) {
count++;
int c = decodeByteArray.getPixel(i, j);
r += (c >> 16) & 0xff;
g += (c >> 8) & 0xff;
b += c & 0xff;
}
}
r = r / count;
g = g / count;
b = b / count;
RNCameraViewHelper.emitPreviewColorEvent(cameraView, (r << 16) + (g << 8) + b, r, g, b);
}
if (willCallBarCodeTask) {
barCodeScannerTaskLock = true;
BarCodeScannerAsyncTaskDelegate delegate = (BarCodeScannerAsyncTaskDelegate) cameraView;
new BarCodeScannerAsyncTask(delegate, mMultiFormatReader, data, width, height, mLimitScanArea, mScanAreaX, mScanAreaY, mScanAreaWidth, mScanAreaHeight, mCameraViewWidth, mCameraViewHeight, getAspectRatio().toFloat()).execute();
}
if (willCallFaceTask) {
faceDetectorTaskLock = true;
FaceDetectorAsyncTaskDelegate delegate = (FaceDetectorAsyncTaskDelegate) cameraView;
new FaceDetectorAsyncTask(delegate, mFaceDetector, data, width, height, correctRotation, getResources().getDisplayMetrics().density, getFacing(), getWidth(), getHeight(), mPaddingX, mPaddingY).execute();
}
if (willCallGoogleBarcodeTask) {
googleBarcodeDetectorTaskLock = true;
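// NORMAL scans frames as-is, INVERTED flips every frame, and ALTERNATE toggles
// the inversion each frame so both barcode polarities get scanned over time.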
if (mGoogleVisionBarCodeMode == RNBarcodeDetector.NORMAL_MODE) {
invertImageData = false;
} else if (mGoogleVisionBarCodeMode == RNBarcodeDetector.ALTERNATE_MODE) {
invertImageData = !invertImageData;
} else if (mGoogleVisionBarCodeMode == RNBarcodeDetector.INVERTED_MODE) {
invertImageData = true;
}
if (invertImageData) {
for (int y = 0; y < data.length; y++) {
data[y] = (byte) ~data[y];
}
}
BarcodeDetectorAsyncTaskDelegate delegate = (BarcodeDetectorAsyncTaskDelegate) cameraView;
new BarcodeDetectorAsyncTask(delegate, mGoogleBarcodeDetector, data, width, height, correctRotation, getResources().getDisplayMetrics().density, getFacing(), getWidth(), getHeight(), mPaddingX, mPaddingY).execute();
}
if (willCallTextTask) {
textRecognizerTaskLock = true;
TextRecognizerAsyncTaskDelegate delegate = (TextRecognizerAsyncTaskDelegate) cameraView;
new TextRecognizerAsyncTask(delegate, mThemedReactContext, data, width, height, correctRotation, getResources().getDisplayMetrics().density, getFacing(), getWidth(), getHeight(), mPaddingX, mPaddingY).execute();
}
}
});
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
View preview = getView();
if (null == preview) {
return;
}
float width = right - left;
float height = bottom - top;
float ratio = getAspectRatio().toFloat();
int orientation = getResources().getConfiguration().orientation;
int correctHeight;
int correctWidth;
this.setBackgroundColor(Color.BLACK);
if (orientation == android.content.res.Configuration.ORIENTATION_LANDSCAPE) {
if (ratio * height < width) {
correctHeight = (int) (width / ratio);
correctWidth = (int) width;
} else {
correctWidth = (int) (height * ratio);
correctHeight = (int) height;
}
} else {
if (ratio * width > height) {
correctHeight = (int) (width * ratio);
correctWidth = (int) width;
} else {
correctWidth = (int) (height / ratio);
correctHeight = (int) height;
}
}
int paddingX = (int) ((width - correctWidth) / 2);
int paddingY = (int) ((height - correctHeight) / 2);
mPaddingX = paddingX;
mPaddingY = paddingY;
preview.layout(paddingX, paddingY, correctWidth + paddingX, correctHeight + paddingY);
}
@SuppressLint("all")
@Override
public void requestLayout() {
// React handles this for us, so we don't need to call super.requestLayout();
}
public void setBarCodeTypes(List<String> barCodeTypes) {
mBarCodeTypes = barCodeTypes;
initBarcodeReader();
}
public void takePicture(final ReadableMap options, final Promise promise, final File cacheDirectory) {
mBgHandler.post(new Runnable() {
@Override
public void run() {
mPictureTakenPromises.add(promise);
mPictureTakenOptions.put(promise, options);
mPictureTakenDirectories.put(promise, cacheDirectory);
try {
RNCameraView.super.takePicture(options);
} catch (Exception e) {
mPictureTakenPromises.remove(promise);
mPictureTakenOptions.remove(promise);
mPictureTakenDirectories.remove(promise);
promise.reject("E_TAKE_PICTURE_FAILED", e.getMessage());
}
}
});
}
@Override
public void onPictureSaved(WritableMap response) {
RNCameraViewHelper.emitPictureSavedEvent(this, response);
}
public void record(final ReadableMap options, final Promise promise, final File cacheDirectory) {
mBgHandler.post(new Runnable() {
@Override
public void run() {
try {
String path = options.hasKey("path") ? options.getString("path") : RNFileUtils.getOutputFilePath(cacheDirectory, ".mp4");
int maxDuration = options.hasKey("maxDuration") ? options.getInt("maxDuration") : -1;
int maxFileSize = options.hasKey("maxFileSize") ? options.getInt("maxFileSize") : -1;
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
if (options.hasKey("quality")) {
profile = RNCameraViewHelper.getCamcorderProfile(options.getInt("quality"));
}
if (options.hasKey("videoBitrate")) {
profile.videoBitRate = options.getInt("videoBitrate");
}
boolean recordAudio = true;
if (options.hasKey("mute")) {
recordAudio = !options.getBoolean("mute");
}
int orientation = Constants.ORIENTATION_AUTO;
if (options.hasKey("orientation")) {
orientation = options.getInt("orientation");
}
if (RNCameraView.super.record(path, maxDuration * 1000, maxFileSize, recordAudio, profile, orientation)) {
mIsRecording = true;
mVideoRecordedPromise = promise;
} else {
promise.reject("E_RECORDING_FAILED", "Starting video recording failed. Another recording might be in progress.");
}
} catch (IOException e) {
promise.reject("E_RECORDING_FAILED", "Starting video recording failed - could not create video file.");
}
}
});
}
/**
* Initialize the barcode decoder.
* Supports all iOS codes except [code138, code39mod43, itf14]
* Additionally supports [codabar, code128, maxicode, rss14, rssexpanded, upc_a, upc_ean]
*/
private void initBarcodeReader() {
mMultiFormatReader = new MultiFormatReader();
EnumMap<DecodeHintType, Object> hints = new EnumMap<>(DecodeHintType.class);
EnumSet<BarcodeFormat> decodeFormats = EnumSet.noneOf(BarcodeFormat.class);
if (mBarCodeTypes != null) {
for (String code : mBarCodeTypes) {
String formatString = (String) CameraModule.VALID_BARCODE_TYPES.get(code);
if (formatString != null) {
decodeFormats.add(BarcodeFormat.valueOf(formatString));
}
}
}
hints.put(DecodeHintType.POSSIBLE_FORMATS, decodeFormats);
mMultiFormatReader.setHints(hints);
}
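// The reader is created lazily: setShouldScanBarCodes builds it the first time
// scanning is enabled, and setBarCodeTypes rebuilds it when the type list changes.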
public void setShouldScanBarCodes(boolean shouldScanBarCodes) {
if (shouldScanBarCodes && mMultiFormatReader == null) {
initBarcodeReader();
}
this.mShouldScanBarCodes = shouldScanBarCodes;
setScanning(mShouldDetectFaces || mShouldGoogleDetectBarcodes || mShouldScanBarCodes || mShouldRecognizeText);
}
public void onBarCodeRead(Result barCode, int width, int height) {
String barCodeType = barCode.getBarcodeFormat().toString();
if (!mShouldScanBarCodes || !mBarCodeTypes.contains(barCodeType)) {
return;
}
RNCameraViewHelper.emitBarCodeReadEvent(this, barCode, width, height);
}
public void onBarCodeScanningTaskCompleted() {
barCodeScannerTaskLock = false;
if (mMultiFormatReader != null) {
mMultiFormatReader.reset();
}
}
// Limit Scan Area
public void setRectOfInterest(float x, float y, float width, float height) {
this.mLimitScanArea = true;
this.mScanAreaX = x;
this.mScanAreaY = y;
this.mScanAreaWidth = width;
this.mScanAreaHeight = height;
}
public void setCameraViewDimensions(int width, int height) {
this.mCameraViewWidth = width;
this.mCameraViewHeight = height;
}
/**
* Initial setup of the face detector
*/
private void setupFaceDetector() {
mFaceDetector = new RNFaceDetector(mThemedReactContext);
mFaceDetector.setMode(mFaceDetectorMode);
mFaceDetector.setLandmarkType(mFaceDetectionLandmarks);
mFaceDetector.setClassificationType(mFaceDetectionClassifications);
mFaceDetector.setTracking(mTrackingEnabled);
}
public void setFaceDetectionLandmarks(int landmarks) {
mFaceDetectionLandmarks = landmarks;
if (mFaceDetector != null) {
mFaceDetector.setLandmarkType(landmarks);
}
}
public void setFaceDetectionClassifications(int classifications) {
mFaceDetectionClassifications = classifications;
if (mFaceDetector != null) {
mFaceDetector.setClassificationType(classifications);
}
}
public void setFaceDetectionMode(int mode) {
mFaceDetectorMode = mode;
if (mFaceDetector != null) {
mFaceDetector.setMode(mode);
}
}
public void setTracking(boolean trackingEnabled) {
mTrackingEnabled = trackingEnabled;
if (mFaceDetector != null) {
mFaceDetector.setTracking(trackingEnabled);
}
}
public void setShouldDetectFaces(boolean shouldDetectFaces) {
if (shouldDetectFaces && mFaceDetector == null) {
setupFaceDetector();
}
this.mShouldDetectFaces = shouldDetectFaces;
setScanning(mShouldDetectFaces || mShouldGoogleDetectBarcodes || mShouldScanBarCodes || mShouldRecognizeText);
}
public void onFacesDetected(WritableArray data) {
if (!mShouldDetectFaces) {
return;
}
RNCameraViewHelper.emitFacesDetectedEvent(this, data);
}
public void onFaceDetectionError(RNFaceDetector faceDetector) {
if (!mShouldDetectFaces) {
return;
}
RNCameraViewHelper.emitFaceDetectionErrorEvent(this, faceDetector);
}
@Override
public void onFaceDetectingTaskCompleted() {
faceDetectorTaskLock = false;
}
/**
* Initial setup of the barcode detector
*/
private void setupBarcodeDetector() {
mGoogleBarcodeDetector = new RNBarcodeDetector(mThemedReactContext);
mGoogleBarcodeDetector.setBarcodeType(mGoogleVisionBarCodeType);
}
public void setShouldGoogleDetectBarcodes(boolean shouldDetectBarcodes) {
if (shouldDetectBarcodes && mGoogleBarcodeDetector == null) {
setupBarcodeDetector();
}
this.mShouldGoogleDetectBarcodes = shouldDetectBarcodes;
setScanning(mShouldDetectFaces || mShouldGoogleDetectBarcodes || mShouldScanBarCodes || mShouldRecognizeText);
}
public void setGoogleVisionBarcodeType(int barcodeType) {
mGoogleVisionBarCodeType = barcodeType;
if (mGoogleBarcodeDetector != null) {
mGoogleBarcodeDetector.setBarcodeType(barcodeType);
}
}
public void setGoogleVisionBarcodeMode(int barcodeMode) {
mGoogleVisionBarCodeMode = barcodeMode;
}
public void onBarcodesDetected(WritableArray barcodesDetected) {
if (!mShouldGoogleDetectBarcodes) {
return;
}
RNCameraViewHelper.emitBarcodesDetectedEvent(this, barcodesDetected);
}
public void onBarcodeDetectionError(RNBarcodeDetector barcodeDetector) {
if (!mShouldGoogleDetectBarcodes) {
return;
}
RNCameraViewHelper.emitBarcodeDetectionErrorEvent(this, barcodeDetector);
}
@Override
public void onBarcodeDetectingTaskCompleted() {
googleBarcodeDetectorTaskLock = false;
}
/**
* Text recognition
*/
public void setShouldRecognizeText(boolean shouldRecognizeText) {
this.mShouldRecognizeText = shouldRecognizeText;
setScanning(mShouldDetectFaces || mShouldGoogleDetectBarcodes || mShouldScanBarCodes || mShouldRecognizeText);
}
public void onTextRecognized(WritableArray serializedData) {
if (!mShouldRecognizeText) {
return;
}
RNCameraViewHelper.emitTextRecognizedEvent(this, serializedData);
}
@Override
public void onTextRecognizerTaskCompleted() {
textRecognizerTaskLock = false;
}
/**
* End text recognition
*/
@Override
public void onHostResume() {
if (hasCameraPermissions()) {
mBgHandler.post(new Runnable() {
@Override
public void run() {
if ((mIsPaused && !isCameraOpened()) || mIsNew) {
mIsPaused = false;
mIsNew = false;
start();
}
}
});
} else {
RNCameraViewHelper.emitMountErrorEvent(this, "Camera permissions not granted - component could not be rendered.");
}
}
@Override
public void onHostPause() {
if (mIsRecording) {
mIsRecordingInterrupted = true;
}
if (!mIsPaused && isCameraOpened()) {
mIsPaused = true;
stop();
}
}
@Override
public void onHostDestroy() {
if (mFaceDetector != null) {
mFaceDetector.release();
}
if (mGoogleBarcodeDetector != null) {
mGoogleBarcodeDetector.release();
}
mMultiFormatReader = null;
mThemedReactContext.removeLifecycleEventListener(this);
// camera release can be quite expensive. Run in on bg handler
// and cleanup last once everything has finished
mBgHandler.post(new Runnable() {
@Override
public void run() {
stop();
cleanup();
}
});
}
private boolean hasCameraPermissions() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
int result = ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA);
return result == PackageManager.PERMISSION_GRANTED;
} else {
return true;
}
}
}

View File

@ -0,0 +1,477 @@
package org.reactnative.camera;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.media.CamcorderProfile;
import android.os.Build;
import androidx.exifinterface.media.ExifInterface;
import android.util.Log;
import android.view.ViewGroup;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReadableMapKeySetIterator;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.uimanager.UIManagerModule;
import com.google.android.cameraview.CameraView;
import com.google.zxing.Result;
import org.reactnative.camera.events.*;
import org.reactnative.barcodedetector.RNBarcodeDetector;
import org.reactnative.facedetector.RNFaceDetector;
import java.text.SimpleDateFormat;
import java.util.Calendar;
public class RNCameraViewHelper {
public static final String[][] exifTags = new String[][]{
{"string", ExifInterface.TAG_ARTIST},
{"int", ExifInterface.TAG_BITS_PER_SAMPLE},
{"int", ExifInterface.TAG_COMPRESSION},
{"string", ExifInterface.TAG_COPYRIGHT},
{"string", ExifInterface.TAG_DATETIME},
{"string", ExifInterface.TAG_IMAGE_DESCRIPTION},
{"int", ExifInterface.TAG_IMAGE_LENGTH},
{"int", ExifInterface.TAG_IMAGE_WIDTH},
{"int", ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT},
{"int", ExifInterface.TAG_JPEG_INTERCHANGE_FORMAT_LENGTH},
{"string", ExifInterface.TAG_MAKE},
{"string", ExifInterface.TAG_MODEL},
{"int", ExifInterface.TAG_ORIENTATION},
{"int", ExifInterface.TAG_PHOTOMETRIC_INTERPRETATION},
{"int", ExifInterface.TAG_PLANAR_CONFIGURATION},
{"double", ExifInterface.TAG_PRIMARY_CHROMATICITIES},
{"double", ExifInterface.TAG_REFERENCE_BLACK_WHITE},
{"int", ExifInterface.TAG_RESOLUTION_UNIT},
{"int", ExifInterface.TAG_ROWS_PER_STRIP},
{"int", ExifInterface.TAG_SAMPLES_PER_PIXEL},
{"string", ExifInterface.TAG_SOFTWARE},
{"int", ExifInterface.TAG_STRIP_BYTE_COUNTS},
{"int", ExifInterface.TAG_STRIP_OFFSETS},
{"int", ExifInterface.TAG_TRANSFER_FUNCTION},
{"double", ExifInterface.TAG_WHITE_POINT},
{"double", ExifInterface.TAG_X_RESOLUTION},
{"double", ExifInterface.TAG_Y_CB_CR_COEFFICIENTS},
{"int", ExifInterface.TAG_Y_CB_CR_POSITIONING},
{"int", ExifInterface.TAG_Y_CB_CR_SUB_SAMPLING},
{"double", ExifInterface.TAG_Y_RESOLUTION},
{"double", ExifInterface.TAG_APERTURE_VALUE},
{"double", ExifInterface.TAG_BRIGHTNESS_VALUE},
{"string", ExifInterface.TAG_CFA_PATTERN},
{"int", ExifInterface.TAG_COLOR_SPACE},
{"string", ExifInterface.TAG_COMPONENTS_CONFIGURATION},
{"double", ExifInterface.TAG_COMPRESSED_BITS_PER_PIXEL},
{"int", ExifInterface.TAG_CONTRAST},
{"int", ExifInterface.TAG_CUSTOM_RENDERED},
{"string", ExifInterface.TAG_DATETIME_DIGITIZED},
{"string", ExifInterface.TAG_DATETIME_ORIGINAL},
{"string", ExifInterface.TAG_DEVICE_SETTING_DESCRIPTION},
{"double", ExifInterface.TAG_DIGITAL_ZOOM_RATIO},
{"string", ExifInterface.TAG_EXIF_VERSION},
{"double", ExifInterface.TAG_EXPOSURE_BIAS_VALUE},
{"double", ExifInterface.TAG_EXPOSURE_INDEX},
{"int", ExifInterface.TAG_EXPOSURE_MODE},
{"int", ExifInterface.TAG_EXPOSURE_PROGRAM},
{"double", ExifInterface.TAG_EXPOSURE_TIME},
{"double", ExifInterface.TAG_F_NUMBER},
{"string", ExifInterface.TAG_FILE_SOURCE},
{"int", ExifInterface.TAG_FLASH},
{"double", ExifInterface.TAG_FLASH_ENERGY},
{"string", ExifInterface.TAG_FLASHPIX_VERSION},
{"double", ExifInterface.TAG_FOCAL_LENGTH},
{"int", ExifInterface.TAG_FOCAL_LENGTH_IN_35MM_FILM},
{"int", ExifInterface.TAG_FOCAL_PLANE_RESOLUTION_UNIT},
{"double", ExifInterface.TAG_FOCAL_PLANE_X_RESOLUTION},
{"double", ExifInterface.TAG_FOCAL_PLANE_Y_RESOLUTION},
{"int", ExifInterface.TAG_GAIN_CONTROL},
{"int", ExifInterface.TAG_ISO_SPEED_RATINGS},
{"string", ExifInterface.TAG_IMAGE_UNIQUE_ID},
{"int", ExifInterface.TAG_LIGHT_SOURCE},
{"string", ExifInterface.TAG_MAKER_NOTE},
{"double", ExifInterface.TAG_MAX_APERTURE_VALUE},
{"int", ExifInterface.TAG_METERING_MODE},
{"int", ExifInterface.TAG_NEW_SUBFILE_TYPE},
{"string", ExifInterface.TAG_OECF},
{"int", ExifInterface.TAG_PIXEL_X_DIMENSION},
{"int", ExifInterface.TAG_PIXEL_Y_DIMENSION},
{"string", ExifInterface.TAG_RELATED_SOUND_FILE},
{"int", ExifInterface.TAG_SATURATION},
{"int", ExifInterface.TAG_SCENE_CAPTURE_TYPE},
{"string", ExifInterface.TAG_SCENE_TYPE},
{"int", ExifInterface.TAG_SENSING_METHOD},
{"int", ExifInterface.TAG_SHARPNESS},
{"double", ExifInterface.TAG_SHUTTER_SPEED_VALUE},
{"string", ExifInterface.TAG_SPATIAL_FREQUENCY_RESPONSE},
{"string", ExifInterface.TAG_SPECTRAL_SENSITIVITY},
{"int", ExifInterface.TAG_SUBFILE_TYPE},
{"string", ExifInterface.TAG_SUBSEC_TIME},
{"string", ExifInterface.TAG_SUBSEC_TIME_DIGITIZED},
{"string", ExifInterface.TAG_SUBSEC_TIME_ORIGINAL},
{"int", ExifInterface.TAG_SUBJECT_AREA},
{"double", ExifInterface.TAG_SUBJECT_DISTANCE},
{"int", ExifInterface.TAG_SUBJECT_DISTANCE_RANGE},
{"int", ExifInterface.TAG_SUBJECT_LOCATION},
{"string", ExifInterface.TAG_USER_COMMENT},
{"int", ExifInterface.TAG_WHITE_BALANCE},
{"int", ExifInterface.TAG_GPS_ALTITUDE_REF},
{"string", ExifInterface.TAG_GPS_AREA_INFORMATION},
{"double", ExifInterface.TAG_GPS_DOP},
{"string", ExifInterface.TAG_GPS_DATESTAMP},
{"double", ExifInterface.TAG_GPS_DEST_BEARING},
{"string", ExifInterface.TAG_GPS_DEST_BEARING_REF},
{"double", ExifInterface.TAG_GPS_DEST_DISTANCE},
{"string", ExifInterface.TAG_GPS_DEST_DISTANCE_REF},
{"double", ExifInterface.TAG_GPS_DEST_LATITUDE},
{"string", ExifInterface.TAG_GPS_DEST_LATITUDE_REF},
{"double", ExifInterface.TAG_GPS_DEST_LONGITUDE},
{"string", ExifInterface.TAG_GPS_DEST_LONGITUDE_REF},
{"int", ExifInterface.TAG_GPS_DIFFERENTIAL},
{"double", ExifInterface.TAG_GPS_IMG_DIRECTION},
{"string", ExifInterface.TAG_GPS_IMG_DIRECTION_REF},
{"string", ExifInterface.TAG_GPS_LATITUDE_REF},
{"string", ExifInterface.TAG_GPS_LONGITUDE_REF},
{"string", ExifInterface.TAG_GPS_MAP_DATUM},
{"string", ExifInterface.TAG_GPS_MEASURE_MODE},
{"string", ExifInterface.TAG_GPS_PROCESSING_METHOD},
{"string", ExifInterface.TAG_GPS_SATELLITES},
{"double", ExifInterface.TAG_GPS_SPEED},
{"string", ExifInterface.TAG_GPS_SPEED_REF},
{"string", ExifInterface.TAG_GPS_STATUS},
{"string", ExifInterface.TAG_GPS_TIMESTAMP},
{"double", ExifInterface.TAG_GPS_TRACK},
{"string", ExifInterface.TAG_GPS_TRACK_REF},
{"string", ExifInterface.TAG_GPS_VERSION_ID},
{"string", ExifInterface.TAG_INTEROPERABILITY_INDEX},
{"int", ExifInterface.TAG_THUMBNAIL_IMAGE_LENGTH},
{"int", ExifInterface.TAG_THUMBNAIL_IMAGE_WIDTH},
{"int", ExifInterface.TAG_DNG_VERSION},
{"int", ExifInterface.TAG_DEFAULT_CROP_SIZE},
{"int", ExifInterface.TAG_ORF_PREVIEW_IMAGE_START},
{"int", ExifInterface.TAG_ORF_PREVIEW_IMAGE_LENGTH},
{"int", ExifInterface.TAG_ORF_ASPECT_FRAME},
{"int", ExifInterface.TAG_RW2_SENSOR_BOTTOM_BORDER},
{"int", ExifInterface.TAG_RW2_SENSOR_LEFT_BORDER},
{"int", ExifInterface.TAG_RW2_SENSOR_RIGHT_BORDER},
{"int", ExifInterface.TAG_RW2_SENSOR_TOP_BORDER},
{"int", ExifInterface.TAG_RW2_ISO},
};
// Run all events on native modules queue thread since they might be fired
// from other non RN threads.
// Mount error event
public static void emitMountErrorEvent(final ViewGroup view, final String error) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
CameraMountErrorEvent event = CameraMountErrorEvent.obtain(view.getId(), error);
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
// Camera ready event
public static void emitCameraReadyEvent(final ViewGroup view) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
CameraReadyEvent event = CameraReadyEvent.obtain(view.getId());
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
// Preview Color event
public static void emitPreviewColorEvent(final ViewGroup view, final int color, final int r, final int g, final int b){
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
PreviewColorEvent event = PreviewColorEvent.obtain(view.getId(), color, r, g, b);
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
// Picture saved event
public static void emitPictureSavedEvent(final ViewGroup view, final WritableMap response) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
PictureSavedEvent event = PictureSavedEvent.obtain(view.getId(), response);
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
// Picture taken event
public static void emitPictureTakenEvent(final ViewGroup view) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
PictureTakenEvent event = PictureTakenEvent.obtain(view.getId());
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
// video recording start/end events
public static void emitRecordingStartEvent(final ViewGroup view, final WritableMap response) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
RecordingStartEvent event = RecordingStartEvent.obtain(view.getId(), response);
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
public static void emitRecordingEndEvent(final ViewGroup view) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
RecordingEndEvent event = RecordingEndEvent.obtain(view.getId());
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
// Face detection events
public static void emitFacesDetectedEvent(final ViewGroup view, final WritableArray data) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
FacesDetectedEvent event = FacesDetectedEvent.obtain(view.getId(), data);
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
public static void emitFaceDetectionErrorEvent(final ViewGroup view, final RNFaceDetector faceDetector) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
FaceDetectionErrorEvent event = FaceDetectionErrorEvent.obtain(view.getId(), faceDetector);
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
// Barcode detection events
public static void emitBarcodesDetectedEvent(final ViewGroup view, final WritableArray barcodes) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
BarcodesDetectedEvent event = BarcodesDetectedEvent.obtain(view.getId(), barcodes);
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
public static void emitBarcodeDetectionErrorEvent(final ViewGroup view, final RNBarcodeDetector barcodeDetector) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
BarcodeDetectionErrorEvent event = BarcodeDetectionErrorEvent.obtain(view.getId(), barcodeDetector);
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
// Bar code read event
public static void emitBarCodeReadEvent(final ViewGroup view, final Result barCode, final int width, final int height) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
BarCodeReadEvent event = BarCodeReadEvent.obtain(view.getId(), barCode, width, height);
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
// Text recognition event
public static void emitTextRecognizedEvent(final ViewGroup view, final WritableArray data) {
final ReactContext reactContext = (ReactContext) view.getContext();
reactContext.runOnNativeModulesQueueThread(new Runnable() {
@Override
public void run() {
TextRecognizedEvent event = TextRecognizedEvent.obtain(view.getId(), data);
reactContext.getNativeModule(UIManagerModule.class).getEventDispatcher().dispatchEvent(event);
}
});
}
// Utilities
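// Worked example: a back camera with sensor orientation 90 and the device held
// in landscape (rotation 270) yields (90 - 270 + 180) % 360 = 0, i.e. no extra
// rotation is needed before frames are handed to the detectors.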
public static int getCorrectCameraRotation(int rotation, int facing, int cameraOrientation) {
if (facing == CameraView.FACING_FRONT) {
// Tested the below line and there's no need to do the mirror calculation
return (cameraOrientation + rotation) % 360;
} else {
final int landscapeFlip = rotationIsLandscape(rotation) ? 180 : 0;
return (cameraOrientation - rotation + landscapeFlip) % 360;
}
}
private static boolean rotationIsLandscape(int rotation) {
return (rotation == Constants.LANDSCAPE_90 ||
rotation == Constants.LANDSCAPE_270);
}
private static int getCamcorderProfileQualityFromCameraModuleConstant(int quality) {
switch (quality) {
case CameraModule.VIDEO_2160P:
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
return CamcorderProfile.QUALITY_2160P;
}
// intentional fall-through: 2160p needs API 21+, so older devices get 1080p
case CameraModule.VIDEO_1080P:
return CamcorderProfile.QUALITY_1080P;
case CameraModule.VIDEO_720P:
return CamcorderProfile.QUALITY_720P;
case CameraModule.VIDEO_480P:
return CamcorderProfile.QUALITY_480P;
case CameraModule.VIDEO_4x3:
return CamcorderProfile.QUALITY_480P;
}
return CamcorderProfile.QUALITY_HIGH;
}
public static CamcorderProfile getCamcorderProfile(int quality) {
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
int camcorderQuality = getCamcorderProfileQualityFromCameraModuleConstant(quality);
if (CamcorderProfile.hasProfile(camcorderQuality)) {
profile = CamcorderProfile.get(camcorderQuality);
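// VIDEO_4x3 reuses the 480p profile but narrows the frame to 640px wide,
// turning the (typically 720x480) profile into a 4:3 capture.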
if (quality == CameraModule.VIDEO_4x3) {
profile.videoFrameWidth = 640;
}
}
return profile;
}
public static WritableMap getExifData(ExifInterface exifInterface) {
WritableMap exifMap = Arguments.createMap();
for (String[] tagInfo : exifTags) {
String name = tagInfo[1];
if (exifInterface.getAttribute(name) != null) {
String type = tagInfo[0];
switch (type) {
case "string":
exifMap.putString(name, exifInterface.getAttribute(name));
break;
case "int":
exifMap.putInt(name, exifInterface.getAttributeInt(name, 0));
break;
case "double":
exifMap.putDouble(name, exifInterface.getAttributeDouble(name, 0));
break;
}
}
}
double[] latLong = exifInterface.getLatLong();
if (latLong != null) {
exifMap.putDouble(ExifInterface.TAG_GPS_LATITUDE, latLong[0]);
exifMap.putDouble(ExifInterface.TAG_GPS_LONGITUDE, latLong[1]);
exifMap.putDouble(ExifInterface.TAG_GPS_ALTITUDE, exifInterface.getAltitude(0));
}
return exifMap;
}
public static void setExifData(ExifInterface exifInterface, ReadableMap exifMap) {
for (String[] tagInfo : exifTags) {
String name = tagInfo[1];
if (exifMap.hasKey(name)) {
String type = tagInfo[0];
switch (type) {
case "string":
exifInterface.setAttribute(name, exifMap.getString(name));
break;
case "int":
exifInterface.setAttribute(name, Integer.toString(exifMap.getInt(name)));
exifMap.getInt(name);
break;
case "double":
exifInterface.setAttribute(name, Double.toString(exifMap.getDouble(name)));
exifMap.getDouble(name);
break;
}
}
}
if (exifMap.hasKey(ExifInterface.TAG_GPS_LATITUDE) && exifMap.hasKey(ExifInterface.TAG_GPS_LONGITUDE)) {
exifInterface.setLatLong(exifMap.getDouble(ExifInterface.TAG_GPS_LATITUDE),
exifMap.getDouble(ExifInterface.TAG_GPS_LONGITUDE));
}
if (exifMap.hasKey(ExifInterface.TAG_GPS_ALTITUDE)) {
exifInterface.setAltitude(exifMap.getDouble(ExifInterface.TAG_GPS_ALTITUDE));
}
}
// clears exif values in place
public static void clearExifData(ExifInterface exifInterface) {
for (String[] tagInfo : exifTags) {
exifInterface.setAttribute(tagInfo[1], null);
}
// these are not part of our tag list, remove by hand
exifInterface.setAttribute(ExifInterface.TAG_GPS_LATITUDE, null);
exifInterface.setAttribute(ExifInterface.TAG_GPS_LONGITUDE, null);
exifInterface.setAttribute(ExifInterface.TAG_GPS_ALTITUDE, null);
}
public static Bitmap generateSimulatorPhoto(int width, int height) {
Bitmap fakePhoto = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(fakePhoto);
Paint background = new Paint();
background.setColor(Color.BLACK);
canvas.drawRect(0, 0, width, height, background);
Paint textPaint = new Paint();
textPaint.setColor(Color.YELLOW);
textPaint.setTextSize(35);
Calendar calendar = Calendar.getInstance();
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy.MM.dd G '->' HH:mm:ss z");
canvas.drawText(simpleDateFormat.format(calendar.getTime()), width * 0.1f, height * 0.2f, textPaint);
canvas.drawText(simpleDateFormat.format(calendar.getTime()), width * 0.2f, height * 0.4f, textPaint);
canvas.drawText(simpleDateFormat.format(calendar.getTime()), width * 0.3f, height * 0.6f, textPaint);
canvas.drawText(simpleDateFormat.format(calendar.getTime()), width * 0.4f, height * 0.8f, textPaint);
return fakePhoto;
}
}

View File

@ -0,0 +1,100 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import org.reactnative.camera.CameraViewManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.google.zxing.Result;
import com.google.zxing.ResultPoint;
import java.util.Formatter;
public class BarCodeReadEvent extends Event<BarCodeReadEvent> {
private static final Pools.SynchronizedPool<BarCodeReadEvent> EVENTS_POOL =
new Pools.SynchronizedPool<>(3);
private Result mBarCode;
private int mWidth;
private int mHeight;
private BarCodeReadEvent() {}
public static BarCodeReadEvent obtain(int viewTag, Result barCode, int width, int height) {
BarCodeReadEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new BarCodeReadEvent();
}
event.init(viewTag, barCode, width, height);
return event;
}
private void init(int viewTag, Result barCode, int width, int height) {
super.init(viewTag);
mBarCode = barCode;
mWidth = width;
mHeight = height;
}
/**
* We want every distinct barcode to be reported to the JS listener.
* If we return some static value as a coalescing key there may be two barcode events
* containing two different barcodes waiting to be transmitted to JS
* that would get coalesced (because both of them would have the same coalescing key).
* So let's differentiate them with a hash of the contents (mod short's max value).
*/
@Override
public short getCoalescingKey() {
int hashCode = mBarCode.getText().hashCode() % Short.MAX_VALUE;
return (short) hashCode;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_BAR_CODE_READ.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
WritableMap event = Arguments.createMap();
WritableMap eventOrigin = Arguments.createMap();
event.putInt("target", getViewTag());
event.putString("data", mBarCode.getText());
byte[] rawBytes = mBarCode.getRawBytes();
if (rawBytes != null && rawBytes.length > 0) {
Formatter formatter = new Formatter();
for (byte b : rawBytes) {
formatter.format("%02x", b);
}
event.putString("rawData", formatter.toString());
formatter.close();
}
event.putString("type", mBarCode.getBarcodeFormat().toString());
WritableArray resultPoints = Arguments.createArray();
ResultPoint[] points = mBarCode.getResultPoints();
for (ResultPoint point : points) {
if (point != null) {
WritableMap newPoint = Arguments.createMap();
newPoint.putString("x", String.valueOf(point.getX()));
newPoint.putString("y", String.valueOf(point.getY()));
resultPoints.pushMap(newPoint);
}
}
eventOrigin.putArray("origin", resultPoints);
eventOrigin.putInt("height", mHeight);
eventOrigin.putInt("width", mWidth);
event.putMap("bounds", eventOrigin);
return event;
}
}
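
The event classes in this package all share the same pooled obtain/init pattern. A minimal standalone sketch (the class name and values are hypothetical, not part of this commit) of the pooling and the content-hash coalescing key used above; note that String.hashCode() can be negative, so the resulting short key may be negative too, which is still a stable, valid key:

import androidx.core.util.Pools;

public class PooledEventSketch {
  private static final Pools.SynchronizedPool<PooledEventSketch> POOL =
      new Pools.SynchronizedPool<>(3); // at most 3 idle instances are kept around
  private String mText;

  private PooledEventSketch() {}

  public static PooledEventSketch obtain(String text) {
    PooledEventSketch e = POOL.acquire(); // reuse a pooled instance if available
    if (e == null) {
      e = new PooledEventSketch();        // otherwise allocate a fresh one
    }
    e.mText = text;
    return e;
  }

  public short coalescingKey() {
    // same scheme as BarCodeReadEvent above: hash of the contents, mod short range
    return (short) (mText.hashCode() % Short.MAX_VALUE);
  }

  public void recycle() {
    POOL.release(this); // return the instance to the pool for reuse
  }
}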

View File

@ -0,0 +1,53 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import org.reactnative.camera.CameraViewManager;
import org.reactnative.barcodedetector.RNBarcodeDetector;
public class BarcodeDetectionErrorEvent extends Event<BarcodeDetectionErrorEvent> {
private static final Pools.SynchronizedPool<BarcodeDetectionErrorEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private RNBarcodeDetector mBarcodeDetector;
private BarcodeDetectionErrorEvent() {
}
public static BarcodeDetectionErrorEvent obtain(int viewTag, RNBarcodeDetector barcodeDetector) {
BarcodeDetectionErrorEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new BarcodeDetectionErrorEvent();
}
event.init(viewTag, barcodeDetector);
return event;
}
private void init(int viewTag, RNBarcodeDetector faceDetector) {
super.init(viewTag);
mBarcodeDetector = faceDetector;
}
@Override
public short getCoalescingKey() {
return 0;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_BARCODE_DETECTION_ERROR.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
WritableMap map = Arguments.createMap();
map.putBoolean("isOperational", mBarcodeDetector != null && mBarcodeDetector.isOperational());
return map;
}
}

View File

@ -0,0 +1,73 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import org.reactnative.camera.CameraViewManager;
public class BarcodesDetectedEvent extends Event<BarcodesDetectedEvent> {
private static final Pools.SynchronizedPool<BarcodesDetectedEvent> EVENTS_POOL =
new Pools.SynchronizedPool<>(3);
private WritableArray mBarcodes;
private BarcodesDetectedEvent() {
}
public static BarcodesDetectedEvent obtain(
int viewTag,
WritableArray barcodes
) {
BarcodesDetectedEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new BarcodesDetectedEvent();
}
event.init(viewTag, barcodes);
return event;
}
private void init(
int viewTag,
WritableArray barcodes
) {
super.init(viewTag);
mBarcodes = barcodes;
}
/**
* note(@sjchmiela)
* Should the events about detected barcodes coalesce, the best strategy is
* to ensure that events with different barcode counts are always transmitted.
*/
@Override
public short getCoalescingKey() {
if (mBarcodes.size() > Short.MAX_VALUE) {
return Short.MAX_VALUE;
}
return (short) mBarcodes.size();
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_BARCODES_DETECTED.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
WritableMap event = Arguments.createMap();
event.putString("type", "barcode");
event.putArray("barcodes", mBarcodes);
event.putInt("target", getViewTag());
return event;
}
}
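
A minimal sketch (not part of this commit) of the count-based coalescing key above: events carrying different numbers of barcodes get different keys, so the dispatcher never coalesces a one-barcode event with a two-barcode event, and counts beyond the short range clamp to Short.MAX_VALUE:

int count = barcodes.size(); // barcodes: a WritableArray, as above
short key = (short) Math.min(count, Short.MAX_VALUE);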

View File

@ -0,0 +1,51 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import org.reactnative.camera.CameraViewManager;
public class CameraMountErrorEvent extends Event<CameraMountErrorEvent> {
private static final Pools.SynchronizedPool<CameraMountErrorEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private String mError;
private CameraMountErrorEvent() {
}
public static CameraMountErrorEvent obtain(int viewTag, String error) {
CameraMountErrorEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new CameraMountErrorEvent();
}
event.init(viewTag, error);
return event;
}
private void init(int viewTag, String error) {
super.init(viewTag);
mError = error;
}
@Override
public short getCoalescingKey() {
return 0;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_MOUNT_ERROR.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
WritableMap arguments = Arguments.createMap();
arguments.putString("message", mError);
return arguments;
}
}

View File

@ -0,0 +1,42 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import org.reactnative.camera.CameraViewManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
public class CameraReadyEvent extends Event<CameraReadyEvent> {
private static final Pools.SynchronizedPool<CameraReadyEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private CameraReadyEvent() {}
public static CameraReadyEvent obtain(int viewTag) {
CameraReadyEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new CameraReadyEvent();
}
event.init(viewTag);
return event;
}
@Override
public short getCoalescingKey() {
return 0;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_CAMERA_READY.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
return Arguments.createMap();
}
}

View File

@ -0,0 +1,52 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import org.reactnative.camera.CameraViewManager;
import org.reactnative.facedetector.RNFaceDetector;
public class FaceDetectionErrorEvent extends Event<FaceDetectionErrorEvent> {
private static final Pools.SynchronizedPool<FaceDetectionErrorEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private RNFaceDetector mFaceDetector;
private FaceDetectionErrorEvent() {
}
public static FaceDetectionErrorEvent obtain(int viewTag, RNFaceDetector faceDetector) {
FaceDetectionErrorEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new FaceDetectionErrorEvent();
}
event.init(viewTag, faceDetector);
return event;
}
private void init(int viewTag, RNFaceDetector faceDetector) {
super.init(viewTag);
mFaceDetector = faceDetector;
}
@Override
public short getCoalescingKey() {
return 0;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_FACE_DETECTION_ERROR.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
WritableMap map = Arguments.createMap();
map.putBoolean("isOperational", mFaceDetector != null && mFaceDetector.isOperational());
return map;
}
}

View File

@ -0,0 +1,65 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import org.reactnative.camera.CameraViewManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
public class FacesDetectedEvent extends Event<FacesDetectedEvent> {
private static final Pools.SynchronizedPool<FacesDetectedEvent> EVENTS_POOL =
new Pools.SynchronizedPool<>(3);
private WritableArray mData;
private FacesDetectedEvent() {}
public static FacesDetectedEvent obtain(int viewTag, WritableArray data) {
FacesDetectedEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new FacesDetectedEvent();
}
event.init(viewTag, data);
return event;
}
private void init(int viewTag, WritableArray data) {
super.init(viewTag);
mData = data;
}
/**
* note(@sjchmiela)
* Should the events about detected faces coalesce, the best strategy is
* to ensure that events with different face counts are always transmitted.
*/
@Override
public short getCoalescingKey() {
if (mData.size() > Short.MAX_VALUE) {
return Short.MAX_VALUE;
}
return (short) mData.size();
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_FACES_DETECTED.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
WritableMap event = Arguments.createMap();
event.putString("type", "face");
event.putArray("faces", mData);
event.putInt("target", getViewTag());
return event;
}
}

View File

@ -0,0 +1,46 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import org.reactnative.camera.CameraViewManager;
public class PictureSavedEvent extends Event<PictureSavedEvent> {
private static final Pools.SynchronizedPool<PictureSavedEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(5);
private PictureSavedEvent() {}
private WritableMap mResponse;
public static PictureSavedEvent obtain(int viewTag, WritableMap response) {
PictureSavedEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new PictureSavedEvent();
}
event.init(viewTag, response);
return event;
}
private void init(int viewTag, WritableMap response) {
super.init(viewTag);
mResponse = response;
}
@Override
public short getCoalescingKey() {
int hashCode = mResponse.getMap("data").getString("uri").hashCode() % Short.MAX_VALUE;
return (short) hashCode;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_PICTURE_SAVED.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), mResponse);
}
}

View File

@ -0,0 +1,42 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import org.reactnative.camera.CameraViewManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
public class PictureTakenEvent extends Event<PictureTakenEvent> {
private static final Pools.SynchronizedPool<PictureTakenEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private PictureTakenEvent() {}
public static PictureTakenEvent obtain(int viewTag) {
PictureTakenEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new PictureTakenEvent();
}
event.init(viewTag);
return event;
}
@Override
public short getCoalescingKey() {
return 0;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_PICTURE_TAKEN.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
return Arguments.createMap();
}
}

View File

@ -0,0 +1,58 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import org.reactnative.camera.CameraViewManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
public class PreviewColorEvent extends Event<PreviewColorEvent> {
private static final Pools.SynchronizedPool<PreviewColorEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private PreviewColorEvent() {}
private int color;
private int r, g, b;
public static PreviewColorEvent obtain(int viewTag, int color, int r, int g, int b) {
PreviewColorEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new PreviewColorEvent();
}
event.init(viewTag, color, r, g, b);
return event;
}
private void init(int viewTag, int color, int r, int g, int b) {
super.init(viewTag);
this.color = color;
this.r = r;
this.g = g;
this.b = b;
}
@Override
public short getCoalescingKey() {
return 0;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_PREVIEW_COLOR.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
WritableMap map = Arguments.createMap();
map.putInt("color", color);
map.putInt("r", r);
map.putInt("g", g);
map.putInt("b", b);
return map;
}
}

View File

@ -0,0 +1,42 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import org.reactnative.camera.CameraViewManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
public class RecordingEndEvent extends Event<RecordingEndEvent> {
private static final Pools.SynchronizedPool<RecordingEndEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private RecordingEndEvent() {}
public static RecordingEndEvent obtain(int viewTag) {
RecordingEndEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new RecordingEndEvent();
}
event.init(viewTag);
return event;
}
@Override
public short getCoalescingKey() {
return 0;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_RECORDING_END.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), serializeEventData());
}
private WritableMap serializeEventData() {
return Arguments.createMap();
}
}

View File

@ -0,0 +1,46 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import org.reactnative.camera.CameraViewManager;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
public class RecordingStartEvent extends Event<RecordingStartEvent> {
private static final Pools.SynchronizedPool<RecordingStartEvent> EVENTS_POOL = new Pools.SynchronizedPool<>(3);
private RecordingStartEvent() {}
private WritableMap mResponse;
public static RecordingStartEvent obtain(int viewTag, WritableMap response) {
RecordingStartEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new RecordingStartEvent();
}
event.init(viewTag, response);
return event;
}
private void init(int viewTag, WritableMap response) {
super.init(viewTag);
mResponse = response;
}
// @Override
// public short getCoalescingKey() {
// int hashCode = mResponse.getString("uri").hashCode() % Short.MAX_VALUE;
// return (short) hashCode;
// }
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_RECORDING_START.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), mResponse);
}
}

View File

@ -0,0 +1,54 @@
package org.reactnative.camera.events;
import androidx.core.util.Pools;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.events.Event;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import org.reactnative.camera.CameraViewManager;
public class TextRecognizedEvent extends Event<TextRecognizedEvent> {
private static final Pools.SynchronizedPool<TextRecognizedEvent> EVENTS_POOL =
new Pools.SynchronizedPool<>(3);
private WritableArray mData;
private TextRecognizedEvent() {}
public static TextRecognizedEvent obtain(int viewTag, WritableArray data) {
TextRecognizedEvent event = EVENTS_POOL.acquire();
if (event == null) {
event = new TextRecognizedEvent();
}
event.init(viewTag, data);
return event;
}
private void init(int viewTag, WritableArray data) {
super.init(viewTag);
mData = data;
}
@Override
public String getEventName() {
return CameraViewManager.Events.EVENT_ON_TEXT_RECOGNIZED.toString();
}
@Override
public void dispatch(RCTEventEmitter rctEventEmitter) {
rctEventEmitter.receiveEvent(getViewTag(), getEventName(), createEvent());
}
private WritableMap createEvent() {
WritableMap event = Arguments.createMap();
event.putString("type", "textBlock");
event.putArray("textBlocks", mData);
event.putInt("target", getViewTag());
return event;
}
}

View File

@ -0,0 +1,187 @@
package org.reactnative.camera.tasks;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.NotFoundException;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.google.zxing.Result;
import com.google.zxing.common.HybridBinarizer;
public class BarCodeScannerAsyncTask extends android.os.AsyncTask<Void, Void, Result> {
private byte[] mImageData;
private int mWidth;
private int mHeight;
private BarCodeScannerAsyncTaskDelegate mDelegate;
private final MultiFormatReader mMultiFormatReader;
private boolean mLimitScanArea;
private float mScanAreaX;
private float mScanAreaY;
private float mScanAreaWidth;
private float mScanAreaHeight;
private int mCameraViewWidth;
private int mCameraViewHeight;
private float mRatio;
// note(sjchmiela): From my short research it's ok to ignore rotation of the image.
public BarCodeScannerAsyncTask(
BarCodeScannerAsyncTaskDelegate delegate,
MultiFormatReader multiFormatReader,
byte[] imageData,
int width,
int height,
boolean limitScanArea,
float scanAreaX,
float scanAreaY,
float scanAreaWidth,
float scanAreaHeight,
int cameraViewWidth,
int cameraViewHeight,
float ratio
) {
mImageData = imageData;
mWidth = width;
mHeight = height;
mDelegate = delegate;
mMultiFormatReader = multiFormatReader;
mLimitScanArea = limitScanArea;
mScanAreaX = scanAreaX;
mScanAreaY = scanAreaY;
mScanAreaWidth = scanAreaWidth;
mScanAreaHeight = scanAreaHeight;
mCameraViewWidth = cameraViewWidth;
mCameraViewHeight = cameraViewHeight;
mRatio = ratio;
}
@Override
protected Result doInBackground(Void... ignored) {
if (isCancelled() || mDelegate == null) {
return null;
}
Result result = null;
/**
* mCameraViewWidth and mCameraViewHeight are obtained in portrait orientation.
* mWidth and mHeight are measured in landscape orientation with the Home button to the right.
* adjustedCamViewWidth is the width adjusted for the aspect-ratio setting.
*/
int adjustedCamViewWidth = (int) (mCameraViewHeight / mRatio);
float adjustedScanY = (((adjustedCamViewWidth - mCameraViewWidth) / 2) + (mScanAreaY * mCameraViewWidth)) / adjustedCamViewWidth;
int left = (int) (mScanAreaX * mWidth);
int top = (int) (adjustedScanY * mHeight);
int scanWidth = (int) (mScanAreaWidth * mWidth);
int scanHeight = (int) (((mScanAreaHeight * mCameraViewWidth) / adjustedCamViewWidth) * mHeight);
try {
BinaryBitmap bitmap = generateBitmapFromImageData(
mImageData,
mWidth,
mHeight,
false,
left,
top,
scanWidth,
scanHeight
);
result = mMultiFormatReader.decodeWithState(bitmap);
} catch (NotFoundException e) {
BinaryBitmap bitmap = generateBitmapFromImageData(
rotateImage(mImageData, mWidth, mHeight),
mHeight,
mWidth,
false,
mHeight - scanHeight - top,
left,
scanHeight,
scanWidth
);
try {
result = mMultiFormatReader.decodeWithState(bitmap);
} catch (NotFoundException e1) {
BinaryBitmap invertedBitmap = generateBitmapFromImageData(
mImageData,
mWidth,
mHeight,
true,
mWidth - scanWidth - left,
mHeight - scanHeight - top,
scanWidth,
scanHeight
);
try {
result = mMultiFormatReader.decodeWithState(invertedBitmap);
} catch (NotFoundException e2) {
BinaryBitmap invertedRotatedBitmap = generateBitmapFromImageData(
rotateImage(mImageData, mWidth, mHeight),
mHeight,
mWidth,
true,
top,
mWidth - scanWidth - left,
scanHeight,
scanWidth
);
try {
result = mMultiFormatReader.decodeWithState(invertedRotatedBitmap);
} catch (NotFoundException e3) {
// no barcode found
}
}
}
} catch (Throwable t) {
t.printStackTrace();
}
return result;
}
private byte[] rotateImage(byte[] imageData, int width, int height) {
byte[] rotated = new byte[imageData.length];
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
rotated[x * height + height - y - 1] = imageData[x + y * width];
}
}
return rotated;
}
@Override
protected void onPostExecute(Result result) {
super.onPostExecute(result);
if (result != null) {
mDelegate.onBarCodeRead(result, mWidth, mHeight);
}
mDelegate.onBarCodeScanningTaskCompleted();
}
private BinaryBitmap generateBitmapFromImageData(byte[] imageData, int width, int height, boolean inverse, int left, int top, int sWidth, int sHeight) {
PlanarYUVLuminanceSource source;
if (mLimitScanArea) {
source = new PlanarYUVLuminanceSource(
imageData, // byte[] yuvData
width, // int dataWidth
height, // int dataHeight
left, // int left
top, // int top
sWidth, // int width
sHeight, // int height
false // boolean reverseHorizontal
);
} else {
source = new PlanarYUVLuminanceSource(
imageData, // byte[] yuvData
width, // int dataWidth
height, // int dataHeight
0, // int left
0, // int top
width, // int width
height, // int height
false // boolean reverseHorizontal
);
}
if (inverse) {
return new BinaryBitmap(new HybridBinarizer(source.invert()));
} else {
return new BinaryBitmap(new HybridBinarizer(source));
}
}
}
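
A standalone sketch (not part of this commit) checking the index arithmetic in rotateImage above: pixel (x, y) of a width*height row-major plane moves to rotated[x * height + (height - 1 - y)], which is a 90-degree clockwise rotation, giving ZXing a second chance at barcodes that are perpendicular to the sensor:

public class RotateSketch {
  static byte[] rotate(byte[] data, int width, int height) {
    byte[] rotated = new byte[data.length];
    for (int y = 0; y < height; y++) {
      for (int x = 0; x < width; x++) {
        rotated[x * height + height - y - 1] = data[x + y * width];
      }
    }
    return rotated;
  }

  public static void main(String[] args) {
    // 3x2 input plane:   after 90-degree CW rotation (2x3):
    // 1 2 3              4 1
    // 4 5 6              5 2
    //                    6 3
    byte[] plane = {1, 2, 3, 4, 5, 6};
    System.out.println(java.util.Arrays.toString(rotate(plane, 3, 2))); // [4, 1, 5, 2, 6, 3]
  }
}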

View File

@ -0,0 +1,8 @@
package org.reactnative.camera.tasks;
import com.google.zxing.Result;
public interface BarCodeScannerAsyncTaskDelegate {
void onBarCodeRead(Result barCode, int width, int height);
void onBarCodeScanningTaskCompleted();
}

View File

@ -0,0 +1,13 @@
package org.reactnative.camera.tasks;
import com.facebook.react.bridge.WritableArray;
import org.reactnative.barcodedetector.RNBarcodeDetector;
public interface BarcodeDetectorAsyncTaskDelegate {
void onBarcodesDetected(WritableArray barcodes);
void onBarcodeDetectionError(RNBarcodeDetector barcodeDetector);
void onBarcodeDetectingTaskCompleted();
}

View File

@ -0,0 +1,11 @@
package org.reactnative.camera.tasks;
import org.reactnative.facedetector.RNFaceDetector;
import com.facebook.react.bridge.WritableArray;
public interface FaceDetectorAsyncTaskDelegate {
void onFacesDetected(WritableArray faces);
void onFaceDetectionError(RNFaceDetector faceDetector);
void onFaceDetectingTaskCompleted();
}

View File

@ -0,0 +1,7 @@
package org.reactnative.camera.tasks;
import com.facebook.react.bridge.WritableMap;
public interface PictureSavedDelegate {
void onPictureSaved(WritableMap response);
}

View File

@ -0,0 +1,360 @@
package org.reactnative.camera.tasks;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.net.Uri;
import android.os.AsyncTask;
import androidx.exifinterface.media.ExifInterface;
import android.util.Base64;
import org.reactnative.camera.RNCameraViewHelper;
import org.reactnative.camera.utils.RNFileUtils;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.ReadableType;
import com.facebook.react.bridge.WritableMap;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class ResolveTakenPictureAsyncTask extends AsyncTask<Void, Void, WritableMap> {
private static final String ERROR_TAG = "E_TAKING_PICTURE_FAILED";
private Promise mPromise;
private Bitmap mBitmap;
private byte[] mImageData;
private ReadableMap mOptions;
private File mCacheDirectory;
private int mDeviceOrientation;
private PictureSavedDelegate mPictureSavedDelegate;
public ResolveTakenPictureAsyncTask(byte[] imageData, Promise promise, ReadableMap options, File cacheDirectory, int deviceOrientation, PictureSavedDelegate delegate) {
mPromise = promise;
mOptions = options;
mImageData = imageData;
mCacheDirectory = cacheDirectory;
mDeviceOrientation = deviceOrientation;
mPictureSavedDelegate = delegate;
}
private int getQuality() {
return (int) (mOptions.getDouble("quality") * 100);
}
// loads bitmap only if necessary
private void loadBitmap() throws IOException {
if(mBitmap == null){
mBitmap = BitmapFactory.decodeByteArray(mImageData, 0, mImageData.length);
}
if(mBitmap == null){
throw new IOException("Failed to decode Image Bitmap");
}
}
@Override
protected WritableMap doInBackground(Void... voids) {
WritableMap response = Arguments.createMap();
ByteArrayInputStream inputStream = null;
ExifInterface exifInterface = null;
WritableMap exifData = null;
ReadableMap exifExtraData = null;
boolean orientationChanged = false;
response.putInt("deviceOrientation", mDeviceOrientation);
response.putInt("pictureOrientation", mOptions.hasKey("orientation") ? mOptions.getInt("orientation") : mDeviceOrientation);
try{
// This replaces the skipProcessing flag: we process only if needed, and in
// an orderly manner, so skipProcessing is the default behaviour when no
// options are given, which matches the iOS version more closely.
// All data is loaded lazily, only when needed, so this should not incur
// any overhead if it is never read or used.
inputStream = new ByteArrayInputStream(mImageData);
// Rotate the bitmap to the proper orientation if requested
if(mOptions.hasKey("fixOrientation") && mOptions.getBoolean("fixOrientation")){
exifInterface = new ExifInterface(inputStream);
// Get orientation of the image from mImageData via inputStream
int orientation = exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_UNDEFINED);
if(orientation != ExifInterface.ORIENTATION_UNDEFINED){
loadBitmap();
mBitmap = rotateBitmap(mBitmap, getImageRotation(orientation));
orientationChanged = true;
}
}
if (mOptions.hasKey("width")) {
loadBitmap();
mBitmap = resizeBitmap(mBitmap, mOptions.getInt("width"));
}
if (mOptions.hasKey("mirrorImage") && mOptions.getBoolean("mirrorImage")) {
loadBitmap();
mBitmap = flipHorizontally(mBitmap);
}
// EXIF code - we will adjust exif info later if we manipulated the bitmap
boolean writeExifToResponse = mOptions.hasKey("exif") && mOptions.getBoolean("exif");
// default to true if not provided so it is consistent with iOS and with what happens if no
// processing is done and the image is saved as is.
boolean writeExifToFile = true;
if (mOptions.hasKey("writeExif")) {
switch (mOptions.getType("writeExif")) {
case Boolean:
writeExifToFile = mOptions.getBoolean("writeExif");
break;
case Map:
exifExtraData = mOptions.getMap("writeExif");
writeExifToFile = true;
break;
}
}
// Read Exif data if needed
if (writeExifToResponse || writeExifToFile) {
// if we manipulated the image, or need to add extra data, or need to add it to the response,
// then we need to load the actual exif data.
// Otherwise we can just use whatever exif data we have right now in our byte array
if(mBitmap != null || exifExtraData != null || writeExifToResponse){
if(exifInterface == null){
exifInterface = new ExifInterface(inputStream);
}
exifData = RNCameraViewHelper.getExifData(exifInterface);
if(exifExtraData != null){
exifData.merge(exifExtraData);
}
}
// if we did anything to the bitmap, adjust exif
if(mBitmap != null){
exifData.putInt("width", mBitmap.getWidth());
exifData.putInt("height", mBitmap.getHeight());
if(orientationChanged){
exifData.putInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
}
}
// Write Exif data to the response if requested
if (writeExifToResponse) {
response.putMap("exif", exifData);
}
}
// final processing
// Based on whether or not we loaded the full bitmap into memory, final processing differs
if(mBitmap == null){
// set response dimensions. If we haven't read our bitmap, get it efficiently
// without loading the actual bitmap into memory
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeByteArray(mImageData, 0, mImageData.length, options);
if(options != null){
response.putInt("width", options.outWidth);
response.putInt("height", options.outHeight);
}
// save to file if requested
if (!mOptions.hasKey("doNotSave") || !mOptions.getBoolean("doNotSave")) {
// Prepare file output
File imageFile = new File(RNFileUtils.getOutputFilePath(mCacheDirectory, ".jpg"));
imageFile.createNewFile();
FileOutputStream fOut = new FileOutputStream(imageFile);
// Save byte array (it is already a JPEG)
fOut.write(mImageData);
fOut.flush();
fOut.close();
// update exif data if needed.
// Since we didn't modify the image, we only update if we have extra exif info
if (writeExifToFile && exifExtraData != null) {
ExifInterface fileExifInterface = new ExifInterface(imageFile.getAbsolutePath());
RNCameraViewHelper.setExifData(fileExifInterface, exifExtraData);
fileExifInterface.saveAttributes();
}
else if (!writeExifToFile){
// if we were requested to NOT store exif, we actually need to
// clear the exif tags
ExifInterface fileExifInterface = new ExifInterface(imageFile.getAbsolutePath());
RNCameraViewHelper.clearExifData(fileExifInterface);
fileExifInterface.saveAttributes();
}
// else: exif is unmodified, no need to update anything
// Return file system URI
String fileUri = Uri.fromFile(imageFile).toString();
response.putString("uri", fileUri);
}
if (mOptions.hasKey("base64") && mOptions.getBoolean("base64")) {
response.putString("base64", Base64.encodeToString(mImageData, Base64.NO_WRAP));
}
}
else{
// get response dimensions right from the bitmap if we have it
response.putInt("width", mBitmap.getWidth());
response.putInt("height", mBitmap.getHeight());
// Cache compressed image in imageStream
ByteArrayOutputStream imageStream = new ByteArrayOutputStream();
mBitmap.compress(Bitmap.CompressFormat.JPEG, getQuality(), imageStream);
// Write compressed image to file in cache directory unless otherwise specified
if (!mOptions.hasKey("doNotSave") || !mOptions.getBoolean("doNotSave")) {
String filePath = writeStreamToFile(imageStream);
// since we lost any exif data on bitmap creation, we only need
// to add it if requested
if (writeExifToFile && exifData != null) {
ExifInterface fileExifInterface = new ExifInterface(filePath);
RNCameraViewHelper.setExifData(fileExifInterface, exifData);
fileExifInterface.saveAttributes();
}
File imageFile = new File(filePath);
String fileUri = Uri.fromFile(imageFile).toString();
response.putString("uri", fileUri);
}
// Write base64-encoded image to the response if requested
if (mOptions.hasKey("base64") && mOptions.getBoolean("base64")) {
response.putString("base64", Base64.encodeToString(imageStream.toByteArray(), Base64.NO_WRAP));
}
}
return response;
}
catch (Resources.NotFoundException e) {
mPromise.reject(ERROR_TAG, "Documents directory of the app could not be found.", e);
e.printStackTrace();
}
catch (IOException e) {
mPromise.reject(ERROR_TAG, "An unknown I/O exception has occurred.", e);
e.printStackTrace();
}
finally {
try {
if (inputStream != null) {
inputStream.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
private Bitmap rotateBitmap(Bitmap source, int angle) {
Matrix matrix = new Matrix();
matrix.postRotate(angle);
return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
}
private Bitmap resizeBitmap(Bitmap bm, int newWidth) {
int width = bm.getWidth();
int height = bm.getHeight();
float scaleRatio = (float) newWidth / (float) width;
return Bitmap.createScaledBitmap(bm, newWidth, (int) (height * scaleRatio), true);
}
private Bitmap flipHorizontally(Bitmap source) {
Matrix matrix = new Matrix();
matrix.preScale(-1.0f, 1.0f);
return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
}
// Get rotation degrees from Exif orientation enum
private int getImageRotation(int orientation) {
int rotationDegrees = 0;
switch (orientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotationDegrees = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotationDegrees = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotationDegrees = 270;
break;
}
return rotationDegrees;
}
private String writeStreamToFile(ByteArrayOutputStream inputStream) throws IOException {
String outputPath = null;
IOException exception = null;
FileOutputStream outputStream = null;
try {
outputPath = RNFileUtils.getOutputFilePath(mCacheDirectory, ".jpg");
outputStream = new FileOutputStream(outputPath);
inputStream.writeTo(outputStream);
} catch (IOException e) {
e.printStackTrace();
exception = e;
} finally {
try {
if (outputStream != null) {
outputStream.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
if (exception != null) {
throw exception;
}
return outputPath;
}
@Override
protected void onPostExecute(WritableMap response) {
super.onPostExecute(response);
// If the response is not null everything went well and we can resolve the promise.
if (response != null) {
if (mOptions.hasKey("fastMode") && mOptions.getBoolean("fastMode")) {
WritableMap wrapper = Arguments.createMap();
wrapper.putInt("id", mOptions.getInt("id"));
wrapper.putMap("data", response);
mPictureSavedDelegate.onPictureSaved(wrapper);
} else {
mPromise.resolve(response);
}
}
}
}
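
A standalone sketch (not part of this commit, dimensions hypothetical) of the arithmetic behind resizeBitmap and getImageRotation above: the height scales by the same ratio as the requested width, preserving aspect ratio, and the EXIF orientation enum maps onto plain degrees:

public class ResizeSketch {
  public static void main(String[] args) {
    int width = 4000, height = 3000, newWidth = 800; // hypothetical capture size
    float scaleRatio = (float) newWidth / (float) width; // 0.2
    int newHeight = (int) (height * scaleRatio);
    System.out.println(newWidth + "x" + newHeight); // 800x600
    // getImageRotation: ORIENTATION_ROTATE_90 -> 90, _180 -> 180, _270 -> 270,
    // anything else (including ORIENTATION_NORMAL) -> 0
  }
}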

View File

@ -0,0 +1,8 @@
package org.reactnative.camera.tasks;
import com.facebook.react.bridge.WritableArray;
public interface TextRecognizerAsyncTaskDelegate {
void onTextRecognized(WritableArray serializedData);
void onTextRecognizerTaskCompleted();
}

View File

@ -0,0 +1,64 @@
package org.reactnative.camera.utils;
public class ImageDimensions {
private int mWidth;
private int mHeight;
private int mFacing;
private int mRotation;
public ImageDimensions(int width, int height) {
this(width, height, 0);
}
public ImageDimensions(int width, int height, int rotation) {
this(width, height, rotation, -1);
}
public ImageDimensions(int width, int height, int rotation, int facing) {
mWidth = width;
mHeight = height;
mFacing = facing;
mRotation = rotation;
}
public boolean isLandscape() {
return mRotation % 180 == 90;
}
public int getWidth() {
if (isLandscape()) {
return mHeight;
}
return mWidth;
}
public int getHeight() {
if (isLandscape()) {
return mWidth;
}
return mHeight;
}
public int getRotation() {
return mRotation;
}
public int getFacing() {
return mFacing;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof ImageDimensions) {
ImageDimensions otherDimensions = (ImageDimensions) obj;
return (otherDimensions.getWidth() == getWidth() &&
otherDimensions.getHeight() == getHeight() &&
otherDimensions.getFacing() == getFacing() &&
otherDimensions.getRotation() == getRotation());
} else {
return super.equals(obj);
}
}
}
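
A short usage sketch (not part of this commit): with a 90- or 270-degree rotation the reported width and height are swapped, so callers always see upright dimensions regardless of how the sensor frame arrived:

import org.reactnative.camera.utils.ImageDimensions;

public class DimensionsSketch {
  public static void main(String[] args) {
    ImageDimensions d = new ImageDimensions(1280, 720, 90); // sensor frame rotated 90 degrees
    System.out.println(d.isLandscape()); // true  (90 % 180 == 90)
    System.out.println(d.getWidth());    // 720   (swapped)
    System.out.println(d.getHeight());   // 1280  (swapped)
  }
}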

View File

@ -0,0 +1,15 @@
package org.reactnative.camera.utils;
public class ObjectUtils {
/*
* Replacement for Objects.equals, which is only available on Android API 19 and above
*/
public static boolean equals(Object o1, Object o2) {
if (o1 == null && o2 == null) return true;
if (o1 == null) return false;
return o1.equals(o2);
}
}
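
A short usage sketch (not part of this commit) of the null-safe comparison:

ObjectUtils.equals(null, null);   // true
ObjectUtils.equals("4:3", null);  // false
ObjectUtils.equals("4:3", "4:3"); // true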

View File

@ -0,0 +1,34 @@
package org.reactnative.camera.utils;
import android.net.Uri;
import java.io.File;
import java.io.IOException;
import java.util.UUID;
/**
* Created by jgfidelis on 23/01/18.
*/
public class RNFileUtils {
public static File ensureDirExists(File dir) throws IOException {
if (!(dir.isDirectory() || dir.mkdirs())) {
throw new IOException("Couldn't create directory '" + dir + "'");
}
return dir;
}
public static String getOutputFilePath(File directory, String extension) throws IOException {
ensureDirExists(directory);
String filename = UUID.randomUUID().toString();
return directory + File.separator + filename + extension;
}
public static Uri uriFromFile(File file) {
return Uri.fromFile(file);
}
}
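
A short usage sketch (not part of this commit), assuming an Android Context named context is in scope: the directory is created if missing, and a random UUID file name with the requested extension is returned:

File cacheDir = new File(context.getCacheDir(), "Camera"); // hypothetical scoped cache dir
String path = RNFileUtils.getOutputFilePath(cacheDir, ".jpg"); // .../Camera/<uuid>.jpg
Uri uri = RNFileUtils.uriFromFile(new File(path)); // file:// URI handed back to JS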

View File

@ -0,0 +1,27 @@
package org.reactnative.camera.utils;
import android.content.Context;
import java.io.File;
/**
* Created by jgfidelis on 23/01/18.
*/
public class ScopedContext {
private File cacheDirectory = null;
public ScopedContext(Context context) {
createCacheDirectory(context);
}
public void createCacheDirectory(Context context) {
cacheDirectory = new File(context.getCacheDir() + "/Camera/");
}
public File getCacheDirectory() {
return cacheDirectory;
}
}

View File

@ -0,0 +1,28 @@
package org.reactnative.frame;
import org.reactnative.camera.utils.ImageDimensions;
import com.google.android.gms.vision.Frame;
/**
* Wrapper around Frame allowing us to track Frame dimensions.
* Tracking dimensions is used in RNFaceDetector and RNBarcodeDetector to provide painless FaceDetector/BarcodeDetector recreation
* when image dimensions change.
*/
public class RNFrame {
private Frame mFrame;
private ImageDimensions mDimensions;
public RNFrame(Frame frame, ImageDimensions dimensions) {
mFrame = frame;
mDimensions = dimensions;
}
public Frame getFrame() {
return mFrame;
}
public ImageDimensions getDimensions() {
return mDimensions;
}
}

View File

@ -0,0 +1,43 @@
package org.reactnative.frame;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import org.reactnative.camera.utils.ImageDimensions;
import com.google.android.gms.vision.Frame;
import java.nio.ByteBuffer;
public class RNFrameFactory {
public static RNFrame buildFrame(byte[] bitmapData, int width, int height, int rotation) {
Frame.Builder builder = new Frame.Builder();
ByteBuffer byteBuffer = ByteBuffer.wrap(bitmapData);
builder.setImageData(byteBuffer, width, height, ImageFormat.NV21);
switch (rotation) {
case 90:
builder.setRotation(Frame.ROTATION_90);
break;
case 180:
builder.setRotation(Frame.ROTATION_180);
break;
case 270:
builder.setRotation(Frame.ROTATION_270);
break;
default:
builder.setRotation(Frame.ROTATION_0);
}
ImageDimensions dimensions = new ImageDimensions(width, height, rotation);
return new RNFrame(builder.build(), dimensions);
}
public static RNFrame buildFrame(Bitmap bitmap) {
Frame.Builder builder = new Frame.Builder();
builder.setBitmap(bitmap);
ImageDimensions dimensions = new ImageDimensions(bitmap.getWidth(), bitmap.getHeight());
return new RNFrame(builder.build(), dimensions);
}
}
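
A short usage sketch (not part of this commit), assuming previewData holds NV21 bytes from the camera preview callback: the factory wraps them in a GMV Frame and records rotation-aware dimensions so detectors can be recreated when they change:

RNFrame frame = RNFrameFactory.buildFrame(previewData, 1280, 720, 90); // hypothetical values
int uprightWidth = frame.getDimensions().getWidth(); // 720, swapped because rotation is 90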

View File

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
Copyright (C) 2016 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<merge xmlns:android="http://schemas.android.com/apk/res/android">
<TextureView
android:id="@+id/texture_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:gravity="center"
/>
</merge>

View File

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
Copyright (C) 2016 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<merge xmlns:android="http://schemas.android.com/apk/res/android">
<SurfaceView
android:id="@+id/surface_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:gravity="center"
/>
</merge>

View File

@ -0,0 +1,58 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
Copyright (C) 2016 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<declare-styleable name="CameraView">
<!--
Set this to true if you want the CameraView to adjust its bounds to preserve the aspect
ratio of its camera preview.
-->
<attr name="android:adjustViewBounds"/>
<!-- Direction the camera faces relative to device screen. -->
<attr name="facing" format="enum">
<!-- The camera device faces the opposite direction as the device's screen. -->
<enum name="back" value="0"/>
<!-- The camera device faces the same direction as the device's screen. -->
<enum name="front" value="1"/>
</attr>
<!-- Aspect ratio of camera preview and pictures. -->
<attr name="aspectRatio" format="string"/>
<!-- Continuous auto focus mode. -->
<attr name="autoFocus" format="boolean"/>
<!-- The flash mode. -->
<attr name="flash" format="enum">
<!-- Flash will not be fired. -->
<enum name="off" value="0"/>
<!--
Flash will always be fired during snapshot.
The flash may also be fired during preview or auto-focus depending on the driver.
-->
<enum name="on" value="1"/>
<!--
Constant emission of light during preview, auto-focus and snapshot.
This can also be used for video recording.
-->
<enum name="torch" value="2"/>
<!--
Flash will be fired automatically when required.
The flash may be fired during preview, auto-focus, or snapshot depending on the
driver.
-->
<enum name="auto" value="3"/>
<!--
Flash will be fired in red-eye reduction mode.
-->
<enum name="redEye" value="4"/>
</attr>
</declare-styleable>
</resources>

View File

@ -0,0 +1,21 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
Copyright (C) 2016 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<public name="facing" type="attr"/>
<public name="aspectRatio" type="attr"/>
<public name="autoFocus" type="attr"/>
<public name="flash" type="attr"/>
<public name="Widget.CameraView" type="style"/>
</resources>

View File

@ -0,0 +1,24 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
Copyright (C) 2016 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<style name="Widget.CameraView" parent="android:Widget">
<item name="android:adjustViewBounds">false</item>
<item name="facing">back</item>
<item name="aspectRatio">4:3</item>
<item name="autoFocus">true</item>
<item name="flash">auto</item>
</style>
</resources>

View File

@ -0,0 +1,95 @@
package org.reactnative.barcodedetector;
import android.util.SparseArray;
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcode;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class BarcodeFormatUtils {
public static final SparseArray<String> FORMATS;
public static final Map<String, Integer> REVERSE_FORMATS;
public static final SparseArray<String> TYPES;
public static final Map<String, Integer> REVERSE_TYPES;
private static final int UNKNOWN_FORMAT_INT = FirebaseVisionBarcode.FORMAT_UNKNOWN;
private static final String UNKNOWN_TYPE_STRING = "UNKNOWN_TYPE";
private static final String UNKNOWN_FORMAT_STRING = "UNKNOWN_FORMAT";
static {
// Initialize integer to string map
SparseArray<String> map = new SparseArray<>();
map.put(FirebaseVisionBarcode.FORMAT_CODE_128, "CODE_128");
map.put(FirebaseVisionBarcode.FORMAT_CODE_39, "CODE_39");
map.put(FirebaseVisionBarcode.FORMAT_CODE_93, "CODE_93");
map.put(FirebaseVisionBarcode.FORMAT_CODABAR, "CODABAR");
map.put(FirebaseVisionBarcode.FORMAT_DATA_MATRIX, "DATA_MATRIX");
map.put(FirebaseVisionBarcode.FORMAT_EAN_13, "EAN_13");
map.put(FirebaseVisionBarcode.FORMAT_EAN_8, "EAN_8");
map.put(FirebaseVisionBarcode.FORMAT_ITF, "ITF");
map.put(FirebaseVisionBarcode.FORMAT_QR_CODE, "QR_CODE");
map.put(FirebaseVisionBarcode.FORMAT_UPC_A, "UPC_A");
map.put(FirebaseVisionBarcode.FORMAT_UPC_E, "UPC_E");
map.put(FirebaseVisionBarcode.FORMAT_PDF417, "PDF417");
map.put(FirebaseVisionBarcode.FORMAT_AZTEC, "AZTEC");
map.put(FirebaseVisionBarcode.FORMAT_ALL_FORMATS, "ALL");
map.put(FirebaseVisionBarcode.FORMAT_UPC_A, "UPC_A");
map.put(-1, "None");
FORMATS = map;
// Initialize string to integer map
Map<String, Integer> rmap = new HashMap<>();
for (int i = 0; i < map.size(); i++) {
rmap.put(map.valueAt(i), map.keyAt(i));
}
REVERSE_FORMATS = Collections.unmodifiableMap(rmap);
}
static {
// Initialize integer to string map
SparseArray<String> map = new SparseArray<>();
map.put(FirebaseVisionBarcode.TYPE_CALENDAR_EVENT, "CALENDAR_EVENT");
map.put(FirebaseVisionBarcode.TYPE_CONTACT_INFO, "CONTACT_INFO");
map.put(FirebaseVisionBarcode.TYPE_DRIVER_LICENSE, "DRIVER_LICENSE");
map.put(FirebaseVisionBarcode.TYPE_EMAIL, "EMAIL");
map.put(FirebaseVisionBarcode.TYPE_GEO, "GEO");
map.put(FirebaseVisionBarcode.TYPE_ISBN, "ISBN");
map.put(FirebaseVisionBarcode.TYPE_PHONE, "PHONE");
map.put(FirebaseVisionBarcode.TYPE_PRODUCT, "PRODUCT");
map.put(FirebaseVisionBarcode.TYPE_SMS, "SMS");
map.put(FirebaseVisionBarcode.TYPE_TEXT, "TEXT");
map.put(FirebaseVisionBarcode.TYPE_URL, "URL");
map.put(FirebaseVisionBarcode.TYPE_WIFI, "WIFI");
map.put(-1, "None");
TYPES = map;
// Initialize string to integer map
Map<String, Integer> rmap = new HashMap<>();
for (int i = 0; i < map.size(); i++) {
rmap.put(map.valueAt(i), map.keyAt(i));
}
REVERSE_TYPES = Collections.unmodifiableMap(rmap);
}
public static String get(int format) {
return TYPES.get(format, UNKNOWN_TYPE_STRING);
}
public static String getFormat(int format) {
return FORMATS.get(format, UNKNOWN_FORMAT_STRING);
}
public static int get(String format) {
if (REVERSE_FORMATS.containsKey(format)) {
return REVERSE_FORMATS.get(format);
}
return UNKNOWN_FORMAT_INT;
}
}
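
A short usage sketch (not part of this commit): the integer constants from FirebaseVisionBarcode map to the strings surfaced to JS, and unknown inputs fall back to the UNKNOWN_* defaults:

BarcodeFormatUtils.getFormat(FirebaseVisionBarcode.FORMAT_QR_CODE); // "QR_CODE"
BarcodeFormatUtils.get(FirebaseVisionBarcode.TYPE_WIFI);            // "WIFI"
BarcodeFormatUtils.get("QR_CODE");      // FirebaseVisionBarcode.FORMAT_QR_CODE
BarcodeFormatUtils.get("no-such-name"); // FirebaseVisionBarcode.FORMAT_UNKNOWN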

View File

@ -0,0 +1,67 @@
package org.reactnative.barcodedetector;
import android.content.Context;
import android.util.Log;
import com.google.firebase.ml.vision.FirebaseVision;
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcode;
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetector;
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetectorOptions;
public class RNBarcodeDetector {
public static int NORMAL_MODE = 0;
public static int ALTERNATE_MODE = 1;
public static int INVERTED_MODE = 2;
public static int ALL_FORMATS = FirebaseVisionBarcode.FORMAT_ALL_FORMATS;
private FirebaseVisionBarcodeDetector mBarcodeDetector = null;
private FirebaseVisionBarcodeDetectorOptions.Builder mBuilder;
private int mBarcodeType = FirebaseVisionBarcode.FORMAT_ALL_FORMATS;
public RNBarcodeDetector(Context context) {
mBuilder = new FirebaseVisionBarcodeDetectorOptions.Builder().setBarcodeFormats(mBarcodeType);
}
public boolean isOperational() {
// Legacy API carried over from GMV; the Firebase ML Kit detector is always operational
return true;
}
public FirebaseVisionBarcodeDetector getDetector() {
if (mBarcodeDetector == null) {
createBarcodeDetector();
}
return mBarcodeDetector;
}
public void setBarcodeType(int barcodeType) {
if (barcodeType != mBarcodeType) {
release();
mBuilder.setBarcodeFormats(barcodeType);
mBarcodeType = barcodeType;
}
}
public void release() {
if (mBarcodeDetector != null) {
try {
mBarcodeDetector.close();
} catch (Exception e) {
Log.e("RNCamera", "Attempt to close BarcodeDetector failed");
}
mBarcodeDetector = null;
}
}
private void createBarcodeDetector() {
FirebaseVisionBarcodeDetectorOptions options = mBuilder.build();
mBarcodeDetector = FirebaseVision.getInstance()
.getVisionBarcodeDetector(options);
}
}
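
A short usage sketch (not part of this commit), assuming an Android Context named context: the Firebase detector is built lazily, and changing the formats releases the old instance so the next getDetector() call rebuilds it with the new options:

RNBarcodeDetector detector = new RNBarcodeDetector(context);
detector.setBarcodeType(FirebaseVisionBarcode.FORMAT_QR_CODE); // releases any old detector
FirebaseVisionBarcodeDetector qrOnly = detector.getDetector(); // built with QR-only options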

View File

@ -0,0 +1,354 @@
package org.reactnative.camera.tasks;
//import android.graphics.Point;
import android.graphics.Rect;
import android.util.Log;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcode;
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetector;
import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata;
import org.reactnative.barcodedetector.BarcodeFormatUtils;
import org.reactnative.barcodedetector.RNBarcodeDetector;
import org.reactnative.camera.utils.ImageDimensions;
import java.util.List;
public class BarcodeDetectorAsyncTask extends android.os.AsyncTask<Void, Void, Void> {
private byte[] mImageData;
private int mWidth;
private int mHeight;
private int mRotation;
private RNBarcodeDetector mBarcodeDetector;
private BarcodeDetectorAsyncTaskDelegate mDelegate;
private double mScaleX;
private double mScaleY;
private ImageDimensions mImageDimensions;
private int mPaddingLeft;
private int mPaddingTop;
private String TAG = "RNCamera";
public BarcodeDetectorAsyncTask(
BarcodeDetectorAsyncTaskDelegate delegate,
RNBarcodeDetector barcodeDetector,
byte[] imageData,
int width,
int height,
int rotation,
float density,
int facing,
int viewWidth,
int viewHeight,
int viewPaddingLeft,
int viewPaddingTop
) {
mImageData = imageData;
mWidth = width;
mHeight = height;
mRotation = rotation;
mDelegate = delegate;
mBarcodeDetector = barcodeDetector;
mImageDimensions = new ImageDimensions(width, height, rotation, facing);
mScaleX = (double) (viewWidth) / (mImageDimensions.getWidth() * density);
mScaleY = 1 / density;
mPaddingLeft = viewPaddingLeft;
mPaddingTop = viewPaddingTop;
}
@Override
protected Void doInBackground(Void... ignored) {
if (isCancelled() || mDelegate == null || mBarcodeDetector == null) {
return null;
}
final FirebaseVisionImageMetadata metadata = new FirebaseVisionImageMetadata.Builder()
.setWidth(mWidth)
.setHeight(mHeight)
.setFormat(FirebaseVisionImageMetadata.IMAGE_FORMAT_YV12)
.setRotation(getFirebaseRotation())
.build();
FirebaseVisionImage image = FirebaseVisionImage.fromByteArray(mImageData, metadata);
FirebaseVisionBarcodeDetector barcode = mBarcodeDetector.getDetector();
barcode.detectInImage(image)
.addOnSuccessListener(new OnSuccessListener<List<FirebaseVisionBarcode>>() {
@Override
public void onSuccess(List<FirebaseVisionBarcode> barcodes) {
WritableArray serializedBarcodes = serializeEventData(barcodes);
mDelegate.onBarcodesDetected(serializedBarcodes);
mDelegate.onBarcodeDetectingTaskCompleted();
}
})
.addOnFailureListener(new OnFailureListener() {
@Override
public void onFailure(Exception e) {
Log.e(TAG, "Text recognition task failed" + e);
mDelegate.onBarcodeDetectingTaskCompleted();
}
});
return null;
}
private int getFirebaseRotation(){
int result;
switch (mRotation) {
case 0:
result = FirebaseVisionImageMetadata.ROTATION_0;
break;
case 90:
result = FirebaseVisionImageMetadata.ROTATION_90;
break;
case 180:
result = FirebaseVisionImageMetadata.ROTATION_180;
break;
case -90:
result = FirebaseVisionImageMetadata.ROTATION_270;
break;
default:
result = FirebaseVisionImageMetadata.ROTATION_0;
Log.e(TAG, "Bad rotation value: " + mRotation);
}
return result;
}
private WritableArray serializeEventData(List<FirebaseVisionBarcode> barcodes) {
WritableArray barcodesList = Arguments.createArray();
for (FirebaseVisionBarcode barcode: barcodes) {
// TODO implement position and data from all barcode types
Rect bounds = barcode.getBoundingBox();
// Point[] corners = barcode.getCornerPoints();
String rawValue = barcode.getRawValue();
int valueType = barcode.getValueType();
int valueFormat = barcode.getFormat();
WritableMap serializedBarcode = Arguments.createMap();
switch (valueType) {
case FirebaseVisionBarcode.TYPE_WIFI:
String ssid = barcode.getWifi().getSsid();
String password = barcode.getWifi().getPassword();
int type = barcode.getWifi().getEncryptionType();
String typeString = "UNKNOWN";
switch (type) {
case FirebaseVisionBarcode.WiFi.TYPE_OPEN:
typeString = "Open";
break;
case FirebaseVisionBarcode.WiFi.TYPE_WEP:
typeString = "WEP";
break;
case FirebaseVisionBarcode.WiFi.TYPE_WPA:
typeString = "WPA";
break;
}
serializedBarcode.putString("encryptionType", typeString);
serializedBarcode.putString("password", password);
serializedBarcode.putString("ssid", ssid);
break;
case FirebaseVisionBarcode.TYPE_URL:
String title = barcode.getUrl().getTitle();
String url = barcode.getUrl().getUrl();
serializedBarcode.putString("url", url);
serializedBarcode.putString("title", title);
break;
case FirebaseVisionBarcode.TYPE_SMS:
String message = barcode.getSms().getMessage();
String phoneNumber = barcode.getSms().getPhoneNumber();
serializedBarcode.putString("message", message);
serializedBarcode.putString("title", phoneNumber);
break;
case FirebaseVisionBarcode.TYPE_PHONE:
String number = barcode.getPhone().getNumber();
int typePhone = barcode.getPhone().getType();
serializedBarcode.putString("number", number);
String typeStringPhone = getPhoneType(typePhone);
serializedBarcode.putString("phoneType", typeStringPhone);
break;
case FirebaseVisionBarcode.TYPE_CALENDAR_EVENT:
serializedBarcode.putString("description", barcode.getCalendarEvent().getDescription());
serializedBarcode.putString("location", barcode.getCalendarEvent().getLocation());
serializedBarcode.putString("organizer", barcode.getCalendarEvent().getOrganizer());
serializedBarcode.putString("status", barcode.getCalendarEvent().getStatus());
serializedBarcode.putString("summary", barcode.getCalendarEvent().getSummary());
FirebaseVisionBarcode.CalendarDateTime start = barcode.getCalendarEvent().getStart();
FirebaseVisionBarcode.CalendarDateTime end = barcode.getCalendarEvent().getEnd();
if (start != null) {
serializedBarcode.putString("start", start.getRawValue());
}
if (end != null) {
serializedBarcode.putString("end", start.getRawValue());
}
break;
case FirebaseVisionBarcode.TYPE_DRIVER_LICENSE:
serializedBarcode.putString("addressCity", barcode.getDriverLicense().getAddressCity());
serializedBarcode.putString("addressState", barcode.getDriverLicense().getAddressState());
serializedBarcode.putString("addressStreet", barcode.getDriverLicense().getAddressStreet());
serializedBarcode.putString("addressZip", barcode.getDriverLicense().getAddressZip());
serializedBarcode.putString("birthDate", barcode.getDriverLicense().getBirthDate());
serializedBarcode.putString("documentType", barcode.getDriverLicense().getDocumentType());
serializedBarcode.putString("expiryDate", barcode.getDriverLicense().getExpiryDate());
serializedBarcode.putString("firstName", barcode.getDriverLicense().getFirstName());
serializedBarcode.putString("middleName", barcode.getDriverLicense().getMiddleName());
serializedBarcode.putString("lastName", barcode.getDriverLicense().getLastName());
serializedBarcode.putString("gender", barcode.getDriverLicense().getGender());
serializedBarcode.putString("issueDate", barcode.getDriverLicense().getIssueDate());
serializedBarcode.putString("issuingCountry", barcode.getDriverLicense().getIssuingCountry());
serializedBarcode.putString("licenseNumber", barcode.getDriverLicense().getLicenseNumber());
break;
case FirebaseVisionBarcode.TYPE_GEO:
serializedBarcode.putDouble("latitude", barcode.getGeoPoint().getLat());
serializedBarcode.putDouble("longitude", barcode.getGeoPoint().getLng());
break;
case FirebaseVisionBarcode.TYPE_CONTACT_INFO:
serializedBarcode.putString("organization", barcode.getContactInfo().getOrganization());
serializedBarcode.putString("title", barcode.getContactInfo().getTitle());
FirebaseVisionBarcode.PersonName name = barcode.getContactInfo().getName();
if (name != null) {
serializedBarcode.putString("firstName", name.getFirst());
serializedBarcode.putString("lastName", name.getLast());
serializedBarcode.putString("middleName", name.getMiddle());
serializedBarcode.putString("formattedName", name.getFormattedName());
serializedBarcode.putString("prefix", name.getPrefix());
serializedBarcode.putString("pronunciation", name.getPronunciation());
serializedBarcode.putString("suffix", name.getSuffix());
}
List<FirebaseVisionBarcode.Phone> phones = barcode.getContactInfo().getPhones();
WritableArray phonesList = Arguments.createArray();
for (FirebaseVisionBarcode.Phone phone : phones) {
WritableMap phoneObject = Arguments.createMap();
phoneObject.putString("number", phone.getNumber());
phoneObject.putString("phoneType", getPhoneType(phone.getType()));
phonesList.pushMap(phoneObject);
}
serializedBarcode.putArray("phones", phonesList);
List<FirebaseVisionBarcode.Address> addresses = barcode.getContactInfo().getAddresses();
WritableArray addressesList = Arguments.createArray();
for (FirebaseVisionBarcode.Address address : addresses) {
WritableMap addressesData = Arguments.createMap();
WritableArray addressesLinesList = Arguments.createArray();
String[] addressesLines = address.getAddressLines();
for (String line : addressesLines) {
addressesLinesList.pushString(line);
}
addressesData.putArray("addressLines", addressesLinesList);
int addressType = address.getType();
String addressTypeString = "UNKNOWN";
switch (addressType) {
case FirebaseVisionBarcode.Address.TYPE_WORK:
addressTypeString = "Work";
break;
case FirebaseVisionBarcode.Address.TYPE_HOME:
addressTypeString = "Home";
break;
}
addressesData.putString("addressType", addressTypeString);
addressesList.pushMap(addressesData);
}
serializedBarcode.putArray("addresses", addressesList);
List<FirebaseVisionBarcode.Email> emails = barcode.getContactInfo().getEmails();
WritableArray emailsList = Arguments.createArray();
for (FirebaseVisionBarcode.Email email : emails) {
WritableMap emailData = processEmail(email);
emailsList.pushMap(emailData);
}
serializedBarcode.putArray("emails", emailsList);
String[] urls = barcode.getContactInfo().getUrls();
WritableArray urlsList = Arguments.createArray();
for (String urlContact : urls) {
urlsList.pushString(urlContact);
}
serializedBarcode.putArray("urls", urlsList);
break;
case FirebaseVisionBarcode.TYPE_EMAIL:
WritableMap emailData = processEmail(barcode.getEmail());
serializedBarcode.putMap("email", emailData);
break;
}
serializedBarcode.putString("data", barcode.getDisplayValue());
serializedBarcode.putString("dataRaw", rawValue);
serializedBarcode.putString("type", BarcodeFormatUtils.get(valueType));
serializedBarcode.putString("format", BarcodeFormatUtils.getFormat(valueFormat));
serializedBarcode.putMap("bounds", processBounds(bounds));
barcodesList.pushMap(serializedBarcode);
}
return barcodesList;
}
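// Serializes an ML Kit barcode email payload (address, body, subject, type)
// into a WritableMap for the JS side.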
private WritableMap processEmail(FirebaseVisionBarcode.Email email) {
WritableMap emailData = Arguments.createMap();
emailData.putString("address", email.getAddress());
emailData.putString("body", email.getBody());
emailData.putString("subject", email.getSubject());
int emailType = email.getType();
String emailTypeString = "UNKNOWN";
switch (emailType) {
case FirebaseVisionBarcode.Email.TYPE_WORK:
emailTypeString = "Work";
break;
case FirebaseVisionBarcode.Email.TYPE_HOME:
emailTypeString = "Home";
break;
}
emailData.putString("emailType", emailTypeString);
return emailData;
}
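// Maps an ML Kit phone-type constant to the human-readable label exposed to JS.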
private String getPhoneType(int typePhone) {
String typeStringPhone = "UNKNOWN";
switch (typePhone) {
case FirebaseVisionBarcode.Phone.TYPE_WORK:
typeStringPhone = "Work";
break;
case FirebaseVisionBarcode.Phone.TYPE_HOME:
typeStringPhone = "Home";
break;
case FirebaseVisionBarcode.Phone.TYPE_FAX:
typeStringPhone = "Fax";
break;
case FirebaseVisionBarcode.Phone.TYPE_MOBILE:
typeStringPhone = "Mobile";
break;
}
return typeStringPhone;
}
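// Converts the detected bounding Rect from camera-frame coordinates into the
// scaled view coordinate space: x is nudged by half the horizontal padding
// depending on which side of the view centre the barcode sits, and y is offset
// by the full top padding.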
private WritableMap processBounds(Rect frame) {
WritableMap origin = Arguments.createMap();
int x = frame.left;
int y = frame.top;
if (frame.left < mWidth / 2) {
x = x + mPaddingLeft / 2;
} else if (frame.left > mWidth / 2) {
x = x - mPaddingLeft / 2;
}
y = y + mPaddingTop;
origin.putDouble("x", x * mScaleX);
origin.putDouble("y", y * mScaleY);
WritableMap size = Arguments.createMap();
size.putDouble("width", frame.width() * mScaleX);
size.putDouble("height", frame.height() * mScaleY);
WritableMap bounds = Arguments.createMap();
bounds.putMap("origin", origin);
bounds.putMap("size", size);
return bounds;
}
}

View File

@ -0,0 +1,139 @@
package org.reactnative.camera.tasks;
import android.util.Log;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.google.android.cameraview.CameraView;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata;
import com.google.firebase.ml.vision.face.FirebaseVisionFace;
import com.google.firebase.ml.vision.face.FirebaseVisionFaceDetector;
import org.reactnative.camera.utils.ImageDimensions;
import org.reactnative.facedetector.FaceDetectorUtils;
import org.reactnative.facedetector.RNFaceDetector;
import java.util.List;
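// Background task that runs ML Kit face detection on a single camera frame and
// reports the serialized faces back through the delegate on completion.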
public class FaceDetectorAsyncTask extends android.os.AsyncTask<Void, Void, Void> {
private byte[] mImageData;
private int mWidth;
private int mHeight;
private int mRotation;
private RNFaceDetector mFaceDetector;
private FaceDetectorAsyncTaskDelegate mDelegate;
private double mScaleX;
private double mScaleY;
private ImageDimensions mImageDimensions;
private int mPaddingLeft;
private int mPaddingTop;
private String TAG = "RNCamera";
public FaceDetectorAsyncTask(
FaceDetectorAsyncTaskDelegate delegate,
RNFaceDetector faceDetector,
byte[] imageData,
int width,
int height,
int rotation,
float density,
int facing,
int viewWidth,
int viewHeight,
int viewPaddingLeft,
int viewPaddingTop
) {
mImageData = imageData;
mWidth = width;
mHeight = height;
mRotation = rotation;
mDelegate = delegate;
mFaceDetector = faceDetector;
mImageDimensions = new ImageDimensions(width, height, rotation, facing);
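// Scale factors convert camera-image coordinates into view coordinates,
// accounting for screen density.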
mScaleX = (double) (viewWidth) / (mImageDimensions.getWidth() * density);
mScaleY = (double) (viewHeight) / (mImageDimensions.getHeight() * density);
mPaddingLeft = viewPaddingLeft;
mPaddingTop = viewPaddingTop;
}
@Override
protected Void doInBackground(Void... ignored) {
if (isCancelled() || mDelegate == null || mFaceDetector == null) {
return null;
}
FirebaseVisionImageMetadata metadata = new FirebaseVisionImageMetadata.Builder()
.setWidth(mWidth)
.setHeight(mHeight)
.setFormat(FirebaseVisionImageMetadata.IMAGE_FORMAT_YV12)
.setRotation(getFirebaseRotation())
.build();
FirebaseVisionImage image = FirebaseVisionImage.fromByteArray(mImageData, metadata);
FirebaseVisionFaceDetector detector = mFaceDetector.getDetector();
detector.detectInImage(image)
.addOnSuccessListener(
new OnSuccessListener<List<FirebaseVisionFace>>() {
@Override
public void onSuccess(List<FirebaseVisionFace> faces) {
WritableArray facesList = serializeEventData(faces);
mDelegate.onFacesDetected(facesList);
mDelegate.onFaceDetectingTaskCompleted();
}
})
.addOnFailureListener(
new OnFailureListener() {
@Override
public void onFailure(Exception e) {
Log.e(TAG, "Text recognition task failed" + e);
mDelegate.onFaceDetectingTaskCompleted();
}
});
return null;
}
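// Maps the rotation reported in degrees to the fixed FirebaseVisionImageMetadata
// ROTATION_* constants that ML Kit accepts.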
private int getFirebaseRotation() {
int result;
switch (mRotation) {
case 0:
result = FirebaseVisionImageMetadata.ROTATION_0;
break;
case 90:
result = FirebaseVisionImageMetadata.ROTATION_90;
break;
case 180:
result = FirebaseVisionImageMetadata.ROTATION_180;
break;
case 270:
case -90:
result = FirebaseVisionImageMetadata.ROTATION_270;
break;
default:
result = FirebaseVisionImageMetadata.ROTATION_0;
Log.e(TAG, "Bad rotation value: " + mRotation);
}
return result;
}
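// Front-camera frames are mirrored in the preview, so face coordinates are
// flipped horizontally; for the back camera only the angles are adjusted.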
private WritableArray serializeEventData(List<FirebaseVisionFace> faces) {
WritableArray facesList = Arguments.createArray();
for (FirebaseVisionFace face : faces) {
WritableMap serializedFace = FaceDetectorUtils.serializeFace(face, mScaleX, mScaleY, mWidth, mHeight, mPaddingLeft, mPaddingTop);
if (mImageDimensions.getFacing() == CameraView.FACING_FRONT) {
serializedFace = FaceDetectorUtils.rotateFaceX(serializedFace, mImageDimensions.getWidth(), mScaleX);
} else {
serializedFace = FaceDetectorUtils.changeAnglesDirection(serializedFace);
}
facesList.pushMap(serializedFace);
}
return facesList;
}
}

View File

@ -0,0 +1,267 @@
package org.reactnative.camera.tasks;
import android.graphics.Rect;
import android.util.Log;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.ThemedReactContext;
import com.google.android.cameraview.CameraView;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.ml.vision.FirebaseVision;
import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata;
import com.google.firebase.ml.vision.text.FirebaseVisionText;
import com.google.firebase.ml.vision.text.FirebaseVisionTextRecognizer;
import org.reactnative.camera.utils.ImageDimensions;
import java.util.List;
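// Background task that runs ML Kit on-device text recognition on a camera frame
// and serializes the resulting block/line/element hierarchy for the JS side.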
public class TextRecognizerAsyncTask extends android.os.AsyncTask<Void, Void, Void> {
private TextRecognizerAsyncTaskDelegate mDelegate;
private ThemedReactContext mThemedReactContext;
private byte[] mImageData;
private int mWidth;
private int mHeight;
private int mRotation;
private double mScaleX;
private double mScaleY;
private ImageDimensions mImageDimensions;
private int mPaddingLeft;
private int mPaddingTop;
private String TAG = "RNCamera";
public TextRecognizerAsyncTask(
TextRecognizerAsyncTaskDelegate delegate,
ThemedReactContext themedReactContext,
byte[] imageData,
int width,
int height,
int rotation,
float density,
int facing,
int viewWidth,
int viewHeight,
int viewPaddingLeft,
int viewPaddingTop
) {
mDelegate = delegate;
mImageData = imageData;
mWidth = width;
mHeight = height;
mRotation = rotation;
mImageDimensions = new ImageDimensions(width, height, rotation, facing);
mScaleX = (double) (viewWidth) / (mImageDimensions.getWidth() * density);
mScaleY = (double) (viewHeight) / (mImageDimensions.getHeight() * density);
mPaddingLeft = viewPaddingLeft;
mPaddingTop = viewPaddingTop;
}
@Override
protected Void doInBackground(Void... ignored) {
if (isCancelled() || mDelegate == null) {
return null;
}
FirebaseVisionImageMetadata metadata = new FirebaseVisionImageMetadata.Builder()
.setWidth(mWidth)
.setHeight(mHeight)
.setFormat(FirebaseVisionImageMetadata.IMAGE_FORMAT_YV12)
.setRotation(getFirebaseRotation())
.build();
FirebaseVisionTextRecognizer detector = FirebaseVision.getInstance().getOnDeviceTextRecognizer();
FirebaseVisionImage image = FirebaseVisionImage.fromByteArray(mImageData, metadata);
detector.processImage(image)
.addOnSuccessListener(new OnSuccessListener<FirebaseVisionText>() {
@Override
public void onSuccess(FirebaseVisionText firebaseVisionText) {
List<FirebaseVisionText.TextBlock> textBlocks = firebaseVisionText.getTextBlocks();
WritableArray serializedData = serializeEventData(textBlocks);
mDelegate.onTextRecognized(serializedData);
mDelegate.onTextRecognizerTaskCompleted();
}
})
.addOnFailureListener(
new OnFailureListener() {
@Override
public void onFailure(Exception e) {
Log.e(TAG, "Text recognition task failed" + e);
mDelegate.onTextRecognizerTaskCompleted();
}
});
return null;
}
private int getFirebaseRotation() {
int result;
switch (mRotation) {
case 0:
result = FirebaseVisionImageMetadata.ROTATION_0;
break;
case 90:
result = FirebaseVisionImageMetadata.ROTATION_90;
break;
case 180:
result = FirebaseVisionImageMetadata.ROTATION_180;
break;
case 270:
case -90:
result = FirebaseVisionImageMetadata.ROTATION_270;
break;
default:
result = FirebaseVisionImageMetadata.ROTATION_0;
Log.e(TAG, "Bad rotation value: " + mRotation);
}
return result;
}
private WritableArray serializeEventData(List<FirebaseVisionText.TextBlock> textBlocks) {
WritableArray textBlocksList = Arguments.createArray();
for (FirebaseVisionText.TextBlock block: textBlocks) {
WritableMap serializedTextBlock = serializeBlock(block);
if (mImageDimensions.getFacing() == CameraView.FACING_FRONT) {
serializedTextBlock = rotateTextX(serializedTextBlock);
}
textBlocksList.pushMap(serializedTextBlock);
}
return textBlocksList;
}
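// Each block is serialized with its lines as "components"; lines in turn carry
// their elements, giving JS a block -> line -> element tree.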
private WritableMap serializeBlock(FirebaseVisionText.TextBlock block) {
WritableMap encodedText = Arguments.createMap();
WritableArray lines = Arguments.createArray();
for (FirebaseVisionText.Line line : block.getLines()) {
lines.pushMap(serializeLine(line));
}
encodedText.putArray("components", lines);
encodedText.putString("value", block.getText());
WritableMap bounds = processBounds(block.getBoundingBox());
encodedText.putMap("bounds", bounds);
encodedText.putString("type", "block");
return encodedText;
}
private WritableMap serializeLine(FirebaseVisionText.Line line) {
WritableMap encodedText = Arguments.createMap();
WritableArray lines = Arguments.createArray();
for (FirebaseVisionText.Element element : line.getElements()) {
lines.pushMap(serializeElement(element));
}
encodedText.putArray("components", lines);
encodedText.putString("value", line.getText());
WritableMap bounds = processBounds(line.getBoundingBox());
encodedText.putMap("bounds", bounds);
encodedText.putString("type", "line");
return encodedText;
}
private WritableMap serializeElement(FirebaseVisionText.Element element) {
WritableMap encodedText = Arguments.createMap();
encodedText.putString("value", element.getText());
WritableMap bounds = processBounds(element.getBoundingBox());
encodedText.putMap("bounds", bounds);
encodedText.putString("type", "element");
return encodedText;
}
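// Same coordinate conversion as in the barcode task, but here half the padding
// is added or subtracted on both axes depending on the frame's position
// relative to the view centre.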
private WritableMap processBounds(Rect frame) {
WritableMap origin = Arguments.createMap();
int x = frame.left;
int y = frame.top;
if (frame.left < mWidth / 2) {
x = x + mPaddingLeft / 2;
} else if (frame.left > mWidth / 2) {
x = x - mPaddingLeft / 2;
}
if (frame.top < mHeight / 2) {
y = y + mPaddingTop / 2;
} else if (frame.top > mHeight / 2) {
y = y - mPaddingTop / 2;
}
origin.putDouble("x", x * mScaleX);
origin.putDouble("y", y * mScaleY);
WritableMap size = Arguments.createMap();
size.putDouble("width", frame.width() * mScaleX);
size.putDouble("height", frame.height() * mScaleY);
WritableMap bounds = Arguments.createMap();
bounds.putMap("origin", origin);
bounds.putMap("size", size);
return bounds;
}
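// Recursively mirrors a serialized text block and its components around the
// vertical axis so the coordinates match the mirrored front-camera preview.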
private WritableMap rotateTextX(WritableMap text) {
ReadableMap faceBounds = text.getMap("bounds");
ReadableMap oldOrigin = faceBounds.getMap("origin");
WritableMap mirroredOrigin = positionMirroredHorizontally(
oldOrigin, mImageDimensions.getWidth(), mScaleX);
double translateX = -faceBounds.getMap("size").getDouble("width");
WritableMap translatedMirroredOrigin = positionTranslatedHorizontally(mirroredOrigin, translateX);
WritableMap newBounds = Arguments.createMap();
newBounds.merge(faceBounds);
newBounds.putMap("origin", translatedMirroredOrigin);
text.putMap("bounds", newBounds);
ReadableArray oldComponents = text.getArray("components");
WritableArray newComponents = Arguments.createArray();
for (int i = 0; i < oldComponents.size(); ++i) {
WritableMap component = Arguments.createMap();
component.merge(oldComponents.getMap(i));
rotateTextX(component);
newComponents.pushMap(component);
}
text.putArray("components", newComponents);
return text;
}
public static WritableMap positionTranslatedHorizontally(ReadableMap position, double translateX) {
WritableMap newPosition = Arguments.createMap();
newPosition.merge(position);
newPosition.putDouble("x", position.getDouble("x") + translateX);
return newPosition;
}
public static WritableMap positionMirroredHorizontally(ReadableMap position, int containerWidth, double scaleX) {
WritableMap newPosition = Arguments.createMap();
newPosition.merge(position);
newPosition.putDouble("x", valueMirroredHorizontally(position.getDouble("x"), containerWidth, scaleX));
return newPosition;
}
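// Converts x back to image space, mirrors it within the container width, then
// re-applies the scale factor.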
public static double valueMirroredHorizontally(double elementX, int containerWidth, double scaleX) {
double originalX = elementX / scaleX;
double mirroredX = containerWidth - originalX;
return mirroredX * scaleX;
}
}

Some files were not shown because too many files have changed in this diff.