I'm doing OCR in a NativeScript Core app with the camera-plus plugin and ML Kit from Firebase. This is my code for the view:
<Page navigatingTo="onNavigatingTo" xmlns:Cam="@nstudio/nativescript-camera-plus">
    <StackLayout>
        <Cam:CameraPlus
            id="camPlus"
            height="70%"
            width="70%"
            showCaptureIcon="false"
            showGalleryIcon="false"
            showToggleIcon="false"
            showFlashIcon="false"
            confirmPhotos="false"
            debug="true">
        </Cam:CameraPlus>
        <Button text="test" tap="onCapture" />
    </StackLayout>
</Page>
and this for the JS:
const HomeViewModel = require("./home-view-model");
const firebase = require("nativescript-plugin-firebase");
const imageSourceModule = require("tns-core-modules/image-source");
const CameraPlus = require("@nstudio/nativescript-camera-plus");

exports.onNavigatingTo = function (args) {
    page = args.object;
    mv = page.bindingContext = new HomeViewModel();
};

exports.onCapture = function () {
    camera = page.getViewById("camPlus");
    camera.takePicture({ saveToGallery: false })
        .then(function (imageAsset) {
            const source = new imageSourceModule.ImageSource();
            source.fromAsset(imageAsset).then((imageSource) => {
                getTextFromPhoto(imageSource);
            });
        })
        .catch(function (err) {
            console.log("Error -> " + err.message);
        });
};
I'm getting this error:
System.err: TypeError: Cannot read property 'then' of undefined
When I keep only the call
camera.takePicture({ saveToGallery: false })
it works, so it must be a JS problem. The idea is to pass that photo to ML Kit (I have to use the camera-plus plugin because the camera needs to be embedded in the page instead of launching the camera app, as the basic camera plugin does).
Please read the documentation carefully: the return value of the takePicture method is void, not a Promise. You must listen to the photoCapturedEvent on the CameraPlus component.
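A minimal sketch of how that could look in your code-behind, assuming the event args expose the captured image as an ImageAsset on their data property (check the plugin's typings for the exact shape in your version):

const HomeViewModel = require("./home-view-model");
const imageSourceModule = require("tns-core-modules/image-source");
const CameraPlus = require("@nstudio/nativescript-camera-plus");

let page;

exports.onNavigatingTo = function (args) {
    page = args.object;
    page.bindingContext = new HomeViewModel();

    const camera = page.getViewById("camPlus");
    // Fires every time CameraPlus finishes capturing a photo.
    camera.on(CameraPlus.CameraPlus.photoCapturedEvent, function (event) {
        // Assumption: event.data holds the captured ImageAsset.
        imageSourceModule.fromAsset(event.data)
            .then(function (imageSource) {
                getTextFromPhoto(imageSource); // your existing ML Kit call
            })
            .catch(function (err) {
                console.log("Error -> " + err.message);
            });
    });
};

exports.onCapture = function () {
    // Just trigger the capture; the result arrives through photoCapturedEvent.
    page.getViewById("camPlus").takePicture({ saveToGallery: false });
};

The exact payload of the event differs between plugin versions, so log the event object once to confirm where the asset lives before wiring ML Kit to it.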
Update:
If you are using NativeScript Core, you will have to add the listener programmatically, as in the sketch above. Doing it from XML may not work.
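For the ML Kit side, getTextFromPhoto can then be a thin wrapper around the text recognition API of nativescript-plugin-firebase. This is only a sketch, assuming the ML Kit text recognition feature is enabled in your Firebase plugin configuration and the on-device recognizer from the plugin's ML Kit docs is available:

const firebase = require("nativescript-plugin-firebase");

function getTextFromPhoto(imageSource) {
    // On-device text recognition; no network round trip needed.
    firebase.mlkit.textrecognition.recognizeTextOnDevice({
        image: imageSource
    }).then(function (result) {
        // result.text is the full recognized string,
        // result.blocks contains the individual text blocks.
        console.log("OCR result: " + result.text);
    }).catch(function (err) {
        console.log("ML Kit error -> " + err);
    });
}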