@yoonit/nativescript-camera - npm Package Compare versions

Comparing version 2.0.0 to 2.1.0

helpers/Validator.js

@@ -54,5 +54,10 @@ import "reflect-metadata";

if (errorMessage !== null) {
console.error("ValidateProps", errorMessage);
throw new Error(errorMessage);
}
if (Validator.PropMap !== null) {
Validator.PropMap.push({
name: propertyName,
value: arguments[parameterIndex]
});
}
}

@@ -107,3 +112,4 @@ }

Validator.RegexPX = /[+-]?([0-9]+([.][0-9]*)?|[.][0-9]+)+(px)$/ig;
Validator.PropMap = [];
export default Validator;
//# sourceMappingURL=Validator.js.map
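
The Validator.js change above records each validated property call in `Validator.PropMap` so it can be replayed once the native view exists (see the `initNativeView` hunks further down). A minimal sketch of that record-and-replay idea, with simplified names and without the decorator plumbing (an illustration, not the plugin's exact code):

```js
// Record-and-replay sketch (simplified; names other than PropMap are illustrative).
class Validator {}
Validator.PropMap = []; // collects { name, value } pairs until the native view is ready

function recordValidatedProp(propertyName, value) {
  // In the plugin this push happens inside the ValidateProps decorator, after validation passes.
  if (Validator.PropMap !== null) {
    Validator.PropMap.push({ name: propertyName, value: value });
  }
}

function applyRecordedProps(nativeView) {
  // Mirrors the new initNativeView() logic: invoke each recorded setter once on the native view.
  Validator.PropMap.forEach((prop) => {
    if (nativeView[prop.name]) {
      nativeView[prop.name](prop.value);
    }
  });
  Validator.PropMap = null; // stop recording once the view has been initialized
}
```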

package.json
{
"name": "@yoonit/nativescript-camera",
"version": "2.0.0",
"version": "2.1.0",
"description": "Yoonit Camera have a custom view that shows a preview layer of the front/back camera and detects human faces in it and read qr code.",

@@ -24,5 +24,5 @@ "main": "Yoonit.Camera",

"ci:tslint": "npm i && tslint '**/*.ts' --config '../tslint.json' --exclude '**/node_modules/**' --exclude '**/platforms/**'",
"demo:ios": "npm i && cd ../demo-vue && tns run ios",
"demo:ios": "cd ../demo-vue && tns run ios",
"demo:android": "cd ../demo-vue && tns debug android --debug-brk",
"demo:reset": "cd ../demo-vue && npx rimraf -- hooks node_modules platforms",
"demo:reset": "cd ../demo-vue && npx rimraf -- hooks node_modules platforms && cd ../src && npm i",
"plugin:prepare": "npm run build && cd ../demo-vue && tns plugin add ../src && tns plugin remove nativescript-yoonit-camera && npm i && tns plugin add ../src",

@@ -29,0 +29,0 @@ "clean": "npm run demo:reset && npx rimraf -- node_modules && npm i"

@@ -162,5 +162,5 @@ [<img src="https://raw.githubusercontent.com/Yoonit-Labs/nativescript-yoonit-camera/development/logo_cyberlabs.png" width="300">](https://cyberlabs.ai/)

| stopCapture | - | - | void | Stop any type of capture.
| setLens | `lens: string` | `"front" | "back"` | void | Set the camera lens facing: front or back.
| toggleLens | - | - | void | Toggle camera lens facing front or back.
| getLens | - | - | number | Return `number` that represents lens face state: 0 for front 1 for back camera.
| toggleLens | - | - | void | Toggle camera lens facing front/back.
| setCameraLens | `lens: string` | `"front"` or `"back"` | void | Set camera to use "front" or "back" lens. Default value is "front".
| getLens | - | - | string | Return "front" or "back".
| setNumberOfImages | `numberOfImages: Int` | Any positive `Int` value | void | Default value is 0. With 0, images are saved indefinitely. When the number of saved images reaches this value, the `onEndCapture` is triggered.

@@ -186,3 +186,3 @@ | setTimeBetweenImages | `milliseconds: number` | Any positive number that represents time in milliseconds | void | Set the interval, in milliseconds, between saved face/frame images.

| - | - | -
| imageCaptured | `{ type: string, count: number, total: number, image: object = { path: string, source: blob } }` | Must have started capture type of face/frame. Emitted when the face image file is saved: <ul><li>type: "face" | "frame"</li><li>count: current index</li><li>total: total to create</li><li>image.path: the face/frame image path</li><li>image.source: the blob file</li></ul>
| imageCaptured | `{ type: string, count: number, total: number, image: object = { path: string, source: blob } }` | Must have started capture type of face/frame. Emitted when the face image file is saved: <ul><li>type: "face" or "frame"</li><li>count: current index</li><li>total: total to create</li><li>image.path: the face/frame image path</li><li>image.source: the blob file</li></ul>
| faceDetected | `{ x: number, y: number, width: number, height: number }` | Must have started capture type of face. Emits the detected face bounding box. Emits all parameters as null when no face is being detected anymore.

@@ -189,0 +189,0 @@ | endCapture | - | Must have started capture type of face/frame. Emitted when the number of image files created equals the number of images set (see the method `setNumberOfImages`).
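
The README rows above document the lens API rename in 2.1.0: `setLens` and `toggleLens` give way to `setCameraLens`, and `getLens` now returns the string "front" or "back" instead of a number. A hedged usage sketch follows; the page-loaded handler and the view id are assumptions, while the method names come from the table:

```js
// Hypothetical NativeScript page event handler; assumes the YoonitCamera view has id="yoonitCamera".
export function onPageLoaded(args) {
  const page = args.object;
  const camera = page.getViewById('yoonitCamera');

  camera.setCameraLens('back');   // 2.1.0: replaces the removed setLens / toggleLens pair
  console.log(camera.getLens());  // 2.1.0: returns "front" or "back" (2.0.0 returned 0 or 1)
}
```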

@@ -17,4 +17,3 @@ // +-+-+-+-+-+-+

'YoonitCamera',
() => require('./Yoonit.Camera')
.YoonitCamera
() => YoonitCamera
)
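
This hunk changes how the custom element resolves its implementation: instead of lazily requiring `./Yoonit.Camera` when the element is first used, the already imported `YoonitCamera` class is returned directly. A hedged sketch of what the surrounding registration typically looks like in a NativeScript-Vue plugin (the import line is assumed to exist elsewhere in the file):

```js
import Vue from 'nativescript-vue';
import { YoonitCamera } from './Yoonit.Camera'; // assumed import backing the direct reference

// Registers <YoonitCamera> as a NativeScript-Vue element; the resolver now returns the class directly.
Vue.registerElement(
  'YoonitCamera',
  () => YoonitCamera // previously: () => require('./Yoonit.Camera').YoonitCamera
);
```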

@@ -54,4 +53,4 @@

stopCapture,
setLens,
toggleLens,
setCameraLens,
getLens,

@@ -84,4 +83,4 @@ startCapture,

stopCapture,
setLens,
toggleLens,
setCameraLens,
getLens,

@@ -88,0 +87,0 @@ startCapture,

import { CameraBase } from './Yoonit.Camera.common';
import * as permissions from 'nativescript-permissions';
import { ImageSource, File } from '@nativescript/core';
import Validator from "./helpers/Validator";
const CAMERA = () => android.Manifest.permission.CAMERA;

@@ -14,2 +15,8 @@ export class YoonitCamera extends CameraBase {

super.initNativeView();
Validator.PropMap.forEach((prop) => {
if (this.nativeView[prop.name]) {
this.nativeView[prop.name](prop.value);
}
});
Validator.PropMap = null;
}

@@ -16,0 +23,0 @@ disposeNativeView() {

@@ -5,24 +5,55 @@ import { ContentView, } from '@nativescript/core';

export class CameraBase extends ContentView {
set initialLens(value) { this.setLens(value); }
set captureType(value) { this.startCapture(value); }
set numberOfImages(value) { this.setNumberOfImages(value); }
set timeBetweenImages(value) { this.setTimeBetweenImages(value); }
set outputImageWidth(value) { this.setOutputImageWidth(value); }
set outputImageHeight(value) { this.setOutputImageHeight(value); }
set faceMinSize(value) { this.setFaceCaptureMinSize(value); }
set faceMaxSize(value) { this.setFaceCaptureMaxSize(value); }
set faceDetectionBox(value) { this.setFaceDetectionBox(value); }
set saveImageCaptured(value) { this.setSaveImageCaptured(value); }
set faceROI(value) { this.setFaceROIEnable(value); }
set faceROITopOffset(value) { this.setFaceROITopOffset(value); }
set faceROIRightOffset(value) { this.setFaceROIRightOffset(value); }
set faceROIBottomOffset(value) { this.setFaceROIBottomOffset(value); }
set faceROILeftOffset(value) { this.setFaceROILeftOffset(value); }
set faceROIMinSize(value) { this.setFaceROIMinSize(value); }
set lens(value) {
this.setCameraLens(value);
}
set captureType(value) {
this.startCapture(value);
}
set numberOfImages(value) {
this.setNumberOfImages(value);
}
set timeBetweenImages(value) {
this.setTimeBetweenImages(value);
}
set outputImageWidth(value) {
this.setOutputImageWidth(value);
}
set outputImageHeight(value) {
this.setOutputImageHeight(value);
}
set faceMinSize(value) {
this.setFaceCaptureMinSize(value);
}
set faceMaxSize(value) {
this.setFaceCaptureMaxSize(value);
}
set faceDetectionBox(value) {
this.setFaceDetectionBox(value);
}
set saveImageCaptured(value) {
this.setSaveImageCaptured(value);
}
set faceROI(value) {
this.setFaceROIEnable(value);
}
set faceROITopOffset(value) {
this.setFaceROITopOffset(value);
}
set faceROIRightOffset(value) {
this.setFaceROIRightOffset(value);
}
set faceROIBottomOffset(value) {
this.setFaceROIBottomOffset(value);
}
set faceROILeftOffset(value) {
this.setFaceROILeftOffset(value);
}
set faceROIMinSize(value) {
this.setFaceROIMinSize(value);
}
requestPermission(explanationText) {
return new Promise((resolve, reject) => resolve());
}
hasPermission() { return false; }
setLens(lens) {
this.getLens() !== lens && this.toggleLens();
hasPermission() {
return false;
}

@@ -38,6 +69,12 @@ preview() {

}
setCameraLens(lens) {
this.nativeView.setCameraLens(lens);
}
getLens() {
return this.nativeView.getCameraLens() === 0 ? 'front' : 'back';
return this.nativeView.getCameraLens();
}
startCapture(type) {
this.startCaptureType(type);
}
startCaptureType(type) {
this.nativeView.startCaptureType(type);

@@ -97,3 +134,3 @@ }

__metadata("design:returntype", void 0)
], CameraBase.prototype, "setLens", null);
], CameraBase.prototype, "setCameraLens", null);
__decorate([

@@ -105,3 +142,3 @@ ValidateProps('captureType', ['face', 'qrcode', 'frame', 'none']),

__metadata("design:returntype", void 0)
], CameraBase.prototype, "startCapture", null);
], CameraBase.prototype, "startCaptureType", null);
__decorate([

@@ -108,0 +145,0 @@ ValidateProps('numberOfImages', RegexNumber),
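
Taken together, the expanded setters and the renamed decorated methods mean each declarative property is a thin, validated wrapper over a setter. A hedged sketch of the correspondence; `configureCamera` and `cameraView` are hypothetical names, while the property and method names are taken from the hunks above:

```js
// Hedged sketch: each declarative property routes to the corresponding setter method.
// configureCamera is hypothetical; cameraView stands for a YoonitCamera instance obtained elsewhere.
function configureCamera(cameraView) {
  cameraView.lens = 'back';        // -> setCameraLens('back'); accepts "front" or "back" per the README table
  cameraView.captureType = 'face'; // -> startCapture('face'); validated against ['face', 'qrcode', 'frame', 'none']
  cameraView.numberOfImages = 10;  // -> setNumberOfImages(10); validated against RegexNumber
}
```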

import { CameraBase } from './Yoonit.Camera.common';
import { ImageSource, knownFolders, path, File } from '@nativescript/core';
import Validator from "./helpers/Validator";
export class YoonitCamera extends CameraBase {

@@ -16,2 +17,8 @@ constructor() {

super.initNativeView();
Validator.PropMap.forEach((prop) => {
if (this.nativeView[prop.name]) {
this.nativeView[prop.name](prop.value);
}
});
Validator.PropMap = null;
}

@@ -18,0 +25,0 @@ disposeNativeView() {

