Socket
Socket
Sign inDemoInstall

expo-face-detector

Package Overview
Dependencies
5
Maintainers
28
Versions
88
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 10.1.1 to 11.0.0

2

build/ExpoFaceDetector.d.ts

@@ -1,2 +0,2 @@

declare const _default: import("@unimodules/core").ProxyNativeModule;
declare const _default: import("expo-modules-core").ProxyNativeModule;
export default _default;

@@ -1,3 +0,3 @@

import { NativeModulesProxy } from '@unimodules/core';
import { NativeModulesProxy } from 'expo-modules-core';
export default NativeModulesProxy.ExpoFaceDetector;
//# sourceMappingURL=ExpoFaceDetector.js.map

@@ -1,3 +0,2 @@

import ExpoFaceDetector from './ExpoFaceDetector';
declare type Point = {
export declare type Point = {
x: number;

@@ -7,51 +6,178 @@ y: number;

export declare type FaceFeature = {
bounds: {
size: {
width: number;
height: number;
};
origin: Point;
};
/**
* An object containing face bounds.
*/
bounds: FaceFeatureBounds;
/**
* Probability that the face is smiling. Returned only if detection classifications property is
* set to `FaceDetectorClassifications.all`.
*/
smilingProbability?: number;
/**
* Position of the left ear in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
leftEarPosition?: Point;
/**
* Position of the right ear in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
rightEarPosition?: Point;
/**
* Position of the left eye in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
leftEyePosition?: Point;
/**
* Probability that the left eye is open. Returned only if detection classifications property is
* set to `FaceDetectorClassifications.all`.
*/
leftEyeOpenProbability?: number;
/**
* Position of the right eye in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
rightEyePosition?: Point;
/**
* Probability that the right eye is open. Returned only if detection classifications property is
* set to `FaceDetectorClassifications.all`.
*/
rightEyeOpenProbability?: number;
/**
* Position of the left cheek in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
leftCheekPosition?: Point;
/**
* Position of the right cheek in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
rightCheekPosition?: Point;
/**
* Position of the left edge of the mouth in image coordinates. Returned only if detection
* landmarks property is set to `FaceDetectorLandmarks.all`.
*/
leftMouthPosition?: Point;
/**
* Position of the center of the mouth in image coordinates. Returned only if detection
* landmarks property is set to `FaceDetectorLandmarks.all`.
*/
mouthPosition?: Point;
/**
* Position of the right edge of the mouth in image coordinates. Returned only if detection
* landmarks property is set to `FaceDetectorLandmarks.all`.
*/
rightMouthPosition?: Point;
/**
* Position of the bottom edge of the mouth in image coordinates. Returned only if detection
* landmarks property is set to `FaceDetectorLandmarks.all`.
*/
bottomMouthPosition?: Point;
/**
* Position of the nose base in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
noseBasePosition?: Point;
/**
* Yaw angle of the face (heading, turning head left or right).
*/
yawAngle?: number;
/**
* Roll angle of the face (bank).
*/
rollAngle?: number;
/**
* A face identifier (used for tracking, if the same face appears on consecutive frames it will
* have the same `faceID`).
*/
faceID?: number;
};
declare type ValuesOf<T extends any[]> = T[number];
export declare type FaceDetectorMode = string[];
export declare type FaceDetectorLandmarks = ValuesOf<typeof ExpoFaceDetector.Landmarks>;
export declare type FaceDetectorClassifications = ValuesOf<typeof ExpoFaceDetector.Classifications>;
export interface Image {
export declare type FaceFeatureBounds = {
/**
* Size of the square containing the face in image coordinates.
*/
size: {
width: number;
height: number;
};
/**
* Position of the top left corner of a square containing the face in image coordinates.
*/
origin: Point;
};
export declare enum FaceDetectorMode {
fast = 1,
accurate = 2
}
export declare enum FaceDetectorLandmarks {
all = 1,
none = 2
}
export declare enum FaceDetectorClassifications {
all = 1,
none = 2
}
export declare type Image = {
/**
* URI of the image.
*/
uri: string;
/**
* Width of the image in pixels.
*/
width: number;
/**
* Height of the image in pixels.
*/
height: number;
/**
* Orientation of the image (value conforms to the EXIF orientation tag standard).
*/
orientation: number;
}
};
/**
* In order to configure detector's behavior modules pass a settings object which is then
* interpreted by this module.
*/
export declare type DetectionOptions = {
/**
* Whether to detect faces in fast or accurate mode. Use `FaceDetector.FaceDetectorMode.{fast, accurate}`.
*/
mode?: FaceDetectorMode;
/**
* Whether to detect and return landmarks positions on the face (ears, eyes, mouth, cheeks, nose).
* Use `FaceDetector.FaceDetectorLandmarks.{all, none}`.
*/
detectLandmarks?: FaceDetectorLandmarks;
/**
* Whether to run additional classifications on detected faces (smiling probability, open eye
* probabilities). Use `FaceDetector.FaceDetectorClassifications.{all, none}`.
*/
runClassifications?: FaceDetectorClassifications;
/**
* Minimal interval in milliseconds between two face detection events being submitted to JS.
* Use, when you expect lots of faces for long time and are afraid of JS Bridge being overloaded.
* @default `0`
*/
minDetectionInterval?: number;
/**
* Flag to enable tracking of faces between frames. If true, each face will be returned with
* `faceID` attribute which should be consistent across frames.
* @default `false`
*/
tracking?: boolean;
};
export declare function detectFacesAsync(uri: string, options?: DetectionOptions): Promise<{
export declare type DetectionResult = {
/**
* Array of faces objects.
*/
faces: FaceFeature[];
image: Image;
}>;
export declare const Constants: {
Mode: any;
Landmarks: any;
Classifications: any;
};
export {};
/**
* Detect faces on a picture.
* @param uri `file://` URI to the image.
* @param options A map of detection options.
* @return Returns a Promise which fulfils with [`DetectionResult`](#detectionresult) object.
*/
export declare function detectFacesAsync(uri: string, options?: DetectionOptions): Promise<DetectionResult>;

@@ -1,3 +0,28 @@

import { UnavailabilityError } from '@unimodules/core';
import { UnavailabilityError } from 'expo-modules-core';
import ExpoFaceDetector from './ExpoFaceDetector';
// @docsMissing
export var FaceDetectorMode;
(function (FaceDetectorMode) {
FaceDetectorMode[FaceDetectorMode["fast"] = 1] = "fast";
FaceDetectorMode[FaceDetectorMode["accurate"] = 2] = "accurate";
})(FaceDetectorMode || (FaceDetectorMode = {}));
// @docsMissing
export var FaceDetectorLandmarks;
(function (FaceDetectorLandmarks) {
FaceDetectorLandmarks[FaceDetectorLandmarks["all"] = 1] = "all";
FaceDetectorLandmarks[FaceDetectorLandmarks["none"] = 2] = "none";
})(FaceDetectorLandmarks || (FaceDetectorLandmarks = {}));
// @docsMissing
export var FaceDetectorClassifications;
(function (FaceDetectorClassifications) {
FaceDetectorClassifications[FaceDetectorClassifications["all"] = 1] = "all";
FaceDetectorClassifications[FaceDetectorClassifications["none"] = 2] = "none";
})(FaceDetectorClassifications || (FaceDetectorClassifications = {}));
// @needsAudit
/**
* Detect faces on a picture.
* @param uri `file://` URI to the image.
* @param options A map of detection options.
* @return Returns a Promise which fulfils with [`DetectionResult`](#detectionresult) object.
*/
export async function detectFacesAsync(uri, options = {}) {

@@ -9,7 +34,2 @@ if (!ExpoFaceDetector.detectFaces) {

}
export const Constants = {
Mode: ExpoFaceDetector.Mode,
Landmarks: ExpoFaceDetector.Landmarks,
Classifications: ExpoFaceDetector.Classifications,
};
//# sourceMappingURL=FaceDetector.js.map

@@ -13,6 +13,19 @@ # Changelog

## 10.1.1 — 2021-06-22
## 11.0.0 — 2021-09-28
_This version does not introduce any user-facing changes._
### 🛠 Breaking changes
- Replace exported `FaceDetector.Constants.*` with String Enums (`FaceDetector.FaceDetectorMode`, `FaceDetector.FaceDetectorLandmarks` and `FaceDetector.FaceDetectorClassifications`). ([#14179](https://github.com/expo/expo/pull/14179) by [@Simek](https://github.com/Simek))
- Dropped support for iOS 11.0 ([#14383](https://github.com/expo/expo/pull/14383) by [@cruzach](https://github.com/cruzach))
### 🐛 Bug fixes
- Fix building errors from use_frameworks! in Podfile. ([#14523](https://github.com/expo/expo/pull/14523) by [@kudo](https://github.com/kudo))
### 💡 Others
- Migrated from `@unimodules/core` to `expo-modules-core`. ([#13757](https://github.com/expo/expo/pull/13757) by [@tsapeta](https://github.com/tsapeta))
- Extract `detectFacesAsync` options as separate type named `DetectionOptions`. ([#14179](https://github.com/expo/expo/pull/14179) by [@Simek](https://github.com/Simek))
- Add missing `minDetectionInterval` and `tracking` parameters to the `DetectionOptions` type. ([#14179](https://github.com/expo/expo/pull/14179) by [@Simek](https://github.com/Simek))
## 10.1.0 — 2021-06-16

@@ -19,0 +32,0 @@

{
"name": "expo-face-detector",
"version": "10.1.1",
"version": "11.0.0",
"description": "Lets you use the power of Google Mobile Vision (https://developers.google.com/vision/face-detection-concepts) framework to detect faces on images.",

@@ -34,11 +34,8 @@ "main": "build/FaceDetector.js",

"license": "MIT",
"homepage": "https://docs.expo.io/versions/latest/sdk/facedetector/",
"homepage": "https://docs.expo.dev/versions/latest/sdk/facedetector/",
"jest": {
"preset": "expo-module-scripts/ios"
},
"unimodulePeerDependencies": {
"@unimodules/core": "*"
},
"dependencies": {
"expo-modules-core": "~0.2.0"
"expo-modules-core": "~0.4.0"
},

@@ -48,3 +45,3 @@ "devDependencies": {

},
"gitHead": "6e8cfadff90f106d6321d0dd8c4158f12a973d30"
"gitHead": "1fffde73411ee7a642b98f1506a8de921805d52b"
}

@@ -1,3 +0,3 @@

import { NativeModulesProxy } from '@unimodules/core';
import { NativeModulesProxy } from 'expo-modules-core';
export default NativeModulesProxy.ExpoFaceDetector;

@@ -1,59 +0,207 @@

import { UnavailabilityError } from '@unimodules/core';
import { UnavailabilityError } from 'expo-modules-core';
import ExpoFaceDetector from './ExpoFaceDetector';
type Point = { x: number; y: number };
// @docsMissing
export type Point = { x: number; y: number };
// @needsAudit
export type FaceFeature = {
bounds: {
size: {
width: number;
height: number;
};
origin: Point;
};
/**
* An object containing face bounds.
*/
bounds: FaceFeatureBounds;
/**
* Probability that the face is smiling. Returned only if detection classifications property is
* set to `FaceDetectorClassifications.all`.
*/
smilingProbability?: number;
/**
* Position of the left ear in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
leftEarPosition?: Point;
/**
* Position of the right ear in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
rightEarPosition?: Point;
/**
* Position of the left eye in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
leftEyePosition?: Point;
/**
* Probability that the left eye is open. Returned only if detection classifications property is
* set to `FaceDetectorClassifications.all`.
*/
leftEyeOpenProbability?: number;
/**
* Position of the right eye in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
rightEyePosition?: Point;
/**
* Probability that the right eye is open. Returned only if detection classifications property is
* set to `FaceDetectorClassifications.all`.
*/
rightEyeOpenProbability?: number;
/**
* Position of the left cheek in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
leftCheekPosition?: Point;
/**
* Position of the right cheek in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
rightCheekPosition?: Point;
/**
* Position of the left edge of the mouth in image coordinates. Returned only if detection
* landmarks property is set to `FaceDetectorLandmarks.all`.
*/
leftMouthPosition?: Point;
/**
* Position of the center of the mouth in image coordinates. Returned only if detection
* landmarks property is set to `FaceDetectorLandmarks.all`.
*/
mouthPosition?: Point;
/**
* Position of the right edge of the mouth in image coordinates. Returned only if detection
* landmarks property is set to `FaceDetectorLandmarks.all`.
*/
rightMouthPosition?: Point;
/**
* Position of the bottom edge of the mouth in image coordinates. Returned only if detection
* landmarks property is set to `FaceDetectorLandmarks.all`.
*/
bottomMouthPosition?: Point;
/**
* Position of the nose base in image coordinates. Returned only if detection landmarks
* property is set to `FaceDetectorLandmarks.all`.
*/
noseBasePosition?: Point;
/**
* Yaw angle of the face (heading, turning head left or right).
*/
yawAngle?: number;
/**
* Roll angle of the face (bank).
*/
rollAngle?: number;
/**
* A face identifier (used for tracking, if the same face appears on consecutive frames it will
* have the same `faceID`).
*/
faceID?: number;
};
type ValuesOf<T extends any[]> = T[number];
// @needsAudit
export type FaceFeatureBounds = {
/**
* Size of the square containing the face in image coordinates.
*/
size: {
width: number;
height: number;
};
/**
* Position of the top left corner of a square containing the face in image coordinates.
*/
origin: Point;
};
export type FaceDetectorMode = string[];
// @docsMissing
export enum FaceDetectorMode {
fast = 1,
accurate = 2,
}
export type FaceDetectorLandmarks = ValuesOf<typeof ExpoFaceDetector.Landmarks>;
// @docsMissing
export enum FaceDetectorLandmarks {
all = 1,
none = 2,
}
export type FaceDetectorClassifications = ValuesOf<typeof ExpoFaceDetector.Classifications>;
// @docsMissing
export enum FaceDetectorClassifications {
all = 1,
none = 2,
}
export interface Image {
// @needsAudit
export type Image = {
/**
* URI of the image.
*/
uri: string;
/**
* Width of the image in pixels.
*/
width: number;
/**
* Height of the image in pixels.
*/
height: number;
/**
* Orientation of the image (value conforms to the EXIF orientation tag standard).
*/
orientation: number;
}
};
// @needsAudit
/**
* In order to configure detector's behavior modules pass a settings object which is then
* interpreted by this module.
*/
export type DetectionOptions = {
/**
* Whether to detect faces in fast or accurate mode. Use `FaceDetector.FaceDetectorMode.{fast, accurate}`.
*/
mode?: FaceDetectorMode;
/**
* Whether to detect and return landmarks positions on the face (ears, eyes, mouth, cheeks, nose).
* Use `FaceDetector.FaceDetectorLandmarks.{all, none}`.
*/
detectLandmarks?: FaceDetectorLandmarks;
/**
* Whether to run additional classifications on detected faces (smiling probability, open eye
* probabilities). Use `FaceDetector.FaceDetectorClassifications.{all, none}`.
*/
runClassifications?: FaceDetectorClassifications;
/**
* Minimal interval in milliseconds between two face detection events being submitted to JS.
* Use, when you expect lots of faces for long time and are afraid of JS Bridge being overloaded.
* @default `0`
*/
minDetectionInterval?: number;
/**
* Flag to enable tracking of faces between frames. If true, each face will be returned with
* `faceID` attribute which should be consistent across frames.
* @default `false`
*/
tracking?: boolean;
};
// @needsAudit
export type DetectionResult = {
/**
* Array of faces objects.
*/
faces: FaceFeature[];
// @docsMissing
image: Image;
};
// @needsAudit
/**
* Detect faces on a picture.
* @param uri `file://` URI to the image.
* @param options A map of detection options.
* @return Returns a Promise which fulfils with [`DetectionResult`](#detectionresult) object.
*/
export async function detectFacesAsync(
uri: string,
options: DetectionOptions = {}
): Promise<{ faces: FaceFeature[]; image: Image }> {
): Promise<DetectionResult> {
if (!ExpoFaceDetector.detectFaces) {

@@ -64,7 +212,1 @@ throw new UnavailabilityError('expo-face-detector', 'detectFaces');

}
export const Constants = {
Mode: ExpoFaceDetector.Mode,
Landmarks: ExpoFaceDetector.Landmarks,
Classifications: ExpoFaceDetector.Classifications,
};

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc