ngx-scanner-face
This library provides a solution for analyzing human face recognition.
It analyzes each face and easily provides statistics for Angular web applications.
See the project on GitHub.
Installation
Install ngx-scanner-face from npm:
npm install ngx-scanner-face@<version> --save
Add the package to your NgModule imports:
import { NgxScannerFaceModule } from 'ngx-scanner-face';
@NgModule({
imports: [
NgxScannerFaceModule
]
})
Camera :hammer_and_wrench:
<ngx-scanner-face #scanner="scanner" [isAuto]="true"></ngx-scanner-face>
<span>{{scanner.data.value | json}}</span>
<span>{{scanner.data | async | json}}</span>
Image src :hammer_and_wrench:
<ngx-scanner-face #scanner="scanner" [src]="'http://domain.com/picture.png'"></ngx-scanner-face>
<span>{{scanner.data.value | json}}</span>
<span>{{scanner.data | async | json}}</span>
Select files :hammer_and_wrench:
import { NgxScannerFaceService, ScannerFaceConfig, ScannerFaceResult } from 'ngx-scanner-face';
public scannerConfig: ScannerFaceConfig = {};
public scannerResult: ScannerFaceResult[] = [];
constructor(private face: NgxScannerFaceService) {}
public onSelects(files: any) {
this.face.loadFiles(files, this.scannerConfig).subscribe((res: ScannerFaceResult[]) => {
this.scannerResult = res;
});
}
<input #file type="file" (change)="onSelects(file.files)" [multiple]="'multiple'" [accept]="'.jpg, .png'"/>
<div *ngFor="let row of scannerResult">
<img [src]="row.url" [alt]="row.name">
</div>
API Documentation
Input :old_key:
Field | Description | Type | Default |
---|
[src] | image url | string | - |
[isAuto] | auto camera | boolean | false |
[fps] | frames/ms | number | 30 |
[timeoutDetect] | face-detection interval (ms) | number | 1000 |
[style] | style for canvas and video | Object | null |
[videoStyle] | style for video | Object | null |
[canvasStyle] | style for canvas | Object | null |
[constraints] | setting video | MediaStreamConstraints | { audio: false, video: true } |
[env] | env config | | { perfadd: false } |
[draw] | draw config | | { font: 'monospace', lineHeight: 20 } |
[human] | user configuration for human, used to fine-tune behavior | | {...} |
[config] | Config all | BaseConfig | { src:..., isAuto:..., isLoading:..., fps:..., env:..., draw:..., human:..., style:..., constraints:... } |
Output :old_key:
Field | Description | Type | Default |
---|
(event) | Data | Result | - |
Global variable in component :old_key:
Field | Description | Type | Default |
---|
isStart | start status | boolean | false |
isPause | pause status | boolean | false |
isLoading | loading status | boolean | false |
isTorch | flashlight status | boolean | false |
data | data | BehaviorSubject | {} |
devices | devices | BehaviorSubject<Device[]> | [] |
deviceIndexActive | device index | number | 0 |
Global event in component :old_key:
Field | Description | Type | Default |
---|
(start) | start camera | AsyncSubject | - |
(stop) | stop camera | AsyncSubject | - |
(play) | play video | AsyncSubject | - |
(pause) | pause video | AsyncSubject | - |
(torcher) | toggle on/off flashlight | AsyncSubject | - |
(loadImage) | load image from src | AsyncSubject | - |
(download) | download image from video | AsyncSubject | - |
Global event in service :old_key:
Field | Description | Type | Default |
---|
(toBase64) | used for file selection | AsyncSubject | - |
(loadFiles) | used for file selection | AsyncSubject | - |
(streamToBase64) | used with the camera scanner | AsyncSubject | - |
(streamToBlobURL) | used with the camera scanner | AsyncSubject | - |
Models :magnet:
ScannerFaceConfig
interface ScannerFaceConfig {
src?: string;
isAuto?: boolean;
isLoading?: boolean;
fps?: number;
timeoutDetect?: number;
env?: Partial<Env>;
draw?: Partial<DrawOptions>;
human?: Partial<Config>;
constraints?: MediaStreamConstraints;
}
ScannerFaceResult
interface ScannerFaceResult {
file?: File;
name?: string;
url?: string;
blob?: any;
base64?: string;
result?: Result;
}
Device
interface Device {
deviceId: string;
kind: string;
label: string;
groupId: string;
}
Env
class Env {
browser: boolean;
node: boolean;
worker: boolean;
platform: string;
agent: string;
backends: string[];
initial: boolean;
filter: boolean | undefined;
tfjs: {
version: undefined | string;
};
offscreen: undefined | boolean;
perfadd: boolean;
tensorflow: {
version: undefined | string;
gpu: undefined | boolean;
};
wasm: {
supported: undefined | boolean;
backend: undefined | boolean;
simd: undefined | boolean;
multithread: undefined | boolean;
};
webgl: {
supported: undefined | boolean;
backend: undefined | boolean;
version: undefined | string;
renderer: undefined | string;
};
webgpu: {
supported: undefined | boolean;
backend: undefined | boolean;
adapter: undefined | string;
};
cpu: {
model: undefined | string;
flags: string[];
};
kernels: string[];
Canvas: undefined;
Image: undefined;
ImageData: undefined;
constructor();
updateBackend(): Promise<void>;
updateCPU(): void;
}
Config
interface Config {
backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu';
wasmPath: string;
wasmPlatformFetch: boolean;
debug: boolean;
async: boolean;
warmup: '' | 'none' | 'face' | 'full' | 'body';
modelBasePath: string;
cacheModels: boolean;
validateModels: boolean;
cacheSensitivity: number;
flags: Record<string, unknown>;
softwareKernels: boolean;
deallocate: boolean;
skipAllowed: boolean;
filter: Partial<FilterConfig>;
gesture: Partial<GestureConfig>;
face: Partial<FaceConfig>;
body: Partial<BodyConfig>;
hand: Partial<HandConfig>;
object: Partial<ObjectConfig>;
segmentation: Partial<SegmentationConfig>;
}
DrawOptions
interface DrawOptions {
color: string;
alpha: number;
labelColor: string;
shadowColor: string;
font: string;
lineHeight: number;
lineWidth: number;
pointSize: number;
roundRect: number;
drawPoints: boolean;
drawLabels: boolean;
drawAttention: boolean;
drawGestures: boolean;
drawBoxes: boolean;
drawPolygons: boolean;
drawGaze: boolean;
fillPolygons: boolean;
useDepth: boolean;
useCurves: boolean;
}
Result
interface Result {
face: FaceResult[];
body: BodyResult[];
hand: HandResult[];
gesture: GestureResult[];
object: ObjectResult[];
performance: Record<string, number>;
canvas?: AnyCanvas | null;
readonly timestamp: number;
persons: PersonResult[];
error: string | null;
canvas2: HTMLCanvasElement;
base64: string;
blobUrl?: string;
}
Models in Config :magnet:
FilterConfig
interface FilterConfig {
enabled: boolean;
equalization: boolean;
width: number;
height: number;
return: boolean;
flip: boolean;
brightness: number;
contrast: number;
sharpness: number;
blur: number;
saturation: number;
hue: number;
negative: boolean;
sepia: boolean;
vintage: boolean;
kodachrome: boolean;
technicolor: boolean;
polaroid: boolean;
pixelate: number;
}
GestureConfig
interface GestureConfig {
enabled: boolean;
}
FaceConfig
interface FaceConfig extends GestureConfig {
detector: Partial<FaceDetectorConfig>;
mesh: Partial<FaceMeshConfig>;
attention: Partial<FaceAttentionConfig>;
iris: Partial<FaceIrisConfig>;
description: Partial<FaceDescriptionConfig>;
emotion: Partial<FaceEmotionConfig>;
antispoof: Partial<FaceAntiSpoofConfig>;
liveness: Partial<FaceLivenessConfig>;
gear: Partial<FaceGearConfig>;
}
BodyConfig
interface BodyConfig extends GenericConfig {
maxDetected: number;
minConfidence: number;
}
HandConfig
interface HandConfig extends GenericConfig {
rotation: boolean;
minConfidence: number;
iouThreshold: number;
maxDetected: number;
landmarks: boolean;
detector: {
modelPath?: string;
};
skeleton: {
modelPath?: string;
};
}
ObjectConfig
interface ObjectConfig extends GenericConfig {
minConfidence: number;
iouThreshold: number;
maxDetected: number;
}
SegmentationConfig
interface SegmentationConfig extends GenericConfig {
blur: number;
}
Models in Result :magnet:
FaceResult
interface FaceResult {
id: number;
score: number;
boxScore: number;
faceScore: number;
box: Box;
boxRaw: Box;
mesh: Point[];
meshRaw: Point[];
annotations: Record<FaceLandmark, Point[]>;
age?: number;
gender?: Gender;
genderScore?: number;
emotion?: {
score: number;
emotion: Emotion;
}[];
race?: {
score: number;
race: Race;
}[];
embedding?: number[];
iris?: number;
real?: number;
live?: number;
rotation?: {
angle: {
roll: number;
yaw: number;
pitch: number;
};
matrix: [number, number, number, number, number, number, number, number, number];
gaze: {
bearing: number;
strength: number;
};
} | null;
tensor?: Tensor;
}
BodyResult
interface BodyResult {
id: number;
score: number;
box: Box;
boxRaw: Box;
keypoints: BodyKeypoint[];
annotations: Record<BodyAnnotation, Point[][]>;
}
HandResult
interface HandResult {
id: number;
score: number;
boxScore: number;
fingerScore: number;
box: Box;
boxRaw: Box;
keypoints: Point[];
label: HandType;
annotations: Record<Finger, Point[]>;
landmarks: Record<Finger, {
curl: FingerCurl;
direction: FingerDirection;
}>;
}
GestureResult
type GestureResult = {
'face': number;
gesture: FaceGesture;
} | {
'iris': number;
gesture: IrisGesture;
} | {
'body': number;
gesture: BodyGesture;
} | {
'hand': number;
gesture: HandGesture;
};
ObjectResult
interface ObjectResult {
id: number;
score: number;
class: number;
label: ObjectType;
box: Box;
boxRaw: Box;
}
PersonResult
interface PersonResult {
id: number;
face: FaceResult;
body: BodyResult | null;
hands: {
left: HandResult | null;
right: HandResult | null;
};
gestures: GestureResult[];
box: Box;
boxRaw?: Box;
}
Models in FaceConfig :magnet:
FaceDetectorConfig
interface FaceDetectorConfig extends GenericConfig {
rotation: boolean;
maxDetected: number;
minConfidence: number;
iouThreshold: number;
mask: boolean;
return: boolean;
}
FaceMeshConfig
interface FaceMeshConfig extends GenericConfig {
keepInvalid: boolean;
}
FaceDescriptionConfig
interface FaceDescriptionConfig extends GenericConfig {
minConfidence: number;
}
FaceEmotionConfig
interface FaceEmotionConfig extends GenericConfig {
minConfidence: number;
}
FaceGearConfig
interface FaceGearConfig extends GenericConfig {
minConfidence: number;
}
Support versions
Support versions |
---|
Angular 16 | 1.2.3 |
Angular 12 | 1.2.2 |
Author Information
Author Information |
---|
Author | DaiDH |
Phone | +84845882882 |
Country | Vietnam |
To make this library more complete, please donate to me if you can!
MIT License. Copyright (c) 2021 DaiDH