vue-camera-gestures - npm Package Compare versions

Comparing version 0.2.4 to 0.3.0


dist/vue-camera-gestures-plain.esm.js

@@ -281,3 +281,4 @@ import { load } from '@tensorflow-models/mobilenet';

 var res = await this.knn.predictClass(logits, TOPK);
-var gestureIndex = this.gestureIndexFromClassIndex(res.classIndex);
+var classIndex = parseInt(res.label);
+var gestureIndex = this.gestureIndexFromClassIndex(classIndex);
 var neutralDetected = gestureIndex === -2;
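
For context, `predictClass` from `@tensorflow-models/knn-classifier` resolves to an object of the form `{ classIndex, label, confidences }`. The `label` is the string key the training examples were added under, while `classIndex` is only the class's position in the classifier's dataset, which may not line up with gesture order once a saved model is reloaded; that is presumably why the code now parses the label instead. A minimal sketch of the distinction, assuming `@tensorflow/tfjs` and `@tensorflow-models/knn-classifier` are installed and the file runs as an ES module:

```js
import * as tf from '@tensorflow/tfjs';
import * as knnClassifier from '@tensorflow-models/knn-classifier';

const knn = knnClassifier.create();
// The component stores examples under stringified gesture indices ('-2' = neutral).
knn.addExample(tf.tensor1d([1, 0]), '-2');
knn.addExample(tf.tensor1d([0, 1]), '0');

const res = await knn.predictClass(tf.tensor1d([0.9, 0.1]), 3);
// res.label is the key the winning class was stored under; res.classIndex is
// merely its insertion position, which can differ after a model round-trips
// through JSON.
console.log(parseInt(res.label), res.classIndex);
```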

@@ -312,2 +313,4 @@ var gesture = neutralDetected

 updateState: function updateState () {
+var this$1 = this;
 // Model provided - skip everything and just use the given model

@@ -336,3 +339,3 @@ if (this.model) {

 // Go back to start
-this.$emit('verificationFailed', this.getModelJson());
+this.getModelJson().then(function (x) { return this$1.$emit('verificationFailed', x); });
 this.reset();

@@ -366,3 +369,3 @@ return

 if (this.state === 'training' && this.doVerification) {
-this.$emit('doneTraining', this.getModelJson());
+this.getModelJson().then(function (x) { return this$1.$emit('doneTraining', x); });
 this.state = 'testing';

@@ -374,3 +377,3 @@ this.currentGestureIndex = !this.trainNeutralLast ? -2 : 0;

 // verification completed successfully!
-this.$emit('doneVerification', this.getModelJson());
+this.getModelJson().then(function (x) { return this$1.$emit('doneVerification', x); });
 }
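
All three emit-site changes follow the same pattern: `getModelJson` is now async (next hunk), so the component resolves the promise first and listeners still receive a plain JSON string rather than a pending promise. The `var this$1 = this;` added to `updateState` is the down-levelled form of an arrow function's lexical `this`. In modern source the pattern would read roughly like this (a sketch, not the library's actual source):

```js
// Hypothetical helper showing the new emit pattern: serialize first, then
// emit, so event handlers receive the resolved JSON string.
async function emitModelJson (vm, eventName) {
  const json = await vm.getModelJson()
  vm.$emit(eventName, json)
}
```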

@@ -459,20 +462,24 @@ this.state = 'predicting';

 },
-getModelJson: function getModelJson () {
-// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-456308218
+getModelJson: async function getModelJson () {
+// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-660642769
 var dataset = this.knn.getClassifierDataset();
-var datasetObj = {};
-Object.keys(dataset).forEach(function (key) {
-var data = dataset[key].dataSync();
-datasetObj[key] = Array.from(data);
-});
-return JSON.stringify(datasetObj)
+var data = [];
+for (var label in dataset) {
+data.push({
+label: label,
+values: Array.from(await dataset[label].data()),
+shape: dataset[label].shape
+});
+}
+return JSON.stringify(data)
 },
 loadModelFromJson: function loadModelFromJson (json) {
-// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-456308218
-var tensorObj = JSON.parse(json);
+// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-660642769
+var model = JSON.parse(json);
 // convert back to tensor
-Object.keys(tensorObj).forEach(function (key) {
-tensorObj[key] = tensor(tensorObj[key], [tensorObj[key].length / 1000, 1000]);
+var dataset = {};
+model.forEach(function (example) {
+dataset[example.label] = tensor(example.values, example.shape);
 });
-this.knn.setClassifierDataset(tensorObj);
+this.knn.setClassifierDataset(dataset);
 }
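
The new serialization format stores one `{ label, values, shape }` record per class instead of a flat label-to-array map, so each tensor's real shape survives the round trip rather than being reconstructed with a hard-coded width of 1000. A standalone sketch of the same round trip, assuming `@tensorflow/tfjs` and `@tensorflow-models/knn-classifier`:

```js
import * as tf from '@tensorflow/tfjs';
import * as knnClassifier from '@tensorflow-models/knn-classifier';

// Mirrors the new getModelJson: one { label, values, shape } record per class.
async function datasetToJson (knn) {
  const dataset = knn.getClassifierDataset();
  const data = [];
  for (const label in dataset) {
    data.push({
      label: label,
      values: Array.from(await dataset[label].data()),
      shape: dataset[label].shape
    });
  }
  return JSON.stringify(data);
}

// Mirrors the new loadModelFromJson: rebuild each tensor from its saved shape.
function jsonToDataset (json) {
  const dataset = {};
  for (const example of JSON.parse(json)) {
    dataset[example.label] = tf.tensor(example.values, example.shape);
  }
  return dataset;
}

// Round trip: train a toy classifier, serialize it, restore it elsewhere.
const knn = knnClassifier.create();
knn.addExample(tf.tensor1d([1, 0]), '0');
knn.addExample(tf.tensor1d([0, 1]), '0');
const restored = knnClassifier.create();
restored.setClassifierDataset(jsonToDataset(await datasetToJson(knn)));
```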

@@ -628,7 +635,7 @@ },

 if (!inject) { return }
-inject("data-v-1619a330_0", { source: ".camera-gestures-container[data-v-1619a330]{width:227px}video.camera-gestures-camera-feed[data-v-1619a330]{transform:rotateY(180deg);-webkit-transform:rotateY(180deg);-moz-transform:rotateY(180deg);width:227px;max-width:100%}.camera-gestures-progress-bar[data-v-1619a330]{height:5px;background:#41b883;border-radius:5px 0 0 5px}.camera-gestures-progress-bar.invisible[data-v-1619a330]{background:0 0}.camera-gestures-instructions[data-v-1619a330]{text-align:center}.camera-gestures-loader-container[data-v-1619a330]{width:227px;height:100px}.camera-gestures-lds-ring[data-v-1619a330]{display:block;position:relative;left:calc(50% - 32px);top:calc(50% - 32px);width:64px;height:64px}.camera-gestures-lds-ring div[data-v-1619a330]{box-sizing:border-box;display:block;position:absolute;width:51px;height:51px;margin:6px;border:6px solid #41b883;border-radius:50%;animation:camera-gestures-lds-ring-data-v-1619a330 1.2s cubic-bezier(.5,0,.5,1) infinite;border-color:#41b883 transparent transparent transparent}.camera-gestures-lds-ring div[data-v-1619a330]:nth-child(1){animation-delay:-.45s}.camera-gestures-lds-ring div[data-v-1619a330]:nth-child(2){animation-delay:-.3s}.camera-gestures-lds-ring div[data-v-1619a330]:nth-child(3){animation-delay:-.15s}@keyframes camera-gestures-lds-ring-data-v-1619a330{0%{transform:rotate(0)}100%{transform:rotate(360deg)}}", map: undefined, media: undefined });
+inject("data-v-776e4379_0", { source: ".camera-gestures-container[data-v-776e4379]{width:227px}video.camera-gestures-camera-feed[data-v-776e4379]{transform:rotateY(180deg);-webkit-transform:rotateY(180deg);-moz-transform:rotateY(180deg);width:227px;max-width:100%}.camera-gestures-progress-bar[data-v-776e4379]{height:5px;background:#41b883;border-radius:5px 0 0 5px}.camera-gestures-progress-bar.invisible[data-v-776e4379]{background:0 0}.camera-gestures-instructions[data-v-776e4379]{text-align:center}.camera-gestures-loader-container[data-v-776e4379]{width:227px;height:100px}.camera-gestures-lds-ring[data-v-776e4379]{display:block;position:relative;left:calc(50% - 32px);top:calc(50% - 32px);width:64px;height:64px}.camera-gestures-lds-ring div[data-v-776e4379]{box-sizing:border-box;display:block;position:absolute;width:51px;height:51px;margin:6px;border:6px solid #41b883;border-radius:50%;animation:camera-gestures-lds-ring-data-v-776e4379 1.2s cubic-bezier(.5,0,.5,1) infinite;border-color:#41b883 transparent transparent transparent}.camera-gestures-lds-ring div[data-v-776e4379]:nth-child(1){animation-delay:-.45s}.camera-gestures-lds-ring div[data-v-776e4379]:nth-child(2){animation-delay:-.3s}.camera-gestures-lds-ring div[data-v-776e4379]:nth-child(3){animation-delay:-.15s}@keyframes camera-gestures-lds-ring-data-v-776e4379{0%{transform:rotate(0)}100%{transform:rotate(360deg)}}", map: undefined, media: undefined });
 };
 /* scoped */
-var __vue_scope_id__ = "data-v-1619a330";
+var __vue_scope_id__ = "data-v-776e4379";
 /* module identifier */

@@ -635,0 +642,0 @@ var __vue_module_identifier__ = undefined;

@@ -280,3 +280,4 @@ var CameraGestures = (function (exports, mobilenet, tfjs, knnClassifier) {

 var res = await this.knn.predictClass(logits, TOPK);
-var gestureIndex = this.gestureIndexFromClassIndex(res.classIndex);
+var classIndex = parseInt(res.label);
+var gestureIndex = this.gestureIndexFromClassIndex(classIndex);
 var neutralDetected = gestureIndex === -2;

@@ -311,2 +312,4 @@ var gesture = neutralDetected

 updateState: function updateState () {
+var this$1 = this;
 // Model provided - skip everything and just use the given model

@@ -335,3 +338,3 @@ if (this.model) {

 // Go back to start
-this.$emit('verificationFailed', this.getModelJson());
+this.getModelJson().then(function (x) { return this$1.$emit('verificationFailed', x); });
 this.reset();

@@ -365,3 +368,3 @@ return

 if (this.state === 'training' && this.doVerification) {
-this.$emit('doneTraining', this.getModelJson());
+this.getModelJson().then(function (x) { return this$1.$emit('doneTraining', x); });
 this.state = 'testing';

@@ -373,3 +376,3 @@ this.currentGestureIndex = !this.trainNeutralLast ? -2 : 0;

 // verification completed successfully!
-this.$emit('doneVerification', this.getModelJson());
+this.getModelJson().then(function (x) { return this$1.$emit('doneVerification', x); });
 }

@@ -458,20 +461,24 @@ this.state = 'predicting';

 },
-getModelJson: function getModelJson () {
-// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-456308218
+getModelJson: async function getModelJson () {
+// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-660642769
 var dataset = this.knn.getClassifierDataset();
-var datasetObj = {};
-Object.keys(dataset).forEach(function (key) {
-var data = dataset[key].dataSync();
-datasetObj[key] = Array.from(data);
-});
-return JSON.stringify(datasetObj)
+var data = [];
+for (var label in dataset) {
+data.push({
+label: label,
+values: Array.from(await dataset[label].data()),
+shape: dataset[label].shape
+});
+}
+return JSON.stringify(data)
 },
 loadModelFromJson: function loadModelFromJson (json) {
-// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-456308218
-var tensorObj = JSON.parse(json);
+// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-660642769
+var model = JSON.parse(json);
 // convert back to tensor
-Object.keys(tensorObj).forEach(function (key) {
-tensorObj[key] = tfjs.tensor(tensorObj[key], [tensorObj[key].length / 1000, 1000]);
+var dataset = {};
+model.forEach(function (example) {
+dataset[example.label] = tfjs.tensor(example.values, example.shape);
 });
-this.knn.setClassifierDataset(tensorObj);
+this.knn.setClassifierDataset(dataset);
 }

@@ -627,7 +634,7 @@ },

 if (!inject) { return }
-inject("data-v-1619a330_0", { source: ".camera-gestures-container[data-v-1619a330]{width:227px}video.camera-gestures-camera-feed[data-v-1619a330]{transform:rotateY(180deg);-webkit-transform:rotateY(180deg);-moz-transform:rotateY(180deg);width:227px;max-width:100%}.camera-gestures-progress-bar[data-v-1619a330]{height:5px;background:#41b883;border-radius:5px 0 0 5px}.camera-gestures-progress-bar.invisible[data-v-1619a330]{background:0 0}.camera-gestures-instructions[data-v-1619a330]{text-align:center}.camera-gestures-loader-container[data-v-1619a330]{width:227px;height:100px}.camera-gestures-lds-ring[data-v-1619a330]{display:block;position:relative;left:calc(50% - 32px);top:calc(50% - 32px);width:64px;height:64px}.camera-gestures-lds-ring div[data-v-1619a330]{box-sizing:border-box;display:block;position:absolute;width:51px;height:51px;margin:6px;border:6px solid #41b883;border-radius:50%;animation:camera-gestures-lds-ring-data-v-1619a330 1.2s cubic-bezier(.5,0,.5,1) infinite;border-color:#41b883 transparent transparent transparent}.camera-gestures-lds-ring div[data-v-1619a330]:nth-child(1){animation-delay:-.45s}.camera-gestures-lds-ring div[data-v-1619a330]:nth-child(2){animation-delay:-.3s}.camera-gestures-lds-ring div[data-v-1619a330]:nth-child(3){animation-delay:-.15s}@keyframes camera-gestures-lds-ring-data-v-1619a330{0%{transform:rotate(0)}100%{transform:rotate(360deg)}}", map: undefined, media: undefined });
+inject("data-v-776e4379_0", { source: ".camera-gestures-container[data-v-776e4379]{width:227px}video.camera-gestures-camera-feed[data-v-776e4379]{transform:rotateY(180deg);-webkit-transform:rotateY(180deg);-moz-transform:rotateY(180deg);width:227px;max-width:100%}.camera-gestures-progress-bar[data-v-776e4379]{height:5px;background:#41b883;border-radius:5px 0 0 5px}.camera-gestures-progress-bar.invisible[data-v-776e4379]{background:0 0}.camera-gestures-instructions[data-v-776e4379]{text-align:center}.camera-gestures-loader-container[data-v-776e4379]{width:227px;height:100px}.camera-gestures-lds-ring[data-v-776e4379]{display:block;position:relative;left:calc(50% - 32px);top:calc(50% - 32px);width:64px;height:64px}.camera-gestures-lds-ring div[data-v-776e4379]{box-sizing:border-box;display:block;position:absolute;width:51px;height:51px;margin:6px;border:6px solid #41b883;border-radius:50%;animation:camera-gestures-lds-ring-data-v-776e4379 1.2s cubic-bezier(.5,0,.5,1) infinite;border-color:#41b883 transparent transparent transparent}.camera-gestures-lds-ring div[data-v-776e4379]:nth-child(1){animation-delay:-.45s}.camera-gestures-lds-ring div[data-v-776e4379]:nth-child(2){animation-delay:-.3s}.camera-gestures-lds-ring div[data-v-776e4379]:nth-child(3){animation-delay:-.15s}@keyframes camera-gestures-lds-ring-data-v-776e4379{0%{transform:rotate(0)}100%{transform:rotate(360deg)}}", map: undefined, media: undefined });
 };
 /* scoped */
-var __vue_scope_id__ = "data-v-1619a330";
+var __vue_scope_id__ = "data-v-776e4379";
 /* module identifier */

@@ -634,0 +641,0 @@ var __vue_module_identifier__ = undefined;

@@ -283,3 +283,4 @@ (function (global, factory) {

 var res = await this.knn.predictClass(logits, TOPK);
-var gestureIndex = this.gestureIndexFromClassIndex(res.classIndex);
+var classIndex = parseInt(res.label);
+var gestureIndex = this.gestureIndexFromClassIndex(classIndex);
 var neutralDetected = gestureIndex === -2;

@@ -314,2 +315,4 @@ var gesture = neutralDetected

 updateState: function updateState () {
+var this$1 = this;
 // Model provided - skip everything and just use the given model

@@ -338,3 +341,3 @@ if (this.model) {

 // Go back to start
-this.$emit('verificationFailed', this.getModelJson());
+this.getModelJson().then(function (x) { return this$1.$emit('verificationFailed', x); });
 this.reset();

@@ -368,3 +371,3 @@ return

 if (this.state === 'training' && this.doVerification) {
-this.$emit('doneTraining', this.getModelJson());
+this.getModelJson().then(function (x) { return this$1.$emit('doneTraining', x); });
 this.state = 'testing';

@@ -376,3 +379,3 @@ this.currentGestureIndex = !this.trainNeutralLast ? -2 : 0;

 // verification completed successfully!
-this.$emit('doneVerification', this.getModelJson());
+this.getModelJson().then(function (x) { return this$1.$emit('doneVerification', x); });
 }

@@ -461,20 +464,24 @@ this.state = 'predicting';

 },
-getModelJson: function getModelJson () {
-// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-456308218
+getModelJson: async function getModelJson () {
+// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-660642769
 var dataset = this.knn.getClassifierDataset();
-var datasetObj = {};
-Object.keys(dataset).forEach(function (key) {
-var data = dataset[key].dataSync();
-datasetObj[key] = Array.from(data);
-});
-return JSON.stringify(datasetObj)
+var data = [];
+for (var label in dataset) {
+data.push({
+label: label,
+values: Array.from(await dataset[label].data()),
+shape: dataset[label].shape
+});
+}
+return JSON.stringify(data)
 },
 loadModelFromJson: function loadModelFromJson (json) {
-// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-456308218
-var tensorObj = JSON.parse(json);
+// With thanks to https://github.com/tensorflow/tfjs/issues/633#issuecomment-660642769
+var model = JSON.parse(json);
 // convert back to tensor
-Object.keys(tensorObj).forEach(function (key) {
-tensorObj[key] = tfjs.tensor(tensorObj[key], [tensorObj[key].length / 1000, 1000]);
+var dataset = {};
+model.forEach(function (example) {
+dataset[example.label] = tfjs.tensor(example.values, example.shape);
 });
-this.knn.setClassifierDataset(tensorObj);
+this.knn.setClassifierDataset(dataset);
 }

@@ -630,7 +637,7 @@ },

 if (!inject) { return }
-inject("data-v-1619a330_0", { source: ".camera-gestures-container[data-v-1619a330]{width:227px}video.camera-gestures-camera-feed[data-v-1619a330]{transform:rotateY(180deg);-webkit-transform:rotateY(180deg);-moz-transform:rotateY(180deg);width:227px;max-width:100%}.camera-gestures-progress-bar[data-v-1619a330]{height:5px;background:#41b883;border-radius:5px 0 0 5px}.camera-gestures-progress-bar.invisible[data-v-1619a330]{background:0 0}.camera-gestures-instructions[data-v-1619a330]{text-align:center}.camera-gestures-loader-container[data-v-1619a330]{width:227px;height:100px}.camera-gestures-lds-ring[data-v-1619a330]{display:block;position:relative;left:calc(50% - 32px);top:calc(50% - 32px);width:64px;height:64px}.camera-gestures-lds-ring div[data-v-1619a330]{box-sizing:border-box;display:block;position:absolute;width:51px;height:51px;margin:6px;border:6px solid #41b883;border-radius:50%;animation:camera-gestures-lds-ring-data-v-1619a330 1.2s cubic-bezier(.5,0,.5,1) infinite;border-color:#41b883 transparent transparent transparent}.camera-gestures-lds-ring div[data-v-1619a330]:nth-child(1){animation-delay:-.45s}.camera-gestures-lds-ring div[data-v-1619a330]:nth-child(2){animation-delay:-.3s}.camera-gestures-lds-ring div[data-v-1619a330]:nth-child(3){animation-delay:-.15s}@keyframes camera-gestures-lds-ring-data-v-1619a330{0%{transform:rotate(0)}100%{transform:rotate(360deg)}}", map: undefined, media: undefined });
+inject("data-v-776e4379_0", { source: ".camera-gestures-container[data-v-776e4379]{width:227px}video.camera-gestures-camera-feed[data-v-776e4379]{transform:rotateY(180deg);-webkit-transform:rotateY(180deg);-moz-transform:rotateY(180deg);width:227px;max-width:100%}.camera-gestures-progress-bar[data-v-776e4379]{height:5px;background:#41b883;border-radius:5px 0 0 5px}.camera-gestures-progress-bar.invisible[data-v-776e4379]{background:0 0}.camera-gestures-instructions[data-v-776e4379]{text-align:center}.camera-gestures-loader-container[data-v-776e4379]{width:227px;height:100px}.camera-gestures-lds-ring[data-v-776e4379]{display:block;position:relative;left:calc(50% - 32px);top:calc(50% - 32px);width:64px;height:64px}.camera-gestures-lds-ring div[data-v-776e4379]{box-sizing:border-box;display:block;position:absolute;width:51px;height:51px;margin:6px;border:6px solid #41b883;border-radius:50%;animation:camera-gestures-lds-ring-data-v-776e4379 1.2s cubic-bezier(.5,0,.5,1) infinite;border-color:#41b883 transparent transparent transparent}.camera-gestures-lds-ring div[data-v-776e4379]:nth-child(1){animation-delay:-.45s}.camera-gestures-lds-ring div[data-v-776e4379]:nth-child(2){animation-delay:-.3s}.camera-gestures-lds-ring div[data-v-776e4379]:nth-child(3){animation-delay:-.15s}@keyframes camera-gestures-lds-ring-data-v-776e4379{0%{transform:rotate(0)}100%{transform:rotate(360deg)}}", map: undefined, media: undefined });
 };
 /* scoped */
-var __vue_scope_id__ = "data-v-1619a330";
+var __vue_scope_id__ = "data-v-776e4379";
 /* module identifier */

@@ -637,0 +644,0 @@ var __vue_module_identifier__ = undefined;

@@ -347,8 +347,5 @@ # Guide

 ## Saving the generated model
-::: warning
-This feature is very experimental and probably isn't going to work very well yet.
-:::
 Due to differences in lighting conditions, how far the user is positioned from the camera, background noise, choice of gestures, etc, for best results it is recommended that gestures be trained each time before the user uses them.
-It is, however, possible to save and load models generated through training. A trained model can be retrieved by subscribing to the @doneTraining event.
+It is, however, possible to save and load models generated through training. A trained model can be retrieved by subscribing to the `@doneTraining` event.
 ```html

@@ -355,0 +352,0 @@ <template>
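
Putting the two halves together, the JSON emitted by `@doneTraining` can be persisted and handed back through the `model` prop (the `if (this.model)` branch above) on a later visit. A hedged sketch of that flow, assuming the component is registered as `<camera-gestures>` and that `swipeLeft` is one of the app's own trained gesture events (both names are illustrative):

```html
<template>
  <camera-gestures
    :model="savedModel"
    @doneTraining="onDoneTraining"
    @swipeLeft="goBack"
  />
</template>

<script>
export default {
  data () {
    return {
      // Reuse a previously saved model, if any; otherwise train from scratch.
      savedModel: localStorage.getItem('gesture-model')
    }
  },
  methods: {
    onDoneTraining (modelJson) {
      // As of 0.3.0 the payload is the resolved JSON string, not a promise.
      localStorage.setItem('gesture-model', modelJson)
    },
    goBack () { /* handle the gesture */ }
  }
}
</script>
```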

package.json

 {
 "name": "vue-camera-gestures",
-"version": "0.2.4",
+"version": "0.3.0",
 "description": "Let users control your Vue app using AI and their camera in just 1 line of HTML!",

@@ -5,0 +5,0 @@ "main": "dist/vue-camera-gestures.umd.js",

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet
