@tensorflow/tfjs-data - npm Package Compare versions

Comparing version 0.1.4 to 0.1.5

ROADMAP.md


dist/dataset.js

@@ -51,4 +51,2 @@ "use strict";

var lazy_iterator_1 = require("./iterators/lazy_iterator");
- var lazy_iterator_2 = require("./iterators/lazy_iterator");
- var lazy_iterator_3 = require("./iterators/lazy_iterator");
var deep_map_1 = require("./util/deep_map");

@@ -136,3 +134,3 @@ var Dataset = (function () {

}); }); });
- return [2, lazy_iterator_2.iteratorFromConcatenated(iteratorIterator.take(count))];
+ return [2, lazy_iterator_1.iteratorFromConcatenated(iteratorIterator.take(count))];
});

@@ -234,3 +232,3 @@ }); });

return datasetFromIteratorFn(function () { return __awaiter(_this, void 0, void 0, function () { return __generator(this, function (_a) {
- return [2, lazy_iterator_3.iteratorFromItems(items)];
+ return [2, lazy_iterator_1.iteratorFromItems(items)];
}); }); });

@@ -287,28 +285,31 @@ }

function batchConcat(arrays) {
- var elementShape = shapeAndValues(arrays[0])[0];
- var batchShape = [arrays.length].concat(elementShape);
- var resultVals = new Float32Array(batchShape.reduce(function (x, y) { return x * y; }));
+ if (arrays.length === 0) {
+ throw new Error('Can\'t make a batch of zero elements.');
+ }
+ if (arrays[0] instanceof tf.Tensor) {
+ return tf.stack(arrays);
+ }
+ else if (Array.isArray(arrays[0])) {
+ return batchConcatArrays(arrays);
+ }
+ else {
+ var numbers = arrays;
+ return tf.Tensor.make([numbers.length], { values: new Float32Array(numbers) });
+ }
+ }
+ function batchConcatArrays(arrays) {
+ var rowLength = arrays[0].length;
+ var batchShape = [arrays.length, arrays[0].length];
+ var values = new Float32Array(arrays.length * rowLength);
var offset = 0;
for (var _i = 0, arrays_1 = arrays; _i < arrays_1.length; _i++) {
var a = arrays_1[_i];
- var _a = shapeAndValues(a), aShape = _a[0], aVals = _a[1];
- if (!tf.util.arraysEqual(aShape, elementShape)) {
+ if (a.length !== rowLength) {
throw new Error('Elements must have the same shape to be batched');
}
- resultVals.set(aVals, offset);
- offset += aVals.length;
+ values.set(a, offset);
+ offset += rowLength;
}
- return tf.Tensor.make(batchShape, { values: resultVals });
+ return tf.Tensor.make(batchShape, { values: values });
}
- function shapeAndValues(array) {
- if (array instanceof tf.Tensor) {
- return [array.shape, array.dataSync()];
- }
- else if (Array.isArray(array)) {
- return [[array.length], array];
- }
- else {
- return [[], [array]];
- }
- }
//# sourceMappingURL=dataset.js.map
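In short, 0.1.5 rewrites batching so that batchConcat only dispatches (empty-batch check, tf.stack for tensors, a new batchConcatArrays for rows of numbers, a rank-1 tensor for bare scalars) instead of flattening everything through shapeAndValues. Below is a hedged sketch of the same three-way dispatch, written against public tfjs-core calls rather than the internal tf.Tensor.make; batchSketch is a hypothetical helper name, not part of the package.

// Hedged sketch (not the library's code) of the dispatch the new batchConcat performs.
var tf = require('@tensorflow/tfjs-core');
function batchSketch(elements) {
    if (elements.length === 0) {
        throw new Error('Can\'t make a batch of zero elements.');
    }
    if (elements[0] instanceof tf.Tensor) {
        // Tensors of identical shape are stacked along a new leading batch axis.
        return tf.stack(elements);
    }
    if (Array.isArray(elements[0])) {
        // Rows of numbers become a [batchSize, rowLength] tensor; every row
        // must have the same length, mirroring the check in the diff above.
        var rowLength = elements[0].length;
        elements.forEach(function (row) {
            if (row.length !== rowLength) {
                throw new Error('Elements must have the same shape to be batched');
            }
        });
        return tf.tensor2d(elements, [elements.length, rowLength]);
    }
    // Bare numbers become a rank-1 float32 tensor of length batchSize.
    return tf.tensor1d(elements, 'float32');
}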

dist/index.d.ts

@@ -1,5 +0,5 @@

- export { array, Dataset, datasetFromIteratorFn, zip } from './dataset';
+ export { array, Dataset, zip } from './dataset';
export { CSVDataset } from './datasets/csv_dataset';
export { TextLineDataset } from './datasets/text_line_dataset';
- export { csv } from './readers';
+ export { csv, generator } from './readers';
export { FileDataSource } from './sources/file_data_source';

@@ -6,0 +6,0 @@ export { URLDataSource } from './sources/url_data_source';

dist/index.js

@@ -6,3 +6,2 @@ "use strict";

exports.Dataset = dataset_1.Dataset;
- exports.datasetFromIteratorFn = dataset_1.datasetFromIteratorFn;
exports.zip = dataset_1.zip;

@@ -15,2 +14,3 @@ var csv_dataset_1 = require("./datasets/csv_dataset");

exports.csv = readers_1.csv;
+ exports.generator = readers_1.generator;
var file_data_source_1 = require("./sources/file_data_source");

@@ -17,0 +17,0 @@ exports.FileDataSource = file_data_source_1.FileDataSource;

dist/iterators/byte_chunk_iterator.js

@@ -48,3 +48,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
- var utf8 = require("utf8");
+ var tfjs_core_1 = require("@tensorflow/tfjs-core");
var lazy_iterator_1 = require("./lazy_iterator");

@@ -88,4 +88,9 @@ var string_iterator_1 = require("./string_iterator");

_this.upstream = upstream;
- _this.partial = new Uint8Array([]);
- _this.partialBytesValid = 0;
+ if (tfjs_core_1.ENV.get('IS_BROWSER')) {
+ _this.decoder = new TextDecoder('utf-8');
+ }
+ else {
+ var StringDecoder = require('string_decoder').StringDecoder;
+ _this.decoder = new StringDecoder('utf8');
+ }
return _this;

@@ -98,3 +103,3 @@ }

return __awaiter(this, void 0, void 0, function () {
- var chunkResult, chunk, partialBytesRemaining, nextIndex, okUpToIndex, splitUtfWidth, bulk, reassembled;
+ var chunkResult, chunk, text;
return __generator(this, function (_a) {

@@ -106,6 +111,3 @@ switch (_a.label) {

if (chunkResult.done) {
- if (this.partial.length === 0) {
- return [2, false];
- }
- chunk = new Uint8Array([]);
+ return [2, false];
}

@@ -115,32 +117,9 @@ else {

}
- partialBytesRemaining = this.partial.length - this.partialBytesValid;
- nextIndex = partialBytesRemaining;
- okUpToIndex = nextIndex;
- splitUtfWidth = 0;
- while (nextIndex < chunk.length) {
- okUpToIndex = nextIndex;
- splitUtfWidth = utfWidth(chunk[nextIndex]);
- nextIndex = okUpToIndex + splitUtfWidth;
+ if (tfjs_core_1.ENV.get('IS_BROWSER')) {
+ text = this.decoder.decode(chunk, { stream: true });
}
- if (nextIndex === chunk.length) {
- okUpToIndex = nextIndex;
- }
- bulk = utf8.decode(String.fromCharCode.apply(null, chunk.slice(partialBytesRemaining, okUpToIndex)));
- if (partialBytesRemaining > 0) {
- this.partial.set(chunk.slice(0, partialBytesRemaining), this.partialBytesValid);
- reassembled = utf8.decode(String.fromCharCode.apply(null, this.partial));
- this.outputQueue.push(reassembled + bulk);
- }
else {
- this.outputQueue.push(bulk);
+ text = this.decoder.write(Buffer.from(chunk.buffer));
}
- if (okUpToIndex === chunk.length) {
- this.partial = new Uint8Array([]);
- this.partialBytesValid = 0;
- }
- else {
- this.partial = new Uint8Array(new ArrayBuffer(splitUtfWidth));
- this.partial.set(chunk.slice(okUpToIndex), 0);
- this.partialBytesValid = chunk.length - okUpToIndex;
- }
+ this.outputQueue.push(text);
return [2, true];

@@ -153,22 +132,2 @@ }

}(lazy_iterator_1.OneToManyIterator));
- function utfWidth(firstByte) {
- if (firstByte >= 252) {
- return 6;
- }
- else if (firstByte >= 248) {
- return 5;
- }
- else if (firstByte >= 240) {
- return 4;
- }
- else if (firstByte >= 224) {
- return 3;
- }
- else if (firstByte >= 192) {
- return 2;
- }
- else {
- return 1;
- }
- }
//# sourceMappingURL=byte_chunk_iterator.js.map
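In short, the hunks above replace the hand-rolled UTF-8 handling (utfWidth plus the partial / partialBytesValid carry-over buffer) with the platform decoders: TextDecoder in the browser and Node's string_decoder elsewhere. Both buffer an incomplete multi-byte sequence at the end of a chunk internally, which is what the removed code did by hand. A small sketch of that streaming behaviour follows; the byte values are invented for illustration and are not taken from the package's tests.

// '€' is the three UTF-8 bytes 0xE2 0x82 0xAC; we split it across two chunks on purpose.
var chunk1 = new Uint8Array([0x61, 0xE2, 0x82]); // 'a' plus the first two bytes of '€'
var chunk2 = new Uint8Array([0xAC, 0x62]);       // the last byte of '€' plus 'b'

// Browser branch: TextDecoder in streaming mode holds the incomplete bytes back.
var textDecoder = new TextDecoder('utf-8');
var out = textDecoder.decode(chunk1, { stream: true })   // 'a'
        + textDecoder.decode(chunk2, { stream: true });  // '€b'
console.log(out); // 'a€b'

// Node branch: string_decoder does the same carry-over for Buffer chunks.
var StringDecoder = require('string_decoder').StringDecoder;
var nodeDecoder = new StringDecoder('utf8');
console.log(nodeDecoder.write(Buffer.from(chunk1)) + nodeDecoder.write(Buffer.from(chunk2))); // 'a€b'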

dist/readers.d.ts

@@ -0,3 +1,5 @@

+ import { Dataset } from './dataset';
import { CSVDataset } from './datasets/csv_dataset';
- import { CSVConfig } from './types';
+ import { CSVConfig, DataElement } from './types';
export declare function csv(source: string, csvConfig?: CSVConfig): CSVDataset;
+ export declare function generator<T extends DataElement>(f: () => IteratorResult<T> | Promise<IteratorResult<T>>): Dataset<T>;
"use strict";
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+ };
+ var __generator = (this && this.__generator) || function (thisArg, body) {
+ var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+ return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+ function verb(n) { return function (v) { return step([n, v]); }; }
+ function step(op) {
+ if (f) throw new TypeError("Generator is already executing.");
+ while (_) try {
+ if (f = 1, y && (t = y[op[0] & 2 ? "return" : op[0] ? "throw" : "next"]) && !(t = t.call(y, op[1])).done) return t;
+ if (y = 0, t) op = [0, t.value];
+ switch (op[0]) {
+ case 0: case 1: t = op; break;
+ case 4: _.label++; return { value: op[1], done: false };
+ case 5: _.label++; y = op[1]; op = [0]; continue;
+ case 7: op = _.ops.pop(); _.trys.pop(); continue;
+ default:
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+ if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+ if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+ if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+ if (t[2]) _.ops.pop();
+ _.trys.pop(); continue;
+ }
+ op = body.call(thisArg, _);
+ } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+ if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+ }
+ };
Object.defineProperty(exports, "__esModule", { value: true });
+ var dataset_1 = require("./dataset");
var csv_dataset_1 = require("./datasets/csv_dataset");
+ var lazy_iterator_1 = require("./iterators/lazy_iterator");
var url_data_source_1 = require("./sources/url_data_source");

@@ -10,2 +47,10 @@ function csv(source, csvConfig) {

exports.csv = csv;
+ function generator(f) {
+ var _this = this;
+ var iter = lazy_iterator_1.iteratorFromFunction(f);
+ return dataset_1.datasetFromIteratorFn(function () { return __awaiter(_this, void 0, void 0, function () { return __generator(this, function (_a) {
+ return [2, iter];
+ }); }); });
+ }
+ exports.generator = generator;
//# sourceMappingURL=readers.js.map
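Taken together with readers.d.ts above, this hunk adds the generator() entry point: it wraps a user-supplied pull function in a LazyIterator via iteratorFromFunction and exposes it as a Dataset through datasetFromIteratorFn. Below is a hedged usage sketch against the declared signature; the element values are invented, and draining the result through Dataset.iterator() / next() is assumed to follow the LazyIterator pattern visible elsewhere in this diff.

var tfd = require('@tensorflow/tfjs-data');

// Pull function: each call returns the next element; done: true ends the stream.
var i = 0;
var ds = tfd.generator(function () {
    return i < 5 ? { value: i++, done: false } : { value: null, done: true };
});

// Drain the dataset through its lazy iterator.
ds.iterator().then(function (it) {
    function pull() {
        return it.next().then(function (r) {
            if (!r.done) {
                console.log(r.value); // 0, 1, 2, 3, 4
                return pull();
            }
        });
    }
    return pull();
});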

dist/version.d.ts

@@ -1,2 +0,2 @@

- declare const version = "0.1.4";
+ declare const version = "0.1.5";
export { version };
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
- var version = '0.1.4';
+ var version = '0.1.5';
exports.version = version;
//# sourceMappingURL=version.js.map

package.json

{
"name": "@tensorflow/tfjs-data",
"version": "0.1.4",
"version": "0.1.5",
"description": "TensorFlow Data API in JavaScript",

@@ -14,3 +14,3 @@ "private": false,

"devDependencies": {
"@tensorflow/tfjs-core": "0.14.2",
"@tensorflow/tfjs-core": "0.14.3",
"@types/fetch-mock": "^6.0.1",

@@ -56,3 +56,3 @@ "@types/jasmine": "~2.5.53",

"peerDependencies": {
"@tensorflow/tfjs-core": "0.14.2"
"@tensorflow/tfjs-core": "0.14.3"
},

@@ -62,5 +62,4 @@ "dependencies": {

"node-fetch": "~2.1.2",
"seedrandom": "~2.4.3",
"utf8": "~2.1.2"
"seedrandom": "~2.4.3"
}
}

