@tensorflow/tfjs-node
Comparing version 0.1.11 to 0.1.12
@@ -46,2 +46,3 @@ "use strict"; | ||
var tfn = require("../index"); | ||
var file_system_1 = require("./file_system"); | ||
describe('File system IOHandler', function () { | ||
@@ -217,128 +218,264 @@ var mkdtemp = util_1.promisify(fs.mkdtemp); | ||
}); | ||
it('load: two weight files', function (done) { return __awaiter(_this, void 0, void 0, function () { | ||
var weightsManifest, modelJSON, modelJSONPath, weightsData1, weightsData2, handler; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
weightsManifest = [ | ||
{ | ||
paths: ['weights.1.bin'], | ||
weights: [{ | ||
describe('load json model', function () { | ||
it('load: two weight files', function (done) { return __awaiter(_this, void 0, void 0, function () { | ||
var weightsManifest, modelJSON, modelJSONPath, weightsData1, weightsData2, handler; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
weightsManifest = [ | ||
{ | ||
paths: ['weights.1.bin'], | ||
weights: [{ | ||
name: 'dense/kernel', | ||
shape: [3, 1], | ||
dtype: 'float32', | ||
}], | ||
}, | ||
{ | ||
paths: ['weights.2.bin'], | ||
weights: [{ | ||
name: 'dense/bias', | ||
shape: [1], | ||
dtype: 'float32', | ||
}] | ||
} | ||
]; | ||
modelJSON = { | ||
modelTopology: modelTopology1, | ||
weightsManifest: weightsManifest, | ||
}; | ||
modelJSONPath = path.join(testDir, 'model.json'); | ||
return [4, writeFile(modelJSONPath, JSON.stringify(modelJSON), 'utf8')]; | ||
case 1: | ||
_a.sent(); | ||
weightsData1 = Buffer.from(new Float32Array([-1.1, -3.3, -3.3]).buffer); | ||
return [4, writeFile(path.join(testDir, 'weights.1.bin'), weightsData1, 'binary')]; | ||
case 2: | ||
_a.sent(); | ||
weightsData2 = Buffer.from(new Float32Array([-7.7]).buffer); | ||
return [4, writeFile(path.join(testDir, 'weights.2.bin'), weightsData2, 'binary')]; | ||
case 3: | ||
_a.sent(); | ||
handler = tfc.io.getLoadHandlers("file://" + modelJSONPath)[0]; | ||
handler.load() | ||
.then(function (modelArtifacts) { | ||
expect(modelArtifacts.modelTopology).toEqual(modelTopology1); | ||
expect(modelArtifacts.weightSpecs).toEqual([ | ||
{ | ||
name: 'dense/kernel', | ||
shape: [3, 1], | ||
dtype: 'float32', | ||
}], | ||
}, | ||
{ | ||
paths: ['weights.2.bin'], | ||
weights: [{ | ||
}, | ||
{ | ||
name: 'dense/bias', | ||
shape: [1], | ||
dtype: 'float32', | ||
}] | ||
} | ||
]; | ||
modelJSON = { | ||
modelTopology: modelTopology1, | ||
weightsManifest: weightsManifest, | ||
}; | ||
modelJSONPath = path.join(testDir, 'model.json'); | ||
return [4, writeFile(modelJSONPath, JSON.stringify(modelJSON), 'utf8')]; | ||
case 1: | ||
_a.sent(); | ||
weightsData1 = Buffer.from(new Float32Array([-1.1, -3.3, -3.3]).buffer); | ||
return [4, writeFile(path.join(testDir, 'weights.1.bin'), weightsData1, 'binary')]; | ||
case 2: | ||
_a.sent(); | ||
weightsData2 = Buffer.from(new Float32Array([-7.7]).buffer); | ||
return [4, writeFile(path.join(testDir, 'weights.2.bin'), weightsData2, 'binary')]; | ||
case 3: | ||
_a.sent(); | ||
handler = tfc.io.getLoadHandlers("file://" + modelJSONPath)[0]; | ||
handler.load() | ||
.then(function (modelArtifacts) { | ||
expect(modelArtifacts.modelTopology).toEqual(modelTopology1); | ||
expect(modelArtifacts.weightSpecs).toEqual([ | ||
} | ||
]); | ||
test_util_1.expectArraysClose(new Float32Array(modelArtifacts.weightData), new Float32Array([-1.1, -3.3, -3.3, -7.7])); | ||
done(); | ||
}) | ||
.catch(function (err) { return done.fail(err.stack); }); | ||
return [2]; | ||
} | ||
}); | ||
}); }); | ||
it('loading from nonexistent model.json path fails', function (done) { | ||
var handler = tfc.io.getLoadHandlers("file://" + testDir + "/foo/model.json")[0]; | ||
handler.load() | ||
.then(function (getModelArtifactsInfoForJSON) { | ||
done.fail('Loading from nonexisting model.json path succeeded ' + | ||
'unexpectedly.'); | ||
}) | ||
.catch(function (err) { | ||
expect(err.message) | ||
.toMatch(/model\.json.*does not exist.*loading failed/); | ||
done(); | ||
}); | ||
}); | ||
it('loading from missing weights path fails', function (done) { return __awaiter(_this, void 0, void 0, function () { | ||
var weightsManifest, modelJSON, modelJSONPath, weightsData1, handler; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
weightsManifest = [ | ||
{ | ||
name: 'dense/kernel', | ||
shape: [3, 1], | ||
dtype: 'float32', | ||
paths: ['weights.1.bin'], | ||
weights: [{ | ||
name: 'dense/kernel', | ||
shape: [3, 1], | ||
dtype: 'float32', | ||
}], | ||
}, | ||
{ | ||
name: 'dense/bias', | ||
shape: [1], | ||
dtype: 'float32', | ||
paths: ['weights.2.bin'], | ||
weights: [{ | ||
name: 'dense/bias', | ||
shape: [1], | ||
dtype: 'float32', | ||
}] | ||
} | ||
]); | ||
test_util_1.expectArraysClose(new Float32Array(modelArtifacts.weightData), new Float32Array([-1.1, -3.3, -3.3, -7.7])); | ||
done(); | ||
}) | ||
.catch(function (err) { return done.fail(err.stack); }); | ||
return [2]; | ||
} | ||
}); | ||
}); }); | ||
it('loading from nonexistent model.json path fails', function (done) { | ||
var handler = tfc.io.getLoadHandlers("file://" + testDir + "/foo/model.json")[0]; | ||
handler.load() | ||
.then(function (getModelArtifactsInfoForJSON) { | ||
done.fail('Loading from nonexisting model.json path succeeded ' + | ||
'unexpectedly.'); | ||
}) | ||
.catch(function (err) { | ||
expect(err.message) | ||
.toMatch(/model\.json.*does not exist.*loading failed/); | ||
done(); | ||
}); | ||
]; | ||
modelJSON = { | ||
modelTopology: modelTopology1, | ||
weightsManifest: weightsManifest, | ||
}; | ||
modelJSONPath = path.join(testDir, 'model.json'); | ||
return [4, writeFile(modelJSONPath, JSON.stringify(modelJSON), 'utf8')]; | ||
case 1: | ||
_a.sent(); | ||
weightsData1 = Buffer.from(new Float32Array([-1.1, -3.3, -3.3]).buffer); | ||
return [4, writeFile(path.join(testDir, 'weights.1.bin'), weightsData1, 'binary')]; | ||
case 2: | ||
_a.sent(); | ||
handler = tfc.io.getLoadHandlers("file://" + modelJSONPath)[0]; | ||
handler.load() | ||
.then(function (modelArtifacts) { | ||
done.fail('Loading with missing weights file succeeded ' + | ||
'unexpectedly.'); | ||
}) | ||
.catch(function (err) { | ||
expect(err.message) | ||
.toMatch(/Weight file .*weights\.2\.bin does not exist/); | ||
done(); | ||
}); | ||
return [2]; | ||
} | ||
}); | ||
}); }); | ||
}); | ||
it('loading from missing weights path fails', function (done) { return __awaiter(_this, void 0, void 0, function () { | ||
var weightsManifest, modelJSON, modelJSONPath, weightsData1, handler; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
weightsManifest = [ | ||
{ | ||
paths: ['weights.1.bin'], | ||
weights: [{ | ||
describe('load binary model', function () { | ||
it('load: two weight files', function (done) { return __awaiter(_this, void 0, void 0, function () { | ||
var weightsManifest, modelPath, modelData, modelManifestJSONPath, weightsData1, weightsData2, handler; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
weightsManifest = [ | ||
{ | ||
paths: ['weights.1.bin'], | ||
weights: [{ | ||
name: 'dense/kernel', | ||
shape: [3, 1], | ||
dtype: 'float32', | ||
}], | ||
}, | ||
{ | ||
paths: ['weights.2.bin'], | ||
weights: [{ | ||
name: 'dense/bias', | ||
shape: [1], | ||
dtype: 'float32', | ||
}] | ||
} | ||
]; | ||
modelPath = path.join(testDir, 'model.pb'); | ||
modelData = Buffer.from(new Uint8Array([1, 2, 3]).buffer); | ||
return [4, writeFile(modelPath, modelData, 'binary')]; | ||
case 1: | ||
_a.sent(); | ||
modelManifestJSONPath = path.join(testDir, 'manifest.json'); | ||
return [4, writeFile(modelManifestJSONPath, JSON.stringify(weightsManifest), 'utf8')]; | ||
case 2: | ||
_a.sent(); | ||
weightsData1 = Buffer.from(new Float32Array([-1.1, -3.3, -3.3]).buffer); | ||
return [4, writeFile(path.join(testDir, 'weights.1.bin'), weightsData1, 'binary')]; | ||
case 3: | ||
_a.sent(); | ||
weightsData2 = Buffer.from(new Float32Array([-7.7]).buffer); | ||
return [4, writeFile(path.join(testDir, 'weights.2.bin'), weightsData2, 'binary')]; | ||
case 4: | ||
_a.sent(); | ||
handler = new file_system_1.NodeFileSystem(["" + modelPath, "" + modelManifestJSONPath]); | ||
handler.load() | ||
.then(function (modelArtifacts) { | ||
test_util_1.expectArraysClose(new Uint8Array(modelArtifacts.modelTopology), new Uint8Array(modelData)); | ||
expect(modelArtifacts.weightSpecs).toEqual([ | ||
{ | ||
name: 'dense/kernel', | ||
shape: [3, 1], | ||
dtype: 'float32', | ||
}], | ||
}, | ||
{ | ||
paths: ['weights.2.bin'], | ||
weights: [{ | ||
}, | ||
{ | ||
name: 'dense/bias', | ||
shape: [1], | ||
dtype: 'float32', | ||
}] | ||
} | ||
]; | ||
modelJSON = { | ||
modelTopology: modelTopology1, | ||
weightsManifest: weightsManifest, | ||
}; | ||
modelJSONPath = path.join(testDir, 'model.json'); | ||
return [4, writeFile(modelJSONPath, JSON.stringify(modelJSON), 'utf8')]; | ||
case 1: | ||
_a.sent(); | ||
weightsData1 = Buffer.from(new Float32Array([-1.1, -3.3, -3.3]).buffer); | ||
return [4, writeFile(path.join(testDir, 'weights.1.bin'), weightsData1, 'binary')]; | ||
case 2: | ||
_a.sent(); | ||
handler = tfc.io.getLoadHandlers("file://" + modelJSONPath)[0]; | ||
handler.load() | ||
.then(function (modelArtifacts) { | ||
done.fail('Loading with missing weights file succeeded ' + | ||
'unexpectedly.'); | ||
}) | ||
.catch(function (err) { | ||
expect(err.message) | ||
.toMatch(/Weight file .*weights\.2\.bin does not exist/); | ||
done(); | ||
}); | ||
return [2]; | ||
} | ||
} | ||
]); | ||
test_util_1.expectArraysClose(new Float32Array(modelArtifacts.weightData), new Float32Array([-1.1, -3.3, -3.3, -7.7])); | ||
done(); | ||
}) | ||
.catch(function (err) { return done.fail(err.stack); }); | ||
return [2]; | ||
} | ||
}); | ||
}); }); | ||
it('path length does not equal 2 fails', function () { | ||
expect(function () { return new file_system_1.NodeFileSystem([testDir + "/foo/model.pb"]); }) | ||
.toThrowError(/file paths must have a length of 2.*actual length is 1.*/); | ||
}); | ||
}); }); | ||
it('loading from nonexistent model.json path fails', function (done) { | ||
var handler = new file_system_1.NodeFileSystem([testDir + "/foo/model.pb", testDir + "/foo/manifest.json"]); | ||
handler.load() | ||
.then(function (getModelArtifactsInfoForJSON) { | ||
done.fail('Loading from nonexisting model.pb path succeeded ' + | ||
'unexpectedly.'); | ||
}) | ||
.catch(function (err) { | ||
expect(err.message) | ||
.toMatch(/model\.pb.*does not exist.*loading failed/); | ||
done(); | ||
}); | ||
}); | ||
it('loading from missing weights path fails', function (done) { return __awaiter(_this, void 0, void 0, function () { | ||
var weightsManifest, modelPath, modelData, modelManifestJSONPath, weightsData1, handler; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
weightsManifest = [ | ||
{ | ||
paths: ['weights.1.bin'], | ||
weights: [{ | ||
name: 'dense/kernel', | ||
shape: [3, 1], | ||
dtype: 'float32', | ||
}], | ||
}, | ||
{ | ||
paths: ['weights.2.bin'], | ||
weights: [{ | ||
name: 'dense/bias', | ||
shape: [1], | ||
dtype: 'float32', | ||
}] | ||
} | ||
]; | ||
modelPath = path.join(testDir, 'model.pb'); | ||
modelData = Buffer.from(new Uint8Array([1, 2, 3]).buffer); | ||
return [4, writeFile(modelPath, modelData, 'binary')]; | ||
case 1: | ||
_a.sent(); | ||
modelManifestJSONPath = path.join(testDir, 'manifest.json'); | ||
return [4, writeFile(modelManifestJSONPath, JSON.stringify(weightsManifest), 'utf8')]; | ||
case 2: | ||
_a.sent(); | ||
weightsData1 = Buffer.from(new Float32Array([-1.1, -3.3, -3.3]).buffer); | ||
return [4, writeFile(path.join(testDir, 'weights.1.bin'), weightsData1, 'binary')]; | ||
case 3: | ||
_a.sent(); | ||
handler = new file_system_1.NodeFileSystem(["" + modelPath, "" + modelManifestJSONPath]); | ||
handler.load() | ||
.then(function (modelArtifacts) { | ||
done.fail('Loading with missing weights file succeeded ' + | ||
'unexpectedly.'); | ||
}) | ||
.catch(function (err) { | ||
expect(err.message) | ||
.toMatch(/Weight file .*weights\.2\.bin does not exist/); | ||
done(); | ||
}); | ||
return [2]; | ||
} | ||
}); | ||
}); }); | ||
}); | ||
it('Exported file-system handler class exists', function () { | ||
@@ -349,2 +486,20 @@ var handler = new tfn.io.NodeFileSystem(testDir); | ||
}); | ||
describe('nodeFileSystemRouter', function () { | ||
it('should handle single path', function () { | ||
expect(file_system_1.nodeFileSystemRouter('file://model.json')).toBeDefined(); | ||
}); | ||
it('should handle multiple paths', function () { | ||
expect(file_system_1.nodeFileSystemRouter([ | ||
'file://model.json', 'file://weights.json' | ||
])).toBeDefined(); | ||
}); | ||
it('should return null for non file path', function () { | ||
expect(file_system_1.nodeFileSystemRouter('http://model.json')).toBeNull(); | ||
}); | ||
it('should return null for multiple paths with mismatched scheme', function () { | ||
expect(file_system_1.nodeFileSystemRouter([ | ||
'file://model.json', 'http://weights.json' | ||
])).toBeNull(); | ||
}); | ||
}); | ||
}); |
@@ -7,7 +7,11 @@ import * as tfc from '@tensorflow/tfjs-core'; | ||
readonly WEIGHTS_BINARY_FILENAME: string; | ||
readonly MODEL_BINARY_FILENAME: string; | ||
constructor(path: string | string[]); | ||
save(modelArtifacts: tfc.io.ModelArtifacts): Promise<tfc.io.SaveResult>; | ||
load(): Promise<tfc.io.ModelArtifacts>; | ||
protected loadBinaryModel(): Promise<tfc.io.ModelArtifacts>; | ||
protected loadJSONModel(): Promise<tfc.io.ModelArtifacts>; | ||
private loadWeights(weightsManifest, path); | ||
protected createOrVerifyDirectory(): Promise<void>; | ||
} | ||
export declare const nodeFileSystemRouter: (url: string) => NodeFileSystem; | ||
export declare const nodeFileSystemRouter: (url: string | string[]) => NodeFileSystem; |
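The widened constructor and router signatures above are what allow loading a binary (frozen-graph) model from a pair of paths. Below is a minimal sketch of that usage, mirroring the tests earlier in this diff; the directory and file names are placeholders, not part of the package.

```js
// Sketch only: the new two-path form is [model topology (.pb), weights manifest (.json)].
// Paths are illustrative.
const tfn = require('@tensorflow/tfjs-node');

const handler = new tfn.io.NodeFileSystem(
    ['/tmp/my-model/model.pb', '/tmp/my-model/manifest.json']);

handler.load().then(function (modelArtifacts) {
  // modelTopology holds the raw .pb bytes; weightSpecs and weightData are
  // assembled from the manifest and the weight files it references.
  console.log(modelArtifacts.weightSpecs);
});
```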
@@ -38,2 +38,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var tfc = require("@tensorflow/tfjs-core"); | ||
var fs = require("fs"); | ||
@@ -61,3 +62,6 @@ var path_1 = require("path"); | ||
this.WEIGHTS_BINARY_FILENAME = 'weights.bin'; | ||
this.MODEL_BINARY_FILENAME = 'tensorflowjs.pb'; | ||
if (Array.isArray(path)) { | ||
tfc.util.assert(path.length === 2, 'file paths must have a length of 2, ' + | ||
("(actual length is " + path.length + ").")); | ||
this.path = path.map(function (p) { return path_1.resolve(p); }); | ||
@@ -110,57 +114,117 @@ } | ||
return __awaiter(this, void 0, void 0, function () { | ||
var info, modelJSON, _a, _b, modelArtifacts, dirName, buffers, weightSpecs, _i, _c, group, _d, _e, path, weightFilePath, buffer; | ||
return __generator(this, function (_f) { | ||
switch (_f.label) { | ||
return __generator(this, function (_a) { | ||
return [2, Array.isArray(this.path) ? this.loadBinaryModel() : | ||
this.loadJSONModel()]; | ||
}); | ||
}); | ||
}; | ||
NodeFileSystem.prototype.loadBinaryModel = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var topologyPath, weightManifestPath, topology, weightManifest, modelTopology, weightsManifest, _a, _b, modelArtifacts, _c, weightSpecs, weightData; | ||
return __generator(this, function (_d) { | ||
switch (_d.label) { | ||
case 0: | ||
if (Array.isArray(this.path)) { | ||
throw new Error('Loading from multiple paths is not supported yet.'); | ||
topologyPath = this.path[0]; | ||
weightManifestPath = this.path[1]; | ||
return [4, stat(topologyPath).catch(doesNotExistHandler('Topology Path'))]; | ||
case 1: | ||
topology = _d.sent(); | ||
return [4, stat(weightManifestPath) | ||
.catch(doesNotExistHandler('Weight Manifest Path'))]; | ||
case 2: | ||
weightManifest = _d.sent(); | ||
if (!topology.isFile()) { | ||
throw new Error('File specified for topology is not a file!'); | ||
} | ||
return [4, stat(this.path).catch(doesNotExistHandler('Path'))]; | ||
if (!weightManifest.isFile()) { | ||
throw new Error('File specified for the weight manifest is not a file!'); | ||
} | ||
return [4, readFile(this.path[0])]; | ||
case 3: | ||
modelTopology = _d.sent(); | ||
_b = (_a = JSON).parse; | ||
return [4, readFile(this.path[1], 'utf8')]; | ||
case 4: | ||
weightsManifest = _b.apply(_a, [_d.sent()]); | ||
modelArtifacts = { | ||
modelTopology: modelTopology, | ||
}; | ||
return [4, this.loadWeights(weightsManifest, this.path[1])]; | ||
case 5: | ||
_c = _d.sent(), weightSpecs = _c[0], weightData = _c[1]; | ||
modelArtifacts.weightSpecs = weightSpecs; | ||
modelArtifacts.weightData = weightData; | ||
return [2, modelArtifacts]; | ||
} | ||
}); | ||
}); | ||
}; | ||
NodeFileSystem.prototype.loadJSONModel = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var path, info, modelJSON, _a, _b, modelArtifacts, _c, weightSpecs, weightData; | ||
return __generator(this, function (_d) { | ||
switch (_d.label) { | ||
case 0: | ||
path = this.path; | ||
return [4, stat(path).catch(doesNotExistHandler('Path'))]; | ||
case 1: | ||
info = _f.sent(); | ||
if (!info.isFile()) return [3, 11]; | ||
info = _d.sent(); | ||
if (!info.isFile()) return [3, 5]; | ||
_b = (_a = JSON).parse; | ||
return [4, readFile(this.path, 'utf8')]; | ||
return [4, readFile(path, 'utf8')]; | ||
case 2: | ||
modelJSON = _b.apply(_a, [_f.sent()]); | ||
modelJSON = _b.apply(_a, [_d.sent()]); | ||
modelArtifacts = { | ||
modelTopology: modelJSON.modelTopology, | ||
}; | ||
if (!(modelJSON.weightsManifest != null)) return [3, 10]; | ||
dirName = path_1.dirname(this.path); | ||
if (!(modelJSON.weightsManifest != null)) return [3, 4]; | ||
return [4, this.loadWeights(modelJSON.weightsManifest, path)]; | ||
case 3: | ||
_c = _d.sent(), weightSpecs = _c[0], weightData = _c[1]; | ||
modelArtifacts.weightSpecs = weightSpecs; | ||
modelArtifacts.weightData = weightData; | ||
_d.label = 4; | ||
case 4: return [2, modelArtifacts]; | ||
case 5: throw new Error('The path to load from must be a file. Loading from a directory ' + | ||
'is not supported.'); | ||
} | ||
}); | ||
}); | ||
}; | ||
NodeFileSystem.prototype.loadWeights = function (weightsManifest, path) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var dirName, buffers, weightSpecs, _i, weightsManifest_1, group, _a, _b, path_2, weightFilePath, buffer; | ||
return __generator(this, function (_c) { | ||
switch (_c.label) { | ||
case 0: | ||
dirName = path_1.dirname(path); | ||
buffers = []; | ||
weightSpecs = []; | ||
_i = 0, _c = modelJSON.weightsManifest; | ||
_f.label = 3; | ||
_i = 0, weightsManifest_1 = weightsManifest; | ||
_c.label = 1; | ||
case 1: | ||
if (!(_i < weightsManifest_1.length)) return [3, 7]; | ||
group = weightsManifest_1[_i]; | ||
_a = 0, _b = group.paths; | ||
_c.label = 2; | ||
case 2: | ||
if (!(_a < _b.length)) return [3, 5]; | ||
path_2 = _b[_a]; | ||
weightFilePath = path_1.join(dirName, path_2); | ||
return [4, readFile(weightFilePath) | ||
.catch(doesNotExistHandler('Weight file'))]; | ||
case 3: | ||
if (!(_i < _c.length)) return [3, 9]; | ||
group = _c[_i]; | ||
_d = 0, _e = group.paths; | ||
_f.label = 4; | ||
buffer = _c.sent(); | ||
buffers.push(buffer); | ||
_c.label = 4; | ||
case 4: | ||
if (!(_d < _e.length)) return [3, 7]; | ||
path = _e[_d]; | ||
weightFilePath = path_1.join(dirName, path); | ||
return [4, readFile(weightFilePath) | ||
.catch(doesNotExistHandler('Weight file'))]; | ||
_a++; | ||
return [3, 2]; | ||
case 5: | ||
buffer = _f.sent(); | ||
buffers.push(buffer); | ||
_f.label = 6; | ||
weightSpecs.push.apply(weightSpecs, group.weights); | ||
_c.label = 6; | ||
case 6: | ||
_d++; | ||
return [3, 4]; | ||
case 7: | ||
weightSpecs.push.apply(weightSpecs, group.weights); | ||
_f.label = 8; | ||
case 8: | ||
_i++; | ||
return [3, 3]; | ||
case 9: | ||
modelArtifacts.weightSpecs = weightSpecs; | ||
modelArtifacts.weightData = io_utils_1.toArrayBuffer(buffers); | ||
_f.label = 10; | ||
case 10: return [2, modelArtifacts]; | ||
case 11: throw new Error('The path to load from must be a file. Loading from a directory ' + | ||
'is not supported.'); | ||
return [3, 1]; | ||
case 7: return [2, [weightSpecs, io_utils_1.toArrayBuffer(buffers)]]; | ||
} | ||
@@ -214,8 +278,18 @@ }); | ||
exports.nodeFileSystemRouter = function (url) { | ||
if (url.startsWith(NodeFileSystem.URL_SCHEME)) { | ||
return new NodeFileSystem(url.slice(NodeFileSystem.URL_SCHEME.length)); | ||
if (Array.isArray(url)) { | ||
if (url.every(function (urlElement) { return urlElement.startsWith(NodeFileSystem.URL_SCHEME); })) { | ||
return new NodeFileSystem(url.map(function (urlElement) { return urlElement.slice(NodeFileSystem.URL_SCHEME.length); })); | ||
} | ||
else { | ||
return null; | ||
} | ||
} | ||
else { | ||
return null; | ||
if (url.startsWith(NodeFileSystem.URL_SCHEME)) { | ||
return new NodeFileSystem(url.slice(NodeFileSystem.URL_SCHEME.length)); | ||
} | ||
else { | ||
return null; | ||
} | ||
} | ||
}; |
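For handler resolution without constructing NodeFileSystem directly, the router above now accepts either a single file:// URL or an array whose entries all use the file:// scheme, and returns null otherwise, matching the nodeFileSystemRouter tests earlier in the diff. A hedged sketch follows; the module path in the require is an assumption about the package layout, not taken from this diff.

```js
// Sketch of the router behaviour; the require path below is assumed.
const {nodeFileSystemRouter} =
    require('@tensorflow/tfjs-node/dist/io/file_system');

nodeFileSystemRouter('file://model.json');                          // NodeFileSystem handler
nodeFileSystemRouter(['file://model.pb', 'file://manifest.json']);  // NodeFileSystem handler
nodeFileSystemRouter('http://model.json');                          // null (not a file:// URL)
nodeFileSystemRouter(['file://model.pb', 'http://manifest.json']);  // null (mismatched schemes)
```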
@@ -21,3 +21,3 @@ import { BackendTimingInfo, DataType, KernelBackend, Rank, ShapeMap, Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D } from '@tensorflow/tfjs-core'; | ||
matMul(a: Tensor2D, b: Tensor2D, transposeA: boolean, transposeB: boolean): Tensor2D; | ||
stridedSlice<T extends Tensor>(x: T, begin: number[], end: number[], strides: number[], beginMask: number, endMask: number): T; | ||
stridedSlice<T extends Tensor>(x: T, begin: number[], end: number[], strides: number[], beginMask: number, endMask: number, ellipsisMask: number, newAxisMask: number, shrinkAxisMask: number): T; | ||
slice<T extends Tensor>(x: T, begin: number[], size: number[]): T; | ||
@@ -24,0 +24,0 @@ reverse<T extends Tensor>(a: T, axis: number[]): T; |
@@ -152,3 +152,3 @@ "use strict"; | ||
}; | ||
NodeJSKernelBackend.prototype.stridedSlice = function (x, begin, end, strides, beginMask, endMask) { | ||
NodeJSKernelBackend.prototype.stridedSlice = function (x, begin, end, strides, beginMask, endMask, ellipsisMask, newAxisMask, shrinkAxisMask) { | ||
var beginTensor = tfjs_core_1.tensor1d(begin, 'int32'); | ||
@@ -160,6 +160,17 @@ var endTensor = tfjs_core_1.tensor1d(end, 'int32'); | ||
{ name: 'begin_mask', type: this.binding.TF_ATTR_INT, value: beginMask }, | ||
{ name: 'end_mask', type: this.binding.TF_ATTR_INT, value: endMask }, | ||
{ name: 'ellipsis_mask', type: this.binding.TF_ATTR_INT, value: 0 }, | ||
{ name: 'new_axis_mask', type: this.binding.TF_ATTR_INT, value: 0 }, | ||
{ name: 'shrink_axis_mask', type: this.binding.TF_ATTR_INT, value: 0 } | ||
{ name: 'end_mask', type: this.binding.TF_ATTR_INT, value: endMask }, { | ||
name: 'ellipsis_mask', | ||
type: this.binding.TF_ATTR_INT, | ||
value: ellipsisMask | ||
}, | ||
{ | ||
name: 'new_axis_mask', | ||
type: this.binding.TF_ATTR_INT, | ||
value: newAxisMask | ||
}, | ||
{ | ||
name: 'shrink_axis_mask', | ||
type: this.binding.TF_ATTR_INT, | ||
value: shrinkAxisMask | ||
} | ||
]; | ||
@@ -166,0 +177,0 @@ return this.executeSingleOutput('StridedSlice', opAttrs, [x, beginTensor, endTensor, stridesTensor]); |
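The backend change above forwards the ellipsis/new-axis/shrink-axis masks to the StridedSlice op instead of hard-coding them to 0. A small usage sketch, exercising the op through the Node backend; the backend-setup lines follow the usual tfjs-node pattern of this era and may vary slightly with your @tensorflow/tfjs-core version.

```js
// Sketch only: stridedSlice running on the native 'tensorflow' backend.
const tf = require('@tensorflow/tfjs-core');
require('@tensorflow/tfjs-node');   // registers the native 'tensorflow' backend
tf.setBackend('tensorflow');

const x = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);
// Take rows 0..1 and columns 1..2 with stride 1 (beginMask/endMask default to 0).
const y = tf.stridedSlice(x, [0, 1], [2, 3], [1, 1]);
y.print();  // [[2, 3], [5, 6]]
```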
@@ -16,2 +16,6 @@ "use strict"; | ||
]; | ||
if (process.platform === 'win32') { | ||
IGNORE_LIST.push('clip test-tensorflow {} propagates NaNs'); | ||
IGNORE_LIST.push('maxPool test-tensorflow {} [x=[3,3,1] f=[2,2] s=1 ignores NaNs'); | ||
} | ||
var runner = new jasmineCtor(); | ||
@@ -18,0 +22,0 @@ runner.loadConfig({ |
@@ -1,2 +0,2 @@ | ||
declare const version = "0.1.11"; | ||
declare const version = "0.1.12"; | ||
export { version }; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var version = '0.1.11'; | ||
var version = '0.1.12'; | ||
exports.version = version; |
{ | ||
"name": "@tensorflow/tfjs-node", | ||
"version": "0.1.11", | ||
"version": "0.1.12", | ||
"main": "dist/index.js", | ||
@@ -14,4 +14,4 @@ "types": "dist/index.d.ts", | ||
"build": "tsc", | ||
"clean-deps": "rm -rf deps/tensorflow/*", | ||
"enable-gpu": "yarn clean-deps && ./scripts/download-libtensorflow.sh linux-gpu && yarn", | ||
"clean-deps": "rm -rf deps", | ||
"enable-gpu": "yarn clean-deps && node scripts/get_libtensorflow.js linux-gpu && yarn", | ||
"format": "clang-format -i -style=Google binding/*.cc binding/*.h", | ||
@@ -26,3 +26,3 @@ "build-npm": "./scripts/build-npm.sh", | ||
"devDependencies": { | ||
"@tensorflow/tfjs-core": "~0.12.8", | ||
"@tensorflow/tfjs-core": "~0.12.10", | ||
"@types/bindings": "~1.3.0", | ||
@@ -41,7 +41,10 @@ "@types/jasmine": "~2.8.6", | ||
"dependencies": { | ||
"bindings": "~1.3.0" | ||
"adm-zip": "^0.4.11", | ||
"bindings": "~1.3.0", | ||
"rimraf": "^2.6.2", | ||
"tar": "^4.4.6" | ||
}, | ||
"peerDependencies": { | ||
"@tensorflow/tfjs-core": "~0.12.8" | ||
"@tensorflow/tfjs-core": "~0.12.10" | ||
} | ||
} |
@@ -16,2 +16,3 @@ <a id="travis-badge" href="https://travis-ci.org/tensorflow/tfjs-node" alt="Build Status"> | ||
- Linux GPU (Ubuntu 14.04 or higher and Cuda 9.0 w/ CUDNN v7) ([see installation instructions](https://www.tensorflow.org/install/install_linux)) | ||
- Windows 7 or later (Currently, [CPU only](https://github.com/tensorflow/tfjs/issues/602)) | ||
@@ -18,0 +19,0 @@ *Other Linux variants might also work but this project matches [core TensorFlow installation requirements](https://www.tensorflow.org/install/install_linux).* |
Sorry, the diff of this file is not supported yet
Misc. License Issues
License (Experimental): A package's licensing information has fine-grained problems.
Found 1 instance in 1 package
Network access
Supply chain risk: This module accesses the network.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
+ Added adm-zip@^0.4.11
+ Added rimraf@^2.6.2
+ Added tar@^4.4.6
+ Added adm-zip@0.4.16 (transitive)
+ Added balanced-match@1.0.2 (transitive)
+ Added brace-expansion@1.1.11 (transitive)
+ Added chownr@1.1.4 (transitive)
+ Added concat-map@0.0.1 (transitive)
+ Added fs-minipass@1.2.7 (transitive)
+ Added fs.realpath@1.0.0 (transitive)
+ Added glob@7.2.3 (transitive)
+ Added inflight@1.0.6 (transitive)
+ Added inherits@2.0.4 (transitive)
+ Added minimatch@3.1.2 (transitive)
+ Added minimist@1.2.8 (transitive)
+ Added minipass@2.9.0 (transitive)
+ Added minizlib@1.3.3 (transitive)
+ Added mkdirp@0.5.6 (transitive)
+ Added once@1.4.0 (transitive)
+ Added path-is-absolute@1.0.1 (transitive)
+ Added rimraf@2.7.1 (transitive)
+ Added safe-buffer@5.2.1 (transitive)
+ Added tar@4.4.19 (transitive)
+ Added wrappy@1.0.2 (transitive)
+ Added yallist@3.1.1 (transitive)