@tensorflow/tfjs-layers - npm Package Compare versions

Comparing version 0.0.3 to 0.0.4
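The headline change in this release is a dependency rename: every import of the monolithic 'deeplearn' package becomes an import of '@tensorflow/tfjs-core', and the compiled code's dl / deeplearn_1 aliases become tfc / tfjs_core_1. Beyond the rename, 0.0.4 splits the callback Logs type into UnresolvedLogs (values may be Scalar) and Logs (numbers only), makes Model.evaluate and Model.predict async and config-driven, moves Model.fit from fit(config) to fit(x, y, config), and adds RNN and StackedRNNCells to the public API. A minimal sketch of the import pattern repeated throughout the diff:

// Before (0.0.3):
//   import { Tensor, scalar } from 'deeplearn';
// After (0.0.4):
import { Tensor, scalar } from '@tensorflow/tfjs-core';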


dist/activations.d.ts

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { ConfigDictValue } from './types';

@@ -3,0 +3,0 @@ export declare type ActivationFn = (tensor: Tensor, axis?: number) => Tensor;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var K = require("./backend/deeplearnjs_backend");

@@ -62,3 +62,3 @@ var errors_1 = require("./errors");

function relu6(x) {
return K.minimum(deeplearn_1.scalar(6.0), K.relu(x));
return K.minimum(tfjs_core_1.scalar(6.0), K.relu(x));
}
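relu6 above caps the ReLU activation at 6 by broadcasting a scalar through minimum. A minimal standalone sketch of the 0.0.4 form (tfc is assumed to be @tensorflow/tfjs-core):

import * as tfc from '@tensorflow/tfjs-core';
// relu6(x) = min(6, max(0, x)); e.g. [-1, 3, 9] -> [0, 3, 6]
const relu6 = (x: tfc.Tensor): tfc.Tensor => tfc.minimum(tfc.scalar(6.0), tfc.relu(x));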

@@ -65,0 +65,0 @@ exports.relu6 = relu6;

@@ -1,2 +0,2 @@

import { Scalar, Tensor, Tensor1D } from 'deeplearn';
import { Scalar, Tensor, Tensor1D } from '@tensorflow/tfjs-core';
import { DataFormat, PaddingMode, PoolMode } from '../common';

@@ -3,0 +3,0 @@ import { Constraint } from '../constraints';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var dl = require("deeplearn");
var deeplearn_1 = require("deeplearn");
var tfc = require("@tensorflow/tfjs-core");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var _ = require("underscore");

@@ -24,3 +24,3 @@ var common_1 = require("../common");

function setBackend(requestedBackend) {
dl.setBackend(requestedBackend);
tfc.setBackend(requestedBackend);
backend = requestedBackend;

@@ -35,3 +35,3 @@ disposeScalarCache();

function keep(x) {
return dl.keep(x);
return tfc.keep(x);
}

@@ -48,4 +48,4 @@ exports.keep = keep;

if (scalarCache[dtype][value] == null) {
scalarCache[dtype][value] = deeplearn_1.scalar(value, dtype);
dl.keep(scalarCache[dtype][value]);
scalarCache[dtype][value] = tfjs_core_1.scalar(value, dtype);
tfc.keep(scalarCache[dtype][value]);
}
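The hunk above is the backend's scalar cache: constants are allocated once per dtype and pinned with keep() so a surrounding tidy() cannot dispose them between calls. A sketch of the whole pattern, with the cache shape assumed from the visible lines:

import * as tfc from '@tensorflow/tfjs-core';
const scalarCache: {[dtype: string]: {[value: number]: tfc.Scalar}} = {float32: {}, int32: {}};
function getScalar(value: number, dtype = 'float32'): tfc.Scalar {
  if (scalarCache[dtype][value] == null) {
    scalarCache[dtype][value] = tfc.scalar(value, dtype as 'float32' | 'int32');
    tfc.keep(scalarCache[dtype][value]);  // survive tidy() cleanup
  }
  return scalarCache[dtype][value];
}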

@@ -73,3 +73,3 @@ return scalarCache[dtype][value];

function dtype(x) {
return (x instanceof deeplearn_1.Tensor) ? DEFAULT_DTYPE : x.dtype;
return (x instanceof tfjs_core_1.Tensor) ? DEFAULT_DTYPE : x.dtype;
}

@@ -107,3 +107,3 @@ exports.dtype = dtype;

function transpose(x, perm) {
return dl.transpose(x, perm);
return tfc.transpose(x, perm);
}

@@ -113,3 +113,3 @@ exports.transpose = transpose;

function reverse(x, axes) {
return dl.reverse(x, axes);
return tfc.reverse(x, axes);
}

@@ -128,3 +128,3 @@ exports.reverse = reverse;

function squeeze(x, axis) {
return dl.squeeze(x, [axis]);
return tfc.squeeze(x, [axis]);
}

@@ -157,9 +157,9 @@ exports.squeeze = squeeze;

case 1:
return dl.slice1d(array, start, size);
return tfc.slice1d(array, start, size);
case 2:
return dl.slice2d(array, [start, 0], [size, array.shape[1]]);
return tfc.slice2d(array, [start, 0], [size, array.shape[1]]);
case 3:
return dl.slice3d(array, [start, 0, 0], [size, array.shape[1], array.shape[2]]);
return tfc.slice3d(array, [start, 0, 0], [size, array.shape[1], array.shape[2]]);
case 4:
return dl.slice4d(array, [start, 0, 0, 0], [size, array.shape[1], array.shape[2], array.shape[3]]);
return tfc.slice4d(array, [start, 0, 0, 0], [size, array.shape[1], array.shape[2], array.shape[3]]);
default:

@@ -174,9 +174,9 @@ throw new errors_1.ValueError("sliceAlongFirstAxis() received an unsupported subtype of Tensor: " +

case 1:
return dl.slice1d(array, start, size);
return tfc.slice1d(array, start, size);
case 2:
return dl.slice2d(array, [0, start], [array.shape[0], size]);
return tfc.slice2d(array, [0, start], [array.shape[0], size]);
case 3:
return dl.slice3d(array, [0, 0, start], [array.shape[0], array.shape[1], size]);
return tfc.slice3d(array, [0, 0, start], [array.shape[0], array.shape[1], size]);
case 4:
return dl.slice4d(array, [0, 0, 0, start], [array.shape[0], array.shape[1], array.shape[2], size]);
return tfc.slice4d(array, [0, 0, 0, start], [array.shape[0], array.shape[1], array.shape[2], size]);
default:

@@ -203,3 +203,3 @@ throw new errors_1.ValueError("sliceAlongLastAxis() received an unsupported subtype of Tensor: " +

}
return dl.concat(tensors, axis);
return tfc.concat(tensors, axis);
}
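The slice and concat helpers above dispatch on tensor rank to the typed slice1d..slice4d and concat1d..concat4d variants in tfjs-core. A minimal sketch of what one branch does (shapes and values are illustrative):

import * as tfc from '@tensorflow/tfjs-core';
// sliceAlongFirstAxis(m, 1, 2) on a rank-2 tensor maps to slice2d:
const m = tfc.tensor2d([[1, 2], [3, 4], [5, 6]]);      // shape [3, 2]
const rows = tfc.slice2d(m, [1, 0], [2, m.shape[1]]);  // [[3, 4], [5, 6]]
// concatAlongFirstAxis on rank-2 tensors maps to concat2d along axis 0:
const both = tfc.concat2d([rows, rows], 0);            // shape [4, 2]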

@@ -210,9 +210,9 @@ exports.concatenate = concatenate;

case 1:
return dl.concat1d([a, b]);
return tfc.concat1d([a, b]);
case 2:
return dl.concat2d([a, b], 0);
return tfc.concat2d([a, b], 0);
case 3:
return dl.concat3d([a, b], 0);
return tfc.concat3d([a, b], 0);
case 4:
return dl.concat4d([a, b], 0);
return tfc.concat4d([a, b], 0);
default:

@@ -232,3 +232,3 @@ throw new errors_1.ValueError('concatAlongFirstAxis() received an unsupported subtype of ' +

}
return dl.tile(x, n);
return tfc.tile(x, n);
}

@@ -252,3 +252,3 @@ exports.tile = tile;

function zeros(shape, dtype) {
return dl.zeros(shape);
return tfc.zeros(shape);
}

@@ -261,11 +261,11 @@ exports.zeros = zeros;

function zerosLike(x, dtype, name) {
return new types_1.LayerVariable(dl.zerosLike(x), dtype, name);
return new types_1.LayerVariable(tfc.zerosLike(x), dtype, name);
}
exports.zerosLike = zerosLike;
function ones(shape, dtype) {
return dl.ones(shape);
return tfc.ones(shape);
}
exports.ones = ones;
function onesVariable(shape, dtype, name) {
var allocated = dl.ones(shape);
var allocated = tfc.ones(shape);
return new types_1.LayerVariable(allocated, dtype, name);

@@ -275,3 +275,3 @@ }

function onesLike(x, dtype, name) {
var allocated = dl.onesLike(x);
var allocated = tfc.onesLike(x);
return new types_1.LayerVariable(allocated, dtype, name);

@@ -291,3 +291,3 @@ }

}
return deeplearn_1.tensor2d(buffer, [size, size]);
return tfjs_core_1.tensor2d(buffer, [size, size]);
}

@@ -300,31 +300,31 @@ exports.eye = eye;

function neg(x) {
return dl.neg(x);
return tfc.neg(x);
}
exports.neg = neg;
function add(x, y) {
return dl.add(x, y);
return tfc.add(x, y);
}
exports.add = add;
function subtract(x, y) {
return dl.sub(x, y);
return tfc.sub(x, y);
}
exports.subtract = subtract;
function multiply(x, y) {
return dl.mul(x, y);
return tfc.mul(x, y);
}
exports.multiply = multiply;
function divide(x, y) {
return dl.div(x, y);
return tfc.div(x, y);
}
exports.divide = divide;
function scalarTimesArray(c, x) {
return dl.mul(c, x);
return tfc.mul(c, x);
}
exports.scalarTimesArray = scalarTimesArray;
function scalarPlusArray(c, x) {
return dl.add(c, x);
return tfc.add(c, x);
}
exports.scalarPlusArray = scalarPlusArray;
function randomUniform(shape, minval, maxval, dtype, seed) {
return dl.randomUniform(shape, minval, maxval);
return tfc.randomUniform(shape, minval, maxval);
}

@@ -340,3 +340,3 @@ exports.randomUniform = randomUniform;

if (stddev === void 0) { stddev = 1.0; }
return dl.truncatedNormal(shape, mean, stddev);
return tfc.truncatedNormal(shape, mean, stddev);
}

@@ -358,3 +358,3 @@ exports.truncatedNormal = truncatedNormal;

var dtypeString = (dtype === types_1.DType.float32) ? 'float32' : 'int32';
return dl.randomNormal(shape, mean, stddev, dtypeString, seed);
return tfc.randomNormal(shape, mean, stddev, dtypeString, seed);
}

@@ -374,7 +374,7 @@ exports.randomNormal = randomNormal;

function updateAdd(x, increment) {
return x.write(dl.add(x.read(), increment));
return x.write(tfc.add(x.read(), increment));
}
exports.updateAdd = updateAdd;
function updateSub(x, decrement) {
return x.write(dl.sub(x.read(), decrement));
return x.write(tfc.sub(x.read(), decrement));
}

@@ -389,3 +389,3 @@ exports.updateSub = updateSub;

if (ndim(x) === 2) {
return dl.matMul(x, y);
return tfc.matMul(x, y);
}

@@ -397,3 +397,3 @@ else if (ndim(x) === 3) {

x = x.reshape([xShape0 * xShape1, xShape2]);
return dl.matMul(x, y).reshape([
return tfc.matMul(x, y).reshape([
xShape0, xShape1, y.shape[1]
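The reshape trick above implements dot() for a rank-3 input: collapse the two leading dimensions, run a plain 2-D matMul, then restore the batch shape. A sketch with assumed shapes:

import * as tfc from '@tensorflow/tfjs-core';
const x = tfc.ones([2, 3, 4]);                               // [batch, time, k]
const y = tfc.ones([4, 5]);                                  // [k, n]
const out = x.reshape([6, 4]).matMul(y).reshape([2, 3, 5]);  // [batch, time, n]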

@@ -414,3 +414,3 @@ ]);

}
return dl.oneHot(indices, numClasses);
return tfc.oneHot(indices, numClasses);
}

@@ -420,3 +420,3 @@ exports.oneHot = oneHot;

axis = normalizeAxis(x, axis);
return dl.mean(x, axis, keepDims);
return tfc.mean(x, axis, keepDims);
}

@@ -426,3 +426,3 @@ exports.mean = mean;

if (axis === void 0) { axis = -1; }
return dl.argMax(x, axis);
return tfc.argMax(x, axis);
}

@@ -432,41 +432,41 @@ exports.argmax = argmax;

if (Array.isArray(indices)) {
indices = deeplearn_1.tensor1d(indices);
indices = tfjs_core_1.tensor1d(indices);
}
return dl.gather(reference, indices, axis);
return tfc.gather(reference, indices, axis);
}
exports.gather = gather;
function max(x, axis, keepDims) {
return dl.max(x, axis, keepDims);
return tfc.max(x, axis, keepDims);
}
exports.max = max;
function min(x, axis, keepDims) {
return dl.min(x, axis, keepDims);
return tfc.min(x, axis, keepDims);
}
exports.min = min;
function minimum(x, y) {
return dl.minimum(x, y);
return tfc.minimum(x, y);
}
exports.minimum = minimum;
function sum(x, axis, keepDims) {
return dl.sum(x, axis, keepDims);
return tfc.sum(x, axis, keepDims);
}
exports.sum = sum;
function abs(x) {
return dl.abs(x);
return tfc.abs(x);
}
exports.abs = abs;
function square(x) {
return dl.mulStrict(x, x);
return tfc.mulStrict(x, x);
}
exports.square = square;
function sqrt(x) {
return dl.sqrt(x);
return tfc.sqrt(x);
}
exports.sqrt = sqrt;
function exp(x) {
return dl.exp(x);
return tfc.exp(x);
}
exports.exp = exp;
function log(x) {
return dl.log(x);
return tfc.log(x);
}

@@ -476,3 +476,3 @@ exports.log = log;

if (typeof (a) === 'number') {
a = deeplearn_1.scalar(Math.round(a), 'int32');
a = tfjs_core_1.scalar(Math.round(a), 'int32');
}

@@ -482,31 +482,31 @@ if (a.dtype !== 'int32') {

}
return dl.pow(x, a);
return tfc.pow(x, a);
}
exports.pow = pow;
function clip(x, minValue, maxValue) {
return dl.clipByValue(x, minValue, maxValue);
return tfc.clipByValue(x, minValue, maxValue);
}
exports.clip = clip;
function equal(x, y) {
return dl.equal(x, y);
return tfc.equal(x, y);
}
exports.equal = equal;
function greater(x, y) {
return dl.greater(x, y);
return tfc.greater(x, y);
}
exports.greater = greater;
function greaterEqual(x, y) {
return dl.greaterEqual(x, y);
return tfc.greaterEqual(x, y);
}
exports.greaterEqual = greaterEqual;
function maximum(x, y) {
return dl.maximum(x, y);
return tfc.maximum(x, y);
}
exports.maximum = maximum;
function sin(x) {
return dl.sin(x.value());
return tfc.sin(x.value());
}
exports.sin = sin;
function cos(x) {
return dl.cos(x.value());
return tfc.cos(x.value());
}

@@ -518,9 +518,9 @@ exports.cos = cos;

if (ndim(x) === 2) {
out = dl.batchNormalization2d(x, mean, variance, epsilon);
out = tfc.batchNormalization2d(x, mean, variance, epsilon);
}
else if (ndim(x) === 3) {
out = dl.batchNormalization3d(x, mean, variance, epsilon);
out = tfc.batchNormalization3d(x, mean, variance, epsilon);
}
else if (ndim(x) === 4) {
out = dl.batchNormalization4d(x, mean, variance, epsilon);
out = tfc.batchNormalization4d(x, mean, variance, epsilon);
}

@@ -548,3 +548,3 @@ else {

}
return dl.add(x, bias);
return tfc.add(x, bias);
}

@@ -558,23 +558,23 @@ exports.biasAdd = biasAdd;

}
return dl.elu(x);
return tfc.elu(x);
}
exports.elu = elu;
function selu(x) {
return dl.selu(x);
return tfc.selu(x);
}
exports.selu = selu;
function relu(x) {
return dl.relu(x);
return tfc.relu(x);
}
exports.relu = relu;
function softplus(x) {
return dl.log(dl.add(getScalar(1), dl.exp(x)));
return tfc.log(tfc.add(getScalar(1), tfc.exp(x)));
}
exports.softplus = softplus;
function softsign(x) {
return dl.div(x, dl.add(getScalar(1), dl.abs(x)));
return tfc.div(x, tfc.add(getScalar(1), tfc.abs(x)));
}
exports.softsign = softsign;
function tanh(x) {
return dl.tanh(x);
return tfc.tanh(x);
}

@@ -590,5 +590,5 @@ exports.tanh = tanh;

}
var multiplier = dl.step(dl.add(neg(level), randomUniform(x.shape, 0, 1, types_1.DType.float32)));
multiplier = dl.mul(divide(getScalar(1), subtract(getScalar(1), level)), multiplier);
return dl.mul(x, multiplier);
var multiplier = tfc.step(tfc.add(neg(level), randomUniform(x.shape, 0, 1, types_1.DType.float32)));
multiplier = tfc.mul(divide(getScalar(1), subtract(getScalar(1), level)), multiplier);
return tfc.mul(x, multiplier);
}
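dropout above is the inverted-dropout formulation: draw uniform noise, keep a unit where the noise exceeds level (step() maps positives to 1 and the rest to 0), then rescale the survivors by 1/(1 - level) so the expected activation is unchanged. A minimal standalone sketch, assuming 0 <= level < 1:

import * as tfc from '@tensorflow/tfjs-core';
function dropoutSketch(x: tfc.Tensor, level: number): tfc.Tensor {
  // 1 where uniform(0,1) - level > 0, i.e. the unit survives; else 0.
  const keepMask = tfc.step(tfc.randomUniform(x.shape, 0, 1).sub(tfc.scalar(level)));
  return x.mul(keepMask).mul(tfc.scalar(1 / (1 - level)));  // rescale survivors
}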

@@ -598,3 +598,3 @@ exports.dropout = dropout;

var squareSum = sum(square(x), axis, true);
var epsilonTensor = scalarTimesArray(deeplearn_1.scalar(exports.epsilon()), dl.onesLike(x));
var epsilonTensor = scalarTimesArray(tfjs_core_1.scalar(exports.epsilon()), tfc.onesLike(x));
var norm = sqrt(maximum(squareSum, epsilonTensor));
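l2Normalize above divides x by the L2 norm along the given axis, flooring the squared sum at epsilon so all-zero slices do not divide by zero. In compact form (1e-7 is an assumed value for the backend's epsilon()):

const l2Normalize = (x: tfc.Tensor, axis: number): tfc.Tensor =>
  tfc.div(x, tfc.sqrt(tfc.maximum(tfc.sum(tfc.mul(x, x), axis, true), tfc.scalar(1e-7))));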

@@ -606,3 +606,3 @@ return divide(x, norm);

if (dataFormat === common_1.DataFormat.CHANNEL_FIRST) {
return dl.transpose(x, [0, 2, 3, 1]);
return tfc.transpose(x, [0, 2, 3, 1]);
}

@@ -643,3 +643,3 @@ else {

}
var y = dl.conv1d(x, kernel, strides, padding === common_1.PaddingMode.SAME ? 'same' : 'valid');
var y = tfc.conv1d(x, kernel, strides, padding === common_1.PaddingMode.SAME ? 'same' : 'valid');
if (bias != null) {

@@ -686,3 +686,3 @@ y = biasAdd(y, bias);

}
y = dl.conv2d(y, kernel, strides, padding === common_1.PaddingMode.SAME ? 'same' : 'valid');
y = tfc.conv2d(y, kernel, strides, padding === common_1.PaddingMode.SAME ? 'same' : 'valid');
if (bias != null) {

@@ -692,3 +692,3 @@ y = biasAdd(y, bias);

if (dataFormat === common_1.DataFormat.CHANNEL_FIRST) {
y = dl.transpose(y, [0, 3, 1, 2]);
y = tfc.transpose(y, [0, 3, 1, 2]);
}

@@ -713,5 +713,5 @@ return y;

}
y = dl.depthwiseConv2d(y, depthwiseKernel, strides, padding === common_1.PaddingMode.SAME ? 'same' : 'valid', dilationRate);
y = tfc.depthwiseConv2d(y, depthwiseKernel, strides, padding === common_1.PaddingMode.SAME ? 'same' : 'valid', dilationRate);
if (dataFormat === common_1.DataFormat.CHANNEL_FIRST) {
y = dl.transpose(y, [0, 3, 1, 2]);
y = tfc.transpose(y, [0, 3, 1, 2]);
}

@@ -738,9 +738,9 @@ return y;

if (poolMode === common_1.PoolMode.MAX) {
y = dl.maxPool(x, poolSize, strides, paddingString);
y = tfc.maxPool(x, poolSize, strides, paddingString);
}
else {
y = dl.avgPool(x, poolSize, strides, paddingString);
y = tfc.avgPool(x, poolSize, strides, paddingString);
}
if (dataFormat === common_1.DataFormat.CHANNEL_FIRST) {
y = dl.transpose(y, [0, 3, 1, 2]);
y = tfc.transpose(y, [0, 3, 1, 2]);
}

@@ -770,3 +770,3 @@ return y;

if (axis === void 0) { axis = -1; }
return dl.softmax(x, axis);
return tfc.softmax(x, axis);
}

@@ -784,3 +784,3 @@ exports.softmax = softmax;

output = clip(output, exports.epsilon(), 1 - exports.epsilon());
return dl.neg(dl.sum(dl.mul(target, dl.log(output)), shape(output).length - 1));
return tfc.neg(tfc.sum(tfc.mul(target, tfc.log(output)), shape(output).length - 1));
}

@@ -790,5 +790,5 @@ exports.categoricalCrossentropy = categoricalCrossentropy;

if (fromLogits === void 0) { fromLogits = false; }
var flatTarget = dl.floor(flatten(target));
var flatTarget = tfc.floor(flatten(target));
var outputShape = shape(output);
var oneHotTarget = reshape(dl.oneHot(flatTarget, outputShape[outputShape.length - 1]), outputShape);
var oneHotTarget = reshape(tfc.oneHot(flatTarget, outputShape[outputShape.length - 1]), outputShape);
return categoricalCrossentropy(oneHotTarget, output, fromLogits);

@@ -802,3 +802,3 @@ }

y = clip(output, exports.epsilon(), 1 - exports.epsilon());
y = log(divide(y, subtract(dl.onesLike(y), y)));
y = log(divide(y, subtract(tfc.onesLike(y), y)));
}

@@ -812,6 +812,6 @@ else {

function sigmoidCrossEntropyWithLogits(target, output) {
var maxOutput = dl.maximum(output, dl.zerosLike(output));
var outputXTarget = dl.mul(output, target);
var sigmoidOutput = dl.log(dl.add(getScalar(1), dl.exp(dl.neg(dl.abs(output)))));
var result = dl.add(dl.sub(maxOutput, outputXTarget), sigmoidOutput);
var maxOutput = tfc.maximum(output, tfc.zerosLike(output));
var outputXTarget = tfc.mul(output, target);
var sigmoidOutput = tfc.log(tfc.add(getScalar(1), tfc.exp(tfc.neg(tfc.abs(output)))));
var result = tfc.add(tfc.sub(maxOutput, outputXTarget), sigmoidOutput);
return result;
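sigmoidCrossEntropyWithLogits above uses the numerically stable identity, for logits x and labels z: max(x, 0) - x*z + log(1 + exp(-|x|)). This equals the naive -z*log(sigmoid(x)) - (1 - z)*log(1 - sigmoid(x)) but never exponentiates a large positive number. A quick check at x = 2, z = 1:

// stable form:  max(2, 0) - 2*1 + log(1 + e^-2) = log(1.1353) ≈ 0.1269
// naive form:   -log(sigmoid(2)) = -log(0.8808) ≈ 0.1269  (the two agree)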

@@ -821,7 +821,7 @@ }

function sigmoid(x) {
return dl.sigmoid(x);
return tfc.sigmoid(x);
}
exports.sigmoid = sigmoid;
function hardSigmoid(x) {
var y = scalarPlusArray(deeplearn_1.scalar(0.5), scalarTimesArray(deeplearn_1.scalar(0.2), x));
var y = scalarPlusArray(tfjs_core_1.scalar(0.5), scalarTimesArray(tfjs_core_1.scalar(0.2), x));
return clip(y, 0, 1);
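hardSigmoid above is the piecewise-linear sigmoid approximation clip(0.2*x + 0.5, 0, 1): exactly 0 below x = -2.5, exactly 1 above x = 2.5, and linear in between (x = 0 gives 0.5). A one-line sketch:

const hardSigmoid = (x: tfc.Tensor): tfc.Tensor =>
  tfc.clipByValue(tfc.add(tfc.scalar(0.5), tfc.mul(tfc.scalar(0.2), x)), 0, 1);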

@@ -884,5 +884,5 @@ }

var variableList = variables.map(function (variable) { return variable.read(); });
var valudAndGrads = deeplearn_1.variableGrads(lossFn, variableList);
var valudAndGrads = tfjs_core_1.variableGrads(lossFn, variableList);
return variables.map(function (variable) { return valudAndGrads.grads[variable.name]; });
}
exports.gradients = gradients;

@@ -1,6 +0,9 @@

import { Scalar, Tensor } from 'deeplearn';
import { Scalar, Tensor } from '@tensorflow/tfjs-core';
import { Model } from './engine/training';
export declare type Logs = {
export declare type UnresolvedLogs = {
[key: string]: number | Scalar;
};
export declare type Logs = {
[key: string]: number;
};
export declare type Params = {

@@ -15,8 +18,8 @@ [key: string]: number | string | boolean | number[] | string[] | boolean[];

setModel(model: Model): void;
onEpochBegin(epoch: number, logs?: Logs): Promise<void>;
onEpochEnd(epoch: number, logs?: Logs): Promise<void>;
onBatchBegin(batch: number, logs?: Logs): Promise<void>;
onBatchEnd(batch: number, logs?: Logs): Promise<void>;
onTrainBegin(logs?: Logs): Promise<void>;
onTrainEnd(logs?: Logs): Promise<void>;
onEpochBegin(epoch: number, logs?: UnresolvedLogs): Promise<void>;
onEpochEnd(epoch: number, logs?: UnresolvedLogs): Promise<void>;
onBatchBegin(batch: number, logs?: UnresolvedLogs): Promise<void>;
onBatchEnd(batch: number, logs?: UnresolvedLogs): Promise<void>;
onTrainBegin(logs?: UnresolvedLogs): Promise<void>;
onTrainEnd(logs?: UnresolvedLogs): Promise<void>;
}

@@ -30,8 +33,8 @@ export declare class CallbackList {

setModel(model: Model): void;
onEpochBegin(epoch: number, logs?: Logs): Promise<void>;
onEpochEnd(epoch: number, logs?: Logs): Promise<void>;
onBatchBegin(batch: number, logs?: Logs): Promise<void>;
onBatchEnd(batch: number, logs?: Logs): Promise<void>;
onTrainBegin(logs?: Logs): Promise<void>;
onTrainEnd(logs?: Logs): Promise<void>;
onEpochBegin(epoch: number, logs?: UnresolvedLogs): Promise<void>;
onEpochEnd(epoch: number, logs?: UnresolvedLogs): Promise<void>;
onBatchBegin(batch: number, logs?: UnresolvedLogs): Promise<void>;
onBatchEnd(batch: number, logs?: UnresolvedLogs): Promise<void>;
onTrainBegin(logs?: UnresolvedLogs): Promise<void>;
onTrainEnd(logs?: UnresolvedLogs): Promise<void>;
}

@@ -43,6 +46,7 @@ export declare class BaseLogger extends Callback {

constructor();
onEpochBegin(epoch: number, logs?: Logs): Promise<void>;
onBatchEnd(batch: number, logs?: Logs): Promise<void>;
onEpochEnd(epoch: number, logs?: Logs): Promise<void>;
onEpochBegin(epoch: number, logs?: UnresolvedLogs): Promise<void>;
onBatchEnd(batch: number, logs?: UnresolvedLogs): Promise<void>;
onEpochEnd(epoch: number, logs?: UnresolvedLogs): Promise<void>;
}
export declare function resolveScalarsInLogs(logs: UnresolvedLogs): Promise<void>;
export declare class History extends Callback {

@@ -53,4 +57,4 @@ epoch: number[];

};
onTrainBegin(logs?: Logs): Promise<void>;
onEpochEnd(epoch: number, logs?: Logs): Promise<void>;
onTrainBegin(logs?: UnresolvedLogs): Promise<void>;
onEpochEnd(epoch: number, logs?: UnresolvedLogs): Promise<void>;
syncData(): Promise<void>;

@@ -74,9 +78,9 @@ }

constructor(config: CustomCallbackConfig);
onEpochBegin(epoch: number, logs?: Logs): Promise<void>;
onEpochEnd(epoch: number, logs?: Logs): Promise<void>;
onBatchBegin(batch: number, logs?: Logs): Promise<void>;
onBatchEnd(batch: number, logs?: Logs): Promise<void>;
onTrainBegin(logs?: Logs): Promise<void>;
onTrainEnd(logs?: Logs): Promise<void>;
onEpochBegin(epoch: number, logs?: UnresolvedLogs): Promise<void>;
onEpochEnd(epoch: number, logs?: UnresolvedLogs): Promise<void>;
onBatchBegin(batch: number, logs?: UnresolvedLogs): Promise<void>;
onBatchEnd(batch: number, logs?: UnresolvedLogs): Promise<void>;
onTrainBegin(logs?: UnresolvedLogs): Promise<void>;
onTrainEnd(logs?: UnresolvedLogs): Promise<void>;
}
export declare function standardizeCallbacks(callbacks: Callback | Callback[] | CustomCallbackConfig | CustomCallbackConfig[]): Callback[];
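The Logs split above means internal hooks may carry Scalar-valued entries (UnresolvedLogs) that are downloaded to plain numbers before user code sees them, via the new resolveScalarsInLogs. A sketch of the flow (resolveScalarsInLogs is internal to dist/callbacks and referenced here only for illustration):

import * as tfc from '@tensorflow/tfjs-core';
async function demo() {
  const logs: {[key: string]: number | tfc.Scalar} = {loss: tfc.scalar(0.25)};
  await resolveScalarsInLogs(logs);  // awaits each Scalar's .data() and replaces it
  console.log(logs['loss']);         // 0.25, now a plain number
}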

@@ -48,3 +48,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var K = require("./backend/deeplearnjs_backend");

@@ -317,3 +317,3 @@ var generic_utils = require("./utils/generic_utils");

}
deeplearn_1.tidy(function () {
tfjs_core_1.tidy(function () {
_this.totals[key] =

@@ -347,3 +347,3 @@ K.scalarPlusArray(_this.totals[key], K.multiply(value, K.getScalar(batchSize)));

else {
deeplearn_1.tidy(function () {
tfjs_core_1.tidy(function () {
logs[key] =

@@ -368,2 +368,33 @@ K.scalarTimesArray(K.divide(K.getScalar(1), K.getScalar(_this.seen)), _this.totals[key]);

exports.BaseLogger = BaseLogger;
function resolveScalarsInLogs(logs) {
return __awaiter(this, void 0, void 0, function () {
var promises, keys, key, value, valueScalar, values, i;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
if (logs == null) {
return [2];
}
promises = [];
keys = [];
for (key in logs) {
value = logs[key];
if (typeof value !== 'number') {
valueScalar = value;
promises.push(valueScalar.data());
keys.push(key);
}
}
return [4, Promise.all(promises)];
case 1:
values = _a.sent();
for (i = 0; i < values.length; ++i) {
logs[keys[i]] = values[i][0];
}
return [2];
}
});
});
}
exports.resolveScalarsInLogs = resolveScalarsInLogs;
var History = (function (_super) {

@@ -453,8 +484,11 @@ __extends(History, _super);

case 0:
if (!(this.epochBegin != null)) return [3, 2];
return [4, this.epochBegin(epoch, logs)];
if (!(this.epochBegin != null)) return [3, 3];
return [4, resolveScalarsInLogs(logs)];
case 1:
_a.sent();
_a.label = 2;
case 2: return [2];
return [4, this.epochBegin(epoch, logs)];
case 2:
_a.sent();
_a.label = 3;
case 3: return [2];
}

@@ -469,8 +503,11 @@ });

case 0:
if (!(this.epochEnd != null)) return [3, 2];
return [4, this.epochEnd(epoch, logs)];
if (!(this.epochEnd != null)) return [3, 3];
return [4, resolveScalarsInLogs(logs)];
case 1:
_a.sent();
_a.label = 2;
case 2: return [2];
return [4, this.epochEnd(epoch, logs)];
case 2:
_a.sent();
_a.label = 3;
case 3: return [2];
}

@@ -485,8 +522,11 @@ });

case 0:
if (!(this.batchBegin != null)) return [3, 2];
return [4, this.batchBegin(batch, logs)];
if (!(this.batchBegin != null)) return [3, 3];
return [4, resolveScalarsInLogs(logs)];
case 1:
_a.sent();
_a.label = 2;
case 2: return [2];
return [4, this.batchBegin(batch, logs)];
case 2:
_a.sent();
_a.label = 3;
case 3: return [2];
}

@@ -501,8 +541,11 @@ });

case 0:
if (!(this.batchEnd != null)) return [3, 2];
return [4, this.batchEnd(batch, logs)];
if (!(this.batchEnd != null)) return [3, 3];
return [4, resolveScalarsInLogs(logs)];
case 1:
_a.sent();
_a.label = 2;
case 2: return [2];
return [4, this.batchEnd(batch, logs)];
case 2:
_a.sent();
_a.label = 3;
case 3: return [2];
}

@@ -517,8 +560,11 @@ });

case 0:
if (!(this.trainBegin != null)) return [3, 2];
return [4, this.trainBegin(logs)];
if (!(this.trainBegin != null)) return [3, 3];
return [4, resolveScalarsInLogs(logs)];
case 1:
_a.sent();
_a.label = 2;
case 2: return [2];
return [4, this.trainBegin(logs)];
case 2:
_a.sent();
_a.label = 3;
case 3: return [2];
}

@@ -533,8 +579,11 @@ });

case 0:
if (!(this.trainEnd != null)) return [3, 2];
return [4, this.trainEnd(logs)];
if (!(this.trainEnd != null)) return [3, 3];
return [4, resolveScalarsInLogs(logs)];
case 1:
_a.sent();
_a.label = 2;
case 2: return [2];
return [4, this.trainEnd(logs)];
case 2:
_a.sent();
_a.label = 3;
case 3: return [2];
}

@@ -541,0 +590,0 @@ });

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { ConfigDict, ConfigDictValue } from './types';

@@ -3,0 +3,0 @@ export declare abstract class Constraint {

@@ -19,3 +19,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var K = require("./backend/deeplearnjs_backend");

@@ -33,3 +33,3 @@ var generic_utils_1 = require("./utils/generic_utils");

Constraint = __decorate([
deeplearn_1.doc({ heading: 'Constraints', subheading: 'Classes', namespace: 'constraints' })
tfjs_core_1.doc({ heading: 'Constraints', subheading: 'Classes', namespace: 'constraints' })
], Constraint);

@@ -36,0 +36,0 @@ return Constraint;

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { SymbolicTensor } from '../types';

@@ -3,0 +3,0 @@ export interface Feed {

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { Constraint } from '../constraints';

@@ -3,0 +3,0 @@ import { Initializer } from '../initializers';

@@ -1,2 +0,2 @@

import { Optimizer, Scalar, Tensor, Tensor1D } from 'deeplearn';
import { Optimizer, Scalar, Tensor, Tensor1D } from '@tensorflow/tfjs-core';
import { Callback, CustomCallbackConfig, History } from '../callbacks';

@@ -27,9 +27,13 @@ import * as optimizers from '../optimizers';

}
export interface ModelPredictConfig {
batchSize?: number;
verbose?: boolean;
}
export interface ModelEvaluateConfig {
batchSize?: number;
verbose?: ModelLoggingVerbosity;
sampleWeight?: Tensor;
steps?: number;
}
export interface ModelFitConfig {
x: Tensor | Tensor[] | {
[inputName: string]: Tensor;
};
y: Tensor | Tensor[] | {
[inputName: string]: Tensor;
};
batchSize?: number;

@@ -78,6 +82,6 @@ epochs?: number;

private checkTrainableWeightsConsistency();
evaluate(x: Tensor | Tensor[], y: Tensor | Tensor[], batchSize?: number, verbose?: ModelLoggingVerbosity, sampleWeight?: Tensor, steps?: number): Scalar | Scalar[];
evaluate(x: Tensor | Tensor[], y: Tensor | Tensor[], config?: ModelEvaluateConfig): Promise<Scalar | Scalar[]>;
private checkNumSamples(ins, batchSize?, steps?, stepsName?);
private predictLoop(ins, batchSize?, verbose?);
predict(x: Tensor | Tensor[], batchSize?: number, verbose?: boolean): Tensor | Tensor[];
predict(x: Tensor | Tensor[], config?: ModelPredictConfig): Promise<Tensor | Tensor[]>;
predictOnBatch(x: Tensor): Tensor | Tensor[];

@@ -93,3 +97,7 @@ protected standardizeUserData(x: Tensor | Tensor[] | {

private makeTestFunction();
fit(config: ModelFitConfig): Promise<History>;
fit(x: Tensor | Tensor[] | {
[inputName: string]: Tensor;
}, y: Tensor | Tensor[] | {
[inputName: string]: Tensor;
}, config?: ModelFitConfig): Promise<History>;
}
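The net effect on call sites: evaluate and predict become async and take an optional config object, and fit takes x and y positionally instead of packing them into ModelFitConfig. A migration sketch (xs and ys are assumed tensors, inside an async function):

// 0.0.3:
//   model.fit({x: xs, y: ys, batchSize: 32, epochs: 10});
//   const scores = model.evaluate(xs, ys, 32);
// 0.0.4:
const history = await model.fit(xs, ys, {batchSize: 32, epochs: 10});
const scores = await model.evaluate(xs, ys, {batchSize: 32});
const preds = await model.predict(xs, {batchSize: 32});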

@@ -54,4 +54,4 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var dl = require("deeplearn");
var deeplearn_1 = require("deeplearn");
var tfc = require("@tensorflow/tfjs-core");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var _ = require("underscore");

@@ -68,3 +68,3 @@ var K = require("../backend/deeplearnjs_backend");

function isDataTensor(x) {
return x instanceof deeplearn_1.Tensor;
return x instanceof tfjs_core_1.Tensor;
}

@@ -519,13 +519,16 @@ exports.isDataTensor = isDataTensor;

};
Model.prototype.evaluate = function (x, y, batchSize, verbose, sampleWeight, steps) {
if (batchSize === void 0) { batchSize = 32; }
if (batchSize == null) {
batchSize = 32;
}
var standardizedOuts = this.standardizeUserData(x, y, true, batchSize);
var ins = standardizedOuts[0].concat(standardizedOuts[1]);
this.makeTestFunction();
var f = this.testFunction;
var testOuts = this.testLoop(f, ins, batchSize, verbose, steps);
return generic_utils_1.singletonOrArray(testOuts);
Model.prototype.evaluate = function (x, y, config) {
if (config === void 0) { config = {}; }
return __awaiter(this, void 0, void 0, function () {
var batchSize, standardizedOuts, ins, f, testOuts;
return __generator(this, function (_a) {
batchSize = config.batchSize == null ? 32 : config.batchSize;
standardizedOuts = this.standardizeUserData(x, y, true, batchSize);
ins = standardizedOuts[0].concat(standardizedOuts[1]);
this.makeTestFunction();
f = this.testFunction;
testOuts = this.testLoop(f, ins, batchSize, config.verbose, config.steps);
return [2, generic_utils_1.singletonOrArray(testOuts)];
});
});
};

@@ -567,3 +570,3 @@ Model.prototype.checkNumSamples = function (ins, batchSize, steps, stepsName) {

var _loop_3 = function (batchIndex) {
var batchOuts = dl.tidy(function () {
var batchOuts = tfc.tidy(function () {
var batchStart = batches[batchIndex][0];

@@ -601,7 +604,12 @@ var batchEnd = batches[batchIndex][1];

};
Model.prototype.predict = function (x, batchSize, verbose) {
if (batchSize === void 0) { batchSize = 32; }
if (verbose === void 0) { verbose = false; }
checkInputData(x, this.inputNames, this.feedInputShapes, false);
return this.predictLoop(x, batchSize);
Model.prototype.predict = function (x, config) {
if (config === void 0) { config = {}; }
return __awaiter(this, void 0, void 0, function () {
var batchSize;
return __generator(this, function (_a) {
checkInputData(x, this.inputNames, this.feedInputShapes, false);
batchSize = config.batchSize == null ? 32 : config.batchSize;
return [2, this.predictLoop(x, batchSize)];
});
});
};

@@ -643,3 +651,2 @@ Model.prototype.predictOnBatch = function (x) {

Model.prototype.fitLoop = function (f, ins, outLabels, batchSize, epochs, verbose, callbacks, valF, valIns, shuffle, callbackMetrics, initialEpoch, stepsPerEpoch, validationSteps) {
if (epochs === void 0) { epochs = 100; }
if (initialEpoch === void 0) { initialEpoch = 0; }

@@ -652,2 +659,8 @@ return __awaiter(this, void 0, void 0, function () {

case 0:
if (batchSize == null) {
batchSize = 32;
}
if (epochs == null) {
epochs = 100;
}
if (shuffle == null) {

@@ -715,3 +728,3 @@ shuffle = true;

}
epochIndexArray1D_1 = deeplearn_1.tensor1d(epochIndexArray);
epochIndexArray1D_1 = tfjs_core_1.tensor1d(epochIndexArray);
batches_1 = makeBatches(numTrainSamples, batchSize);

@@ -727,3 +740,3 @@ _loop_5 = function (batchIndex) {

_a.sent();
dl.tidy(function () {
tfc.tidy(function () {
var batchStart = batches_1[batchIndex][0];

@@ -814,3 +827,3 @@ var batchEnd = batches_1[batchIndex][1];

var batches = makeBatches(numSamples, batchSize);
var indexArray = deeplearn_1.tensor1d(_.range(numSamples));
var indexArray = tfjs_core_1.tensor1d(_.range(numSamples));
for (var batchIndex = 0; batchIndex < batches.length; ++batchIndex) {

@@ -856,3 +869,3 @@ var batchStart = batches[batchIndex][0];

this.testFunction = function (data) {
return dl.tidy(function () {
return tfc.tidy(function () {
var valOutputs = [];

@@ -889,17 +902,10 @@ var totalLoss;

};
Model.prototype.fit = function (config) {
Model.prototype.fit = function (x, y, config) {
if (config === void 0) { config = {}; }
return __awaiter(this, void 0, void 0, function () {
var _this = this;
var standardizedOuts, inputs, targets, doValidation, valX, valY, valIns, valStandardized, splitAt, originalBatchSize, ins, trainFunction, outLabels, valFunction, callbackMetrics, callbacks;
var batchSize, standardizedOuts, inputs, targets, doValidation, valX, valY, valIns, valStandardized, splitAt, originalBatchSize, ins, trainFunction, outLabels, valFunction, callbackMetrics, callbacks;
return __generator(this, function (_a) {
if (config.batchSize == null) {
config.batchSize = 32;
}
if (config.x == null) {
throw new errors_1.ValueError('x data is missing from ModelFitConfig.');
}
if (config.y == null) {
throw new errors_1.ValueError('y data is missing from ModelFitConfig.');
}
standardizedOuts = this.standardizeUserData(config.x, config.y, false, config.batchSize);
batchSize = config.batchSize == null ? 32 : config.batchSize;
standardizedOuts = this.standardizeUserData(x, y, false, batchSize);
inputs = standardizedOuts[0];

@@ -922,3 +928,3 @@ targets = standardizedOuts[1];

}
valStandardized = this.standardizeUserData(valX, valY, true, config.batchSize);
valStandardized = this.standardizeUserData(valX, valY, true, batchSize);
valX = valStandardized[0];

@@ -1003,3 +1009,3 @@ valY = valStandardized[1];

callbacks = callbacks_1.standardizeCallbacks(config.callbacks);
return [2, this.fitLoop(trainFunction, ins, outLabels, config.batchSize, config.epochs, config.verbose, callbacks, valFunction, valIns, config.shuffle, callbackMetrics, null, null, null)];
return [2, this.fitLoop(trainFunction, ins, outLabels, batchSize, config.epochs, config.verbose, callbacks, valFunction, valIns, config.shuffle, callbackMetrics, null, null, null)];
});

@@ -1009,18 +1015,18 @@ });

__decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes' })
], Model.prototype, "compile", null);
__decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes', configParamIndices: [2] })
], Model.prototype, "evaluate", null);
__decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes', configParamIndices: [1] })
], Model.prototype, "predict", null);
__decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes' })
], Model.prototype, "predictOnBatch", null);
__decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes', configParamIndices: [2] })
], Model.prototype, "fit", null);
Model = __decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes' })
], Model);

@@ -1027,0 +1033,0 @@ return Model;

@@ -12,3 +12,3 @@ import { MaxNorm, MaxNormConfig, MinMaxNorm, MinMaxNormConfig, NonNeg, UnitNorm, UnitNormConfig } from './constraints';

import { GlobalPooling2DLayerConfig, Pooling1DLayerConfig, Pooling2DLayerConfig } from './layers/pooling';
import { GRUCellLayerConfig, GRULayerConfig, LSTMCellLayerConfig, LSTMLayerConfig, RNNCell, SimpleRNNCellLayerConfig, SimpleRNNLayerConfig } from './layers/recurrent';
import { GRUCellLayerConfig, GRULayerConfig, LSTMCellLayerConfig, LSTMLayerConfig, RNN, RNNCell, RNNLayerConfig, SimpleRNNCellLayerConfig, SimpleRNNLayerConfig, StackedRNNCellsConfig } from './layers/recurrent';
import { BidirectionalLayerConfig, WrapperLayerConfig } from './layers/wrappers';

@@ -56,2 +56,4 @@ import { Sequential, SequentialConfig } from './models';

static simpleRNNCell(config: SimpleRNNCellLayerConfig): RNNCell;
static rnn(config: RNNLayerConfig): RNN;
static stackedRNNCells(config: StackedRNNCellsConfig): RNNCell;
static bidirectional(config: BidirectionalLayerConfig): Layer;

@@ -58,0 +60,0 @@ static timeDistributed(config: WrapperLayerConfig): Layer;

@@ -9,3 +9,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var constraints_1 = require("./constraints");

@@ -45,3 +45,3 @@ var topology_1 = require("./engine/topology");

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Models',

@@ -54,3 +54,3 @@ subheading: 'Creation',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Models',

@@ -63,3 +63,3 @@ subheading: 'Creation',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Models',

@@ -71,3 +71,3 @@ subheading: 'Loading',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Models',

@@ -80,3 +80,3 @@ subheading: 'Inputs',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Models',

@@ -184,2 +184,8 @@ subheading: 'Inputs',

};
LayerExports.rnn = function (config) {
return new recurrent_1.RNN(config);
};
LayerExports.stackedRNNCells = function (config) {
return new recurrent_1.StackedRNNCells(config);
};
LayerExports.bidirectional = function (config) {

@@ -192,3 +198,3 @@ return new wrappers_1.Bidirectional(config);

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -202,3 +208,3 @@ subheading: 'Convolutional',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -212,3 +218,3 @@ subheading: 'Convolutional',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -222,3 +228,3 @@ subheading: 'Convolutional',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -232,3 +238,3 @@ subheading: 'Core',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -242,3 +248,3 @@ subheading: 'Core',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -252,3 +258,3 @@ subheading: 'Core',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -262,3 +268,3 @@ subheading: 'Core',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -272,3 +278,3 @@ subheading: 'Core',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -282,3 +288,3 @@ subheading: 'Core',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -292,3 +298,3 @@ subheading: 'Merge',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -302,3 +308,3 @@ subheading: 'Merge',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -312,3 +318,3 @@ subheading: 'Merge',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -322,3 +328,3 @@ subheading: 'Merge',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -332,3 +338,3 @@ subheading: 'Merge',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -342,3 +348,3 @@ subheading: 'Merge',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -352,3 +358,3 @@ subheading: 'Normalization',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -362,3 +368,3 @@ subheading: 'Pooling',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -372,3 +378,3 @@ subheading: 'Pooling',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -382,3 +388,3 @@ subheading: 'Pooling',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -392,3 +398,3 @@ subheading: 'Pooling',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -402,3 +408,3 @@ subheading: 'Pooling',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -412,3 +418,3 @@ subheading: 'Pooling',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -422,3 +428,3 @@ subheading: 'Pooling',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -432,3 +438,3 @@ subheading: 'Pooling',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -442,3 +448,3 @@ subheading: 'Recurrent',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -452,3 +458,3 @@ subheading: 'Recurrent',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -462,3 +468,3 @@ subheading: 'Recurrent',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -472,3 +478,3 @@ subheading: 'Recurrent',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -482,3 +488,3 @@ subheading: 'Recurrent',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -492,4 +498,22 @@ subheading: 'Recurrent',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',
subheading: 'Recurrent',
namespace: 'layers',
useDocsFrom: 'RNN',
configParamIndices: [0]
})
], LayerExports, "rnn", null);
__decorate([
tfjs_core_1.doc({
heading: 'Layers',
subheading: 'Recurrent',
namespace: 'layers',
useDocsFrom: 'RNN',
configParamIndices: [0]
})
], LayerExports, "stackedRNNCells", null);
__decorate([
tfjs_core_1.doc({
heading: 'Layers',
subheading: 'Wrapper',

@@ -502,3 +526,3 @@ namespace: 'layers',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Layers',

@@ -530,3 +554,3 @@ subheading: 'Wrapper',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Constraints',

@@ -539,3 +563,3 @@ namespace: 'constraints',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Constraints',

@@ -548,6 +572,6 @@ namespace: 'constraints',

__decorate([
deeplearn_1.doc({ heading: 'Constraints', namespace: 'constraints', useDocsFrom: 'NonNeg' })
tfjs_core_1.doc({ heading: 'Constraints', namespace: 'constraints', useDocsFrom: 'NonNeg' })
], ConstraintExports, "nonNeg", null);
__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Constraints',

@@ -602,3 +626,3 @@ namespace: 'constraints',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -610,6 +634,6 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({ heading: 'Initializers', namespace: 'initializers', useDocsFrom: 'Ones' })
tfjs_core_1.doc({ heading: 'Initializers', namespace: 'initializers', useDocsFrom: 'Ones' })
], InitializerExports, "ones", null);
__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -622,3 +646,3 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -631,3 +655,3 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -640,3 +664,3 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -649,3 +673,3 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -658,3 +682,3 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -667,3 +691,3 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -676,3 +700,3 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -685,3 +709,3 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -694,3 +718,3 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({
tfjs_core_1.doc({
heading: 'Initializers',

@@ -718,9 +742,9 @@ namespace: 'initializers',

__decorate([
deeplearn_1.doc({ heading: 'Regularizers', namespace: 'regularizers', useDocsFrom: 'L1L2' })
tfjs_core_1.doc({ heading: 'Regularizers', namespace: 'regularizers', useDocsFrom: 'L1L2' })
], RegularizerExports, "l1l2", null);
__decorate([
deeplearn_1.doc({ heading: 'Regularizers', namespace: 'regularizers', useDocsFrom: 'L1L2' })
tfjs_core_1.doc({ heading: 'Regularizers', namespace: 'regularizers', useDocsFrom: 'L1L2' })
], RegularizerExports, "l1", null);
__decorate([
deeplearn_1.doc({ heading: 'Regularizers', namespace: 'regularizers', useDocsFrom: 'L1L2' })
tfjs_core_1.doc({ heading: 'Regularizers', namespace: 'regularizers', useDocsFrom: 'L1L2' })
], RegularizerExports, "l2", null);

@@ -727,0 +751,0 @@ return RegularizerExports;

@@ -1,7 +0,9 @@

import * as dl from 'deeplearn';
import * as dl from '@tensorflow/tfjs-core';
import * as backend from './backend/deeplearnjs_backend';
import { ConstraintExports, InitializerExports, LayerExports, ModelExports, RegularizerExports } from './exports';
export { Callback, CallbackList, CustomCallback, CustomCallbackConfig, Logs } from './callbacks';
export { Model } from './engine/training';
export { ModelAndWeightsConfig, Sequential } from './models';
export { Layer } from './engine/topology';
export { Model, ModelCompileConfig, ModelEvaluateConfig, ModelFitConfig, ModelPredictConfig } from './engine/training';
export { GRUCellLayerConfig, GRULayerConfig, LSTMCellLayerConfig, LSTMLayerConfig, RNN, RNNCell, RNNLayerConfig, SimpleRNNCellLayerConfig, SimpleRNNLayerConfig } from './layers/recurrent';
export { ModelAndWeightsConfig, Sequential, SequentialConfig } from './models';
export { SymbolicTensor } from './types';
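The enlarged root export surface lets consumers import the layer and model types directly; a sketch of imports the 0.0.4 declarations above make possible:

import {Layer, Model, ModelFitConfig, RNN, RNNCell, Sequential, SymbolicTensor} from '@tensorflow/tfjs-layers';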

@@ -8,0 +10,0 @@ export { version as version_layers } from './version';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var dl = require("deeplearn");
var dl = require("@tensorflow/tfjs-core");
exports.dl = dl;

@@ -12,4 +12,9 @@ var backend = require("./backend/deeplearnjs_backend");

exports.CustomCallback = callbacks_1.CustomCallback;
var topology_1 = require("./engine/topology");
exports.Layer = topology_1.Layer;
var training_1 = require("./engine/training");
exports.Model = training_1.Model;
var recurrent_1 = require("./layers/recurrent");
exports.RNN = recurrent_1.RNN;
exports.RNNCell = recurrent_1.RNNCell;
var models_1 = require("./models");

@@ -16,0 +21,0 @@ exports.Sequential = models_1.Sequential;

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { DType, Shape } from './types';

@@ -3,0 +3,0 @@ import { ConfigDict, ConfigDictValue } from './types';

@@ -19,3 +19,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var _ = require("underscore");

@@ -57,3 +57,3 @@ var K = require("./backend/deeplearnjs_backend");

Initializer = __decorate([
deeplearn_1.doc({ heading: 'Initializers', subheading: 'Classes', namespace: 'initializers' })
tfjs_core_1.doc({ heading: 'Initializers', subheading: 'Classes', namespace: 'initializers' })
], Initializer);

@@ -95,3 +95,3 @@ return Initializer;

Constant.prototype.apply = function (shape, dtype) {
return K.scalarTimesArray(deeplearn_1.scalar(this.value), K.ones(shape, dtype));
return K.scalarTimesArray(tfjs_core_1.scalar(this.value), K.ones(shape, dtype));
};

@@ -174,3 +174,3 @@ Constant.prototype.getConfig = function () {

var _this = _super.call(this) || this;
_this.gain = config.gain != null ? deeplearn_1.scalar(config.gain) : K.getScalar(1.0);
_this.gain = config.gain != null ? tfjs_core_1.scalar(config.gain) : K.getScalar(1.0);
return _this;

@@ -177,0 +177,0 @@ }

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { Constraint, ConstraintIdentifier } from '../constraints';

@@ -3,0 +3,0 @@ import { Initializer, InitializerIdentifier } from '../initializers';

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { ActivationFn } from '../activations';

@@ -3,0 +3,0 @@ import { DataFormat, PaddingMode } from '../common';

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { ActivationFn, ActivationIdentifier } from '../activations';

@@ -3,0 +3,0 @@ import { Constraint, ConstraintIdentifier } from '../constraints';

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { Constraint, ConstraintIdentifier } from '../constraints';

@@ -3,0 +3,0 @@ import { Layer, LayerConfig } from '../engine/topology';

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { Layer, LayerConfig } from '../engine/topology';

@@ -3,0 +3,0 @@ import { Shape } from '../types';

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { Constraint, ConstraintIdentifier } from '../constraints';

@@ -3,0 +3,0 @@ import { Layer, LayerConfig } from '../engine/topology';

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { DataFormat, PaddingMode } from '../common';

@@ -3,0 +3,0 @@ import { Layer, LayerConfig } from '../engine/topology';

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { ActivationFn, ActivationIdentifier } from '../activations';

@@ -262,1 +262,17 @@ import { Constraint, ConstraintIdentifier } from '../constraints';

}
export interface StackedRNNCellsConfig extends LayerConfig {
cells: RNNCell[];
}
export declare class StackedRNNCells extends RNNCell {
protected cells: RNNCell[];
constructor(config: StackedRNNCellsConfig);
readonly stateSize: number[];
call(inputs: Tensor | Tensor[], kwargs: any): Tensor | Tensor[];
build(inputShape: Shape | Shape[]): void;
getConfig(): ConfigDict;
static fromConfig<T>(cls: generic_utils.Constructor<T>, config: ConfigDict, customObjects?: ConfigDict): T;
readonly trainableWeights: LayerVariable[];
readonly nonTrainableWeights: LayerVariable[];
getWeights(): Tensor[];
setWeights(weights: Tensor[]): void;
}
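StackedRNNCells chains cells so each cell's output feeds the next while their states are concatenated, and (as the RNN constructor change below shows) passing an array of cells now wraps them automatically instead of throwing NotImplementedError. A usage sketch, where layers stands for this package's factory exports (simpleRNNCell and rnn from the exports hunks above):

const rnnLayer = layers.rnn({
  cell: [layers.simpleRNNCell({units: 8}), layers.simpleRNNCell({units: 4})],
  returnSequences: false,
});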

@@ -19,3 +19,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var _ = require("underscore");

@@ -33,2 +33,3 @@ var activations_1 = require("../activations");

var math_utils = require("../utils/math_utils");
var serialization_1 = require("./serialization");
var RNN = (function (_super) {

@@ -38,2 +39,3 @@ __extends(RNN, _super);

var _this = _super.call(this, config) || this;
var cell;
if (config.cell == null) {

@@ -43,9 +45,12 @@ throw new errors_1.ValueError('cell property is missing for the constructor of RNN.');

else if (Array.isArray(config.cell)) {
throw new errors_1.NotImplementedError('StackedRNNCells has not been implemented yet.');
cell = new StackedRNNCells({ cells: config.cell });
}
if (config.cell.stateSize == null) {
else {
cell = config.cell;
}
if (cell.stateSize == null) {
throw new errors_1.ValueError('The RNN cell should have an attribute `stateSize` (tuple of ' +
'integers, one integer per RNN state).');
}
_this.cell = config.cell;
_this.cell = cell;
_this.returnSequences =

@@ -380,3 +385,3 @@ config.returnSequences == null ? false : config.returnSequences;

RNNCell = __decorate([
deeplearn_1.doc({ heading: 'Layers', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Layers', subheading: 'Classes' })
], RNNCell);

@@ -1240,1 +1245,161 @@ return RNNCell;

generic_utils.ClassNameMap.register('LSTM', LSTM);
var StackedRNNCells = (function (_super) {
__extends(StackedRNNCells, _super);
function StackedRNNCells(config) {
var _this = _super.call(this, config) || this;
_this.cells = config.cells;
return _this;
}
Object.defineProperty(StackedRNNCells.prototype, "stateSize", {
get: function () {
var stateSize = [];
for (var _i = 0, _a = this.cells.slice().reverse(); _i < _a.length; _i++) {
var cell = _a[_i];
if (Array.isArray(cell.stateSize)) {
stateSize.push.apply(stateSize, cell.stateSize);
}
else {
stateSize.push(cell.stateSize);
}
}
return stateSize;
},
enumerable: true,
configurable: true
});
StackedRNNCells.prototype.call = function (inputs, kwargs) {
inputs = inputs;
var states = inputs.slice(1);
var nestedStates = [];
for (var _i = 0, _a = this.cells.slice().reverse(); _i < _a.length; _i++) {
var cell = _a[_i];
if (Array.isArray(cell.stateSize)) {
nestedStates.push(states.splice(0, cell.stateSize.length));
}
else {
nestedStates.push(states.splice(0, 1));
}
}
nestedStates.reverse();
var newNestedStates = [];
var callInputs;
for (var i = 0; i < this.cells.length; ++i) {
var cell = this.cells[i];
states = nestedStates[i];
if (i === 0) {
callInputs = [inputs[0]].concat(states);
}
else {
callInputs = [callInputs[0]].concat(states);
}
callInputs = cell.call(callInputs, kwargs);
newNestedStates.push(callInputs.slice(1));
}
states = [];
for (var _b = 0, _c = newNestedStates.slice().reverse(); _b < _c.length; _b++) {
var cellStates = _c[_b];
states.push.apply(states, cellStates);
}
return [callInputs[0]].concat(states);
};
StackedRNNCells.prototype.build = function (inputShape) {
if (generic_utils.isArrayOfShapes(inputShape)) {
inputShape = inputShape[0];
}
inputShape = inputShape;
var outputDim;
for (var _i = 0, _a = this.cells; _i < _a.length; _i++) {
var cell = _a[_i];
cell.build(inputShape);
if (Array.isArray(cell.stateSize)) {
outputDim = cell.stateSize[0];
}
else {
outputDim = cell.stateSize;
}
inputShape = [inputShape[0], outputDim];
}
this.built = true;
};
StackedRNNCells.prototype.getConfig = function () {
var cellConfigs = [];
for (var _i = 0, _a = this.cells; _i < _a.length; _i++) {
var cell = _a[_i];
cellConfigs.push({
'className': this.constructor.name,
'config': cell.getConfig(),
});
}
var config = { 'cells': cellConfigs };
var baseConfig = _super.prototype.getConfig.call(this);
Object.assign(config, baseConfig);
return config;
};
StackedRNNCells.fromConfig = function (cls, config, customObjects) {
if (customObjects === void 0) { customObjects = {}; }
var cells = [];
for (var _i = 0, _a = config['cells']; _i < _a.length; _i++) {
var cellConfig = _a[_i];
cells.push(serialization_1.deserialize(cellConfig, customObjects));
}
return new cls({ cells: cells });
};
Object.defineProperty(StackedRNNCells.prototype, "trainableWeights", {
get: function () {
if (!this.trainable) {
return [];
}
var weights = [];
for (var _i = 0, _a = this.cells; _i < _a.length; _i++) {
var cell = _a[_i];
weights.push.apply(weights, cell.trainableWeights);
}
return weights;
},
enumerable: true,
configurable: true
});
Object.defineProperty(StackedRNNCells.prototype, "nonTrainableWeights", {
get: function () {
var weights = [];
for (var _i = 0, _a = this.cells; _i < _a.length; _i++) {
var cell = _a[_i];
weights.push.apply(weights, cell.nonTrainableWeights);
}
if (!this.trainable) {
var trainableWeights = [];
for (var _b = 0, _c = this.cells; _b < _c.length; _b++) {
var cell = _c[_b];
trainableWeights.push.apply(trainableWeights, cell.trainableWeights);
}
return trainableWeights.concat(weights);
}
return weights;
},
enumerable: true,
configurable: true
});
StackedRNNCells.prototype.getWeights = function () {
var weights = [];
for (var _i = 0, _a = this.cells; _i < _a.length; _i++) {
var cell = _a[_i];
weights.push.apply(weights, cell.weights);
}
return K.batchGetValue(weights);
};
StackedRNNCells.prototype.setWeights = function (weights) {
var tuples = [];
for (var _i = 0, _a = this.cells; _i < _a.length; _i++) {
var cell = _a[_i];
var numParams = cell.weights.length;
var inputWeights = weights.splice(numParams);
for (var i = 0; i < cell.weights.length; ++i) {
tuples.push([cell.weights[i], inputWeights[i]]);
}
}
K.batchSetValue(tuples);
};
return StackedRNNCells;
}(RNNCell));
exports.StackedRNNCells = StackedRNNCells;
generic_utils.ClassNameMap.register('StackedRNNCells', StackedRNNCells);

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { Layer, LayerConfig } from '../engine/topology';

@@ -3,0 +3,0 @@ import { Shape, TensorInterface } from '../types';

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { LossOrMetricFn } from './types';

@@ -3,0 +3,0 @@ export declare function meanSquaredError(yTrue: Tensor, yPred: Tensor): Tensor;

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { cosineProximity, meanAbsoluteError, meanAbsolutePercentageError, meanSquaredError } from './losses';

@@ -3,0 +3,0 @@ import { LossOrMetricFn } from './types';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var K = require("./backend/deeplearnjs_backend");

@@ -8,3 +8,3 @@ var errors_1 = require("./errors");

function binaryAccuracy(yTrue, yPred) {
var threshold = K.scalarTimesArray(K.getScalar(0.5), deeplearn_1.onesLike(yPred));
var threshold = K.scalarTimesArray(K.getScalar(0.5), tfjs_core_1.onesLike(yPred));
var yPredThresholded = K.cast(K.greater(yPred, threshold), yTrue.dtype);
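Worked example for binaryAccuracy above: yTrue = [0, 1, 1] and yPred = [0.2, 0.8, 0.4] threshold to [0, 1, 0], which matches the labels in two of three positions, so the metric evaluates to 2/3.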

@@ -11,0 +11,0 @@ return K.mean(K.equal(yTrue, yPredThresholded), -1);

@@ -1,5 +0,5 @@

import { Scalar, Tensor, WeightsManifestConfig } from 'deeplearn';
import { Scalar, Tensor, WeightsManifestConfig } from '@tensorflow/tfjs-core';
import { History } from './callbacks';
import { Layer } from './engine/topology';
import { Model, ModelCompileConfig, ModelFitConfig, ModelLoggingVerbosity } from './engine/training';
import { Model, ModelCompileConfig, ModelEvaluateConfig, ModelFitConfig, ModelPredictConfig } from './engine/training';
import { Shape } from './types';

@@ -29,8 +29,12 @@ import { ConfigDict, JsonDict } from './types';

updatable: boolean;
evaluate(x: Tensor | Tensor[], y: Tensor | Tensor[], batchSize?: number, verbose?: ModelLoggingVerbosity, sampleWeight?: Tensor, steps?: number): Scalar | Scalar[];
predict(x: Tensor | Tensor[], batchSize?: number, verbose?: boolean): Tensor | Tensor[];
evaluate(x: Tensor | Tensor[], y: Tensor | Tensor[], config?: ModelEvaluateConfig): Promise<Scalar | Scalar[]>;
predict(x: Tensor | Tensor[], config?: ModelPredictConfig): Promise<Tensor | Tensor[]>;
predictOnBatch(x: Tensor): Tensor | Tensor[];
compile(config: ModelCompileConfig): void;
fit(config: ModelFitConfig): Promise<History>;
fit(x: Tensor | Tensor[] | {
[inputName: string]: Tensor;
}, y: Tensor | Tensor[] | {
[inputName: string]: Tensor;
}, config?: ModelFitConfig): Promise<History>;
static fromConfig<T>(cls: generic_utils.Constructor<T>, config: ConfigDict): T;
}
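The declaration changes above move `evaluate`, `predict`, and `fit` to config-object arguments and Promise returns. A minimal sketch of the new calling convention (assuming the 0.0.4 signatures shown in this hunk; the `epochs` and `batchSize` config fields follow the Keras convention):

```js
import * as tf from '@tensorflow/tfjs';

async function demo() {
  const model = tf.sequential();
  model.add(tf.layers.dense({units: 1, inputShape: [2]}));
  model.compile({loss: 'meanSquaredError', optimizer: 'SGD'});

  const x = tf.randomNormal([8, 2]);
  const y = tf.randomNormal([8, 1]);

  // Old style: fit({x, y, ...}); new style: data first, config object last.
  await model.fit(x, y, {batchSize: 4, epochs: 2});
  // evaluate() and predict() now resolve asynchronously.
  const loss = await model.evaluate(x, y, {batchSize: 4});
  loss.print();
}
demo();
```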

@@ -54,3 +54,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var K = require("./backend/deeplearnjs_backend");

@@ -76,3 +76,3 @@ var topology_1 = require("./engine/topology");

if (!(modelAndWeightsConfig.weightsManifest != null)) return [3, 2];
return [4, deeplearn_1.loadWeights(modelAndWeightsConfig.weightsManifest, modelAndWeightsConfig.pathPrefix, model.weights.map(function (weight) { return weight.name; }))];
return [4, tfjs_core_1.loadWeights(modelAndWeightsConfig.weightsManifest, modelAndWeightsConfig.pathPrefix, model.weights.map(function (weight) { return weight.name; }))];
case 1:

@@ -259,16 +259,23 @@ weightValues = _a.sent();

});
Sequential.prototype.evaluate = function (x, y, batchSize, verbose, sampleWeight, steps) {
if (batchSize === void 0) { batchSize = 32; }
if (!this.built) {
throw new errors_1.RuntimeError('The model needs to be compiled before being used.');
}
return this.model.evaluate(x, y, batchSize, verbose, sampleWeight, steps);
Sequential.prototype.evaluate = function (x, y, config) {
if (config === void 0) { config = {}; }
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
if (!this.built) {
throw new errors_1.RuntimeError('The model needs to be compiled before being used.');
}
return [2, this.model.evaluate(x, y, config)];
});
});
};
Sequential.prototype.predict = function (x, batchSize, verbose) {
if (batchSize === void 0) { batchSize = 32; }
if (verbose === void 0) { verbose = false; }
if (this.model == null) {
this.build();
}
return this.model.predict(x, batchSize, verbose);
Sequential.prototype.predict = function (x, config) {
if (config === void 0) { config = {}; }
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
if (this.model == null) {
this.build();
}
return [2, this.model.predict(x, config)];
});
});
};

@@ -290,3 +297,4 @@ Sequential.prototype.predictOnBatch = function (x) {

};
Sequential.prototype.fit = function (config) {
Sequential.prototype.fit = function (x, y, config) {
if (config === void 0) { config = {}; }
return __awaiter(this, void 0, void 0, function () {

@@ -298,3 +306,3 @@ return __generator(this, function (_a) {

}
return [2, this.model.fit(config)];
return [2, this.model.fit(x, y, config)];
});

@@ -322,15 +330,15 @@ });

__decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes' })
], Sequential.prototype, "add", null);
__decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes', configParamIndices: [2] })
], Sequential.prototype, "evaluate", null);
__decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes', configParamIndices: [1] })
], Sequential.prototype, "predict", null);
__decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes', configParamIndices: [2] })
], Sequential.prototype, "fit", null);
Sequential = Sequential_1 = __decorate([
deeplearn_1.doc({ heading: 'Models', subheading: 'Classes' })
tfjs_core_1.doc({ heading: 'Models', subheading: 'Classes' })
], Sequential);

@@ -337,0 +345,0 @@ return Sequential;

@@ -1,2 +0,2 @@

import { AdagradOptimizer, AdamOptimizer, Optimizer as CoreOptimizer, RMSPropOptimizer, Scalar, SGDOptimizer } from 'deeplearn';
import { AdagradOptimizer, AdamOptimizer, Optimizer as CoreOptimizer, RMSPropOptimizer, Scalar, SGDOptimizer } from '@tensorflow/tfjs-core';
import { ConfigDict, LayerVariable } from './types';

@@ -3,0 +3,0 @@ import { Constructor } from './utils/generic_utils';

@@ -13,3 +13,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var K = require("./backend/deeplearnjs_backend");

@@ -20,3 +20,3 @@ var errors_1 = require("./errors");

function LayersOptimizer(config) {
if (config instanceof deeplearn_1.Optimizer) {
if (config instanceof tfjs_core_1.Optimizer) {
this.createdFromCoreOptimizer = true;

@@ -84,6 +84,6 @@ this.constructFromCoreOptimizer(config);

}
this.optimizer = deeplearn_1.train.sgd(this.lr);
this.optimizer = tfjs_core_1.train.sgd(this.lr);
};
SGD.prototype.constructFromCoreOptimizer = function (optimizer) {
if (!(optimizer instanceof deeplearn_1.SGDOptimizer)) {
if (!(optimizer instanceof tfjs_core_1.SGDOptimizer)) {
throw new errors_1.ValueError('Cannot construct SGD from a non-SGD core optimizer');

@@ -126,6 +126,6 @@ }

}
this.optimizer = deeplearn_1.train.adam(this.lr, this.beta1, this.beta2, this.epsilon);
this.optimizer = tfjs_core_1.train.adam(this.lr, this.beta1, this.beta2, this.epsilon);
};
Adam.prototype.constructFromCoreOptimizer = function (optimizer) {
if (!(optimizer instanceof deeplearn_1.AdamOptimizer)) {
if (!(optimizer instanceof tfjs_core_1.AdamOptimizer)) {
throw new errors_1.ValueError('Cannot construct Adam from a non-Adam core optimizer');

@@ -164,6 +164,6 @@ }

}
this.optimizer = deeplearn_1.train.rmsprop(this.lr, this.rho, null, this.epsilon);
this.optimizer = tfjs_core_1.train.rmsprop(this.lr, this.rho, null, this.epsilon);
};
RMSProp.prototype.constructFromCoreOptimizer = function (optimizer) {
if (!(optimizer instanceof deeplearn_1.RMSPropOptimizer)) {
if (!(optimizer instanceof tfjs_core_1.RMSPropOptimizer)) {
throw new errors_1.ValueError('Cannot construct RMSProp from a non-RMSProp core optimizer');

@@ -200,6 +200,6 @@ }

}
this.optimizer = deeplearn_1.train.adagrad(this.lr);
this.optimizer = tfjs_core_1.train.adagrad(this.lr);
};
Adagrad.prototype.constructFromCoreOptimizer = function (optimizer) {
if (!(optimizer instanceof deeplearn_1.AdagradOptimizer)) {
if (!(optimizer instanceof tfjs_core_1.AdagradOptimizer)) {
throw new errors_1.ValueError('Cannot construct Adagrad from a non-Adagrad core optimizer');

@@ -206,0 +206,0 @@ }
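The `constructFromCoreOptimizer` branches above indicate that a core optimizer instance can stand in for a string name. A hedged sketch (assuming `model.compile` accepts a core optimizer for its `optimizer` field, as the wrapping logic implies):

```js
import * as tf from '@tensorflow/tfjs';

const model = tf.sequential();
model.add(tf.layers.dense({units: 1, inputShape: [1]}));
// Pass a concrete core optimizer instead of a string name such as 'SGD'.
model.compile({loss: 'meanSquaredError', optimizer: tf.train.sgd(0.01)});
```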

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { LayerVariable } from './types';

@@ -3,0 +3,0 @@ import { ConfigDict, ConfigDictValue } from './types';

@@ -19,3 +19,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var K = require("./backend/deeplearnjs_backend");

@@ -43,3 +43,3 @@ var generic_utils_1 = require("./utils/generic_utils");

L1L2.prototype.apply = function (x) {
var regularization = deeplearn_1.zeros([1]);
var regularization = tfjs_core_1.zeros([1]);
if (this.hasL1) {

@@ -60,3 +60,3 @@ regularization = K.add(regularization, K.sum(K.scalarTimesArray(this.l1, K.abs(x.read()))));

L1L2 = L1L2_1 = __decorate([
deeplearn_1.doc({ heading: 'Regularizers', namespace: 'regularizers' })
tfjs_core_1.doc({ heading: 'Regularizers', namespace: 'regularizers' })
], L1L2);

@@ -63,0 +63,0 @@ return L1L2;
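A hedged usage sketch for the `L1L2` regularizer above (the `tf.regularizers.l1l2` factory and its `l1`/`l2` config fields follow the Keras-style convention and are assumptions here, not confirmed by this diff):

```js
import * as tf from '@tensorflow/tfjs';

const layer = tf.layers.dense({
  units: 4,
  inputShape: [3],
  // Penalize large kernel weights with combined L1 and L2 terms.
  kernelRegularizer: tf.regularizers.l1l2({l1: 0.01, l2: 0.01}),
});
```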

@@ -1,3 +0,3 @@

import * as dl from 'deeplearn';
import { Tensor } from 'deeplearn';
import * as tfc from '@tensorflow/tfjs-core';
import { Tensor } from '@tensorflow/tfjs-core';
import { Constraint } from './constraints';

@@ -43,3 +43,3 @@ import { Layer } from './engine/topology';

readonly trainable: boolean;
protected readonly val: dl.Variable;
protected readonly val: tfc.Variable;
readonly constraint: Constraint;

@@ -46,0 +46,0 @@ constructor(val: Tensor | ConcreteTensor, dtype?: DType, name?: string, trainable?: boolean, constraint?: Constraint);

@@ -9,3 +9,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var common_1 = require("./common");

@@ -33,3 +33,3 @@ var DType;

SymbolicTensor = __decorate([
deeplearn_1.doc({ heading: 'Models', 'subheading': 'Classes' })
tfjs_core_1.doc({ heading: 'Models', 'subheading': 'Classes' })
], SymbolicTensor);

@@ -79,3 +79,3 @@ return SymbolicTensor;

this.val =
deeplearn_1.variable(getValueTensor(val), this.trainable, this.name, this.dtype);
tfjs_core_1.variable(getValueTensor(val), this.trainable, this.name, this.dtype);
}

@@ -82,0 +82,0 @@ LayerVariable.prototype.read = function () {
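The constructor above now delegates to `tfjs_core_1.variable`. For reference, a hedged sketch of creating such a core variable directly, with the argument order used in the hunk (initial value, trainable flag, name, dtype):

```js
const tfc = require('@tensorflow/tfjs-core');

// A trainable 2x2 float32 variable named 'myVar', initialized to zeros.
const v = tfc.variable(tfc.zeros([2, 2]), true, 'myVar', 'float32');
v.print();
```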

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
import { ConfigDict, ConfigDictValue, DType, Shape } from '../types';

@@ -3,0 +3,0 @@ export declare function pyListRepeat(value: any, numValues: number): any[];

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var dl = require("deeplearn");
var deeplearn_1 = require("deeplearn");
var tfc = require("@tensorflow/tfjs-core");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
function isInteger(x) {

@@ -25,14 +25,14 @@ return x === parseInt(x.toString(), 10);

array = Array.isArray(array) ? new Float32Array(array) : array;
return deeplearn_1.tensor1d(array);
return tfjs_core_1.tensor1d(array);
}
function min(array) {
return dl.min(toArray1D(array)).dataSync()[0];
return tfc.min(toArray1D(array)).dataSync()[0];
}
exports.min = min;
function max(array) {
return dl.max(toArray1D(array)).dataSync()[0];
return tfc.max(toArray1D(array)).dataSync()[0];
}
exports.max = max;
function sum(array) {
return dl.sum(toArray1D(array)).dataSync()[0];
return tfc.sum(toArray1D(array)).dataSync()[0];
}

@@ -45,6 +45,6 @@ exports.sum = sum;

function variance(array) {
var demeaned = dl.sub(toArray1D(array), deeplearn_1.scalar(mean(array)));
var sumSquare = dl.sum(dl.mulStrict(demeaned, demeaned)).dataSync()[0];
var demeaned = tfc.sub(toArray1D(array), tfjs_core_1.scalar(mean(array)));
var sumSquare = tfc.sum(tfc.mulStrict(demeaned, demeaned)).dataSync()[0];
return sumSquare / array.length;
}
exports.variance = variance;
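A hedged sketch of what the `min`/`max`/`sum`/`variance` helpers above compute, using `@tensorflow/tfjs-core` directly (`mulStrict` requires matching shapes, as in the source; the `mean` helper is not shown in this hunk, so it is computed inline here):

```js
const tfc = require('@tensorflow/tfjs-core');

const array = [1, 2, 3, 4];
const t = tfc.tensor1d(array);
console.log(tfc.min(t).dataSync()[0]);  // 1
console.log(tfc.max(t).dataSync()[0]);  // 4
console.log(tfc.sum(t).dataSync()[0]);  // 10

// variance: mean of squared deviations from the mean -> 1.25 here.
const mean = tfc.sum(t).dataSync()[0] / array.length;  // 2.5
const demeaned = tfc.sub(t, tfc.scalar(mean));
const sumSquare = tfc.sum(tfc.mulStrict(demeaned, demeaned)).dataSync()[0];
console.log(sumSquare / array.length);
```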

@@ -1,2 +0,2 @@

import { Tensor } from 'deeplearn';
import { Tensor } from '@tensorflow/tfjs-core';
export declare function expectTensorsClose(actual: Tensor | number[], expected: Tensor | number[], epsilon?: number): void;

@@ -3,0 +3,0 @@ export declare function expectTensorsValuesInRange(actual: Tensor, low: number, high: number): void;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var deeplearn_1 = require("deeplearn");
var tfjs_core_1 = require("@tensorflow/tfjs-core");
var deeplearnjs_backend_1 = require("../backend/deeplearnjs_backend");

@@ -18,3 +18,3 @@ var errors_1 = require("../errors");

}
deeplearn_1.test_util.expectArraysClose(actual, expected, epsilon);
tfjs_core_1.test_util.expectArraysClose(actual, expected, epsilon);
}

@@ -26,3 +26,3 @@ exports.expectTensorsClose = expectTensorsClose;

}
deeplearn_1.test_util.expectValuesInRange(actual.dataSync(), low, high);
tfjs_core_1.test_util.expectValuesInRange(actual.dataSync(), low, high);
}

@@ -36,3 +36,3 @@ exports.expectTensorsValuesInRange = expectTensorsValuesInRange;

function describeMathCPU(testName, tests) {
deeplearn_1.test_util.describeWithFlags(testName, deeplearn_1.test_util.CPU_ENVS, function () {
tfjs_core_1.test_util.describeWithFlags(testName, tfjs_core_1.test_util.CPU_ENVS, function () {
beforeEach(function () {

@@ -46,3 +46,3 @@ deeplearnjs_backend_1.setBackend('cpu');

function describeMathGPU(testName, tests) {
deeplearn_1.test_util.describeWithFlags(testName, webgl2Features, function () {
tfjs_core_1.test_util.describeWithFlags(testName, webgl2Features, function () {
beforeEach(function () {

@@ -49,0 +49,0 @@ deeplearnjs_backend_1.setBackend('webgl');

@@ -1,2 +0,2 @@

declare const version = "0.0.3";
declare const version = "0.0.4";
export { version };
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var version = '0.0.3';
var version = '0.0.4';
exports.version = version;
{
"name": "@tensorflow/tfjs-layers",
"version": "0.0.3",
"version": "0.0.4",
"description": "TensorFlow layers API in JavaScript",

@@ -9,2 +9,3 @@ "private": false,

"devDependencies": {
"@tensorflow/tfjs-core": "0.0.1",
"@types/jasmine": "~2.5.53",

@@ -15,3 +16,2 @@ "@types/underscore": "^1.8.7",

"cross-spawn": "~5.1.0",
"deeplearn": "next",
"deeplearn-src": "PAIR-code/deeplearnjs",

@@ -48,4 +48,4 @@ "http-server": "~0.10.0",

"peerDependencies": {
"deeplearn": "next"
"@tensorflow/tfjs-core": "0.0.1"
}
}

@@ -1,9 +0,94 @@

# TensorFlow.js Layers: easy open source machine learning in the browser
# TensorFlow.js Layers: High-Level Machine Learning Model API
TensorFlow.js Layers lets users build, train and execute machine learning models
in the browser, using the established and familiar [Keras](https://keras.io/)
API, without needing to install any software.
A part of the TensorFlow.js ecosystem, TensorFlow.js Layers is a high-level
API built on [TensorFlow.js Core](https://github.com/tensorflow/tfjs-core),
enabling users to build, train and execute deep learning models in the browser.
TensorFlow.js Layers is modeled after
[Keras](https://keras.io/) and
[tf.keras](https://www.tensorflow.org/api_docs/python/tf/keras) and can
load models saved from those libraries.
TFJS Layers provides higher-level building blocks for neural networks, executing
using the hardware-accelerated
[deeplearn.js](https://github.com/PAIR-code/deeplearnjs) library.
## Importing
There are three ways to import TensorFlow.js Layers:
1. You can access TensorFlow.js Layers through the union package
that combines TensorFlow.js Core and Layers:
[@tensorflow/tfjs](https://www.npmjs.com/package/@tensorflow/tfjs)
2. You can get TensorFlow.js Layers as a module:
[@tensorflow/tfjs-layers](https://www.npmjs.com/package/@tensorflow/tfjs-layers).
Note that `tfjs-layers` has a peer dependency on core, so if you import
`@tensorflow/tfjs-layers`, you also need to import
`@tensorflow/tfjs-core`.
3. As a standalone bundle through [unpkg](https://unpkg.com/).
Option 1 is the most convenient, but leads to a larger bundle size (we will be
adding more packages to it in the future). Use option 2 if you care about bundle
size.
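A short sketch of the first two import styles (module specifiers as listed above; the union package is assumed to re-export the Layers API under the `tf` namespace):

```js
// Option 1: the union package.
import * as tf from '@tensorflow/tfjs';

// Option 2: the Layers module alongside its core peer dependency.
import * as tfc from '@tensorflow/tfjs-core';
import * as tfl from '@tensorflow/tfjs-layers';
```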
## Getting started
### Building, training and executing a model
The following example shows how to build a toy model with only one `dense` layer
to perform linear regression.
```js
import * as tf from '@tensorflow/tfjs';
// A sequential model is a container which you can add layers to.
const model = tf.sequential();
// Add a dense layer with 1 output unit.
model.add(tf.layers.dense({units: 1, inputShape: [1]}));
// Specify the loss type and optimizer for training.
model.compile({loss: 'meanSquaredError', optimizer: 'SGD'});
// Generate some synthetic data for training.
const xs = tf.tensor2d([[1], [2], [3], [4]], [4, 1]);
const ys = tf.tensor2d([[1], [3], [5], [7]], [4, 1]);
// Train the model.
await model.fit(xs, ys, {epochs: 50});
// After training, perform inference.
const output = await model.predict(tf.tensor2d([[5]], [1, 1]));
output.print();
```
### Loading a pretrained Keras model
You can also load a model previously trained and saved from elsewhere (e.g.,
from Python Keras) and use it for inference or transfer learning in the browser.
For example, in Python, save your Keras model using
[tensorflowjs](https://pypi.org/project/tensorflowjs/),
which can be installed using `pip install tensorflowjs`.
```python
import tensorflowjs as tfjs
# ... Create and train your Keras model.
# Save your Keras model in TensorFlow.js format.
tfjs.converter.save_keras_model(model, '/path/to/tfjs_artifacts/')
# Then use your favorite web server to serve the directory at a URL, say
# http://foo.bar/tfjs_artifacts/model.json
```
To load the model with TensorFlow.js Layers:
```js
import * as tf from '@tensorflow/tfjs';
const model = await tf.loadModel('http://foo.bar/tfjs_artifacts/model.json');
// Now the model is ready for inference, evaluation or re-training.
```
## For more information
- [TensorFlow.js API documentation](https://js.tensorflow.org/api/index.html)
- [TensorFlow.js Tutorials](https://js.tensorflow.org/tutorials/)

(The diffs of three additional files are too large to display.)
