Comparing version 0.0.2 to 0.0.3
@@ -1,3 +0,7 @@
-## Unreleased - [DIFF](https://github.com/balovbohdan/fwd-ann/compare/v61.0.2...HEAD)
+## Unreleased - [DIFF](https://github.com/balovbohdan/fwd-ann/compare/v61.0.3...HEAD)
+## 0.0.3 - 2019-10-24 - [DIFF](https://github.com/balovbohdan/fwd-ann/compare/v0.0.2...v0.0.3)
+- Updated `exports` from `index`
+- Updated `numbers` example
 ## 0.0.2 - 2019-10-24 - [DIFF](https://github.com/balovbohdan/fwd-ann/compare/v0.0.1...v0.0.2)
@@ -4,0 +8,0 @@ - Added `bugs` prop to `package.json`
@@ -9,3 +9,3 @@ declare const config: Readonly<{
 needNormalize: boolean;
-normalizer: (signals: import("./lib/signals").Signals) => import("./lib/signals").Signals;
+normalizer: (signals: import(".").Signals) => import(".").Signals;
 };
@@ -12,0 +12,0 @@ };
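The declaration above now types the example's `normalizer` against the `Signals` re-exported from the package index rather than the deep `./lib/signals` path. As a rough illustration of that signature (a sketch, not the library's own code; the internal shape of `Signals` is not shown in this diff, so it is treated as opaque and only passed through):

```ts
import { Signals } from 'fwd-ann'; // package name taken from package.json below

// Pass-through normalizer matching `(signals: Signals) => Signals`.
const normalizer = (signals: Signals): Signals => signals;

// Assumption: these keys mirror the `needNormalize` / `normalizer` fields declared above.
const config = {
  needNormalize: true,
  normalizer,
};
```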
@@ -18,3 +18,4 @@ "use strict";
 var layers_1 = require("../lib/layers");
-var activation_funcs_1 = require("../lib/activation-funcs/activation-funcs");
+var activation_funcs_1 = require("../lib/activation-funcs");
+var ReLU = activation_funcs_1.activationFuncs.ReLU;
 var config = {
@@ -32,3 +33,3 @@ ann: {
 num: 6,
-ActivationFunction: activation_funcs_1.ReLU,
+ActivationFunction: ReLU,
 }],
@@ -40,3 +41,3 @@ },
 num: 7,
-ActivationFunction: activation_funcs_1.ReLU,
+ActivationFunction: ReLU,
 }],
@@ -48,3 +49,3 @@ },
 num: 1,
-ActivationFunction: activation_funcs_1.ReLU,
+ActivationFunction: ReLU,
 }],
@@ -51,0 +52,0 @@ },
@@ -0,1 +1,5 @@
+export { Signals } from './lib/signals';
+export { Teacher } from './lib/teacher';
+export { LayerType } from './lib/layers';
+export { default, ANN } from './lib/ann';
+export { activationFuncs } from './lib/activation-funcs';
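With the package entry point now re-exporting `Signals`, `Teacher`, `LayerType`, the `ann` default/named exports, and `activationFuncs`, consumers can pull everything from the package root. A minimal sketch, assuming the published name `fwd-ann` from package.json and that the default export is the factory the numbers example aliases as `createANN` (its call signature is not shown here, so it is only imported):

```ts
import createANN, { activationFuncs, LayerType, Teacher, Signals } from 'fwd-ann';

// ReLU is reached through the activationFuncs namespace, as in the updated example.
const { ReLU } = activationFuncs;

console.log(typeof createANN, typeof ReLU, LayerType, Teacher, Signals);
```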
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var signals_1 = require("./lib/signals"); | ||
exports.Signals = signals_1.Signals; | ||
var teacher_1 = require("./lib/teacher"); | ||
exports.Teacher = teacher_1.Teacher; | ||
var layers_1 = require("./lib/layers"); | ||
exports.LayerType = layers_1.LayerType; | ||
var ann_1 = require("./lib/ann"); | ||
exports.default = ann_1.default; | ||
exports.ANN = ann_1.ANN; | ||
var activation_funcs_1 = require("./lib/activation-funcs"); | ||
exports.activationFuncs = activation_funcs_1.activationFuncs; |
import { ActivationFunction } from './types';
import * as activationFuncs from './activation-funcs';
export default activationFuncs;
export { activationFuncs, ActivationFunction };
@@ -5,1 +5,2 @@ "use strict";
 exports.activationFuncs = activationFuncs;
+exports.default = activationFuncs;
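The `activation-funcs` barrel now exposes `activationFuncs` both as its default export and as a named export (alongside the `ActivationFunction` type), so either import style resolves to the same namespace object. A minimal sketch, assuming a file placed next to the examples above (hence the relative path):

```ts
import activationFuncsDefault from '../lib/activation-funcs';
import { activationFuncs } from '../lib/activation-funcs';

const { ReLU } = activationFuncs;                    // named-export style, as in the updated example
const ReLUViaDefault = activationFuncsDefault.ReLU;  // default-export style

console.log(ReLU === ReLUViaDefault); // expected: true, both point at the same namespace
```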
 {
 "name": "fwd-ann",
-"version": "0.0.2",
+"version": "0.0.3",
 "description": "Feedforward Artificial Neural Network Library",
@@ -5,0 +5,0 @@ "main": "dist/index.js",
@@ -5,4 +5,6 @@ import createANN from '../lib/ann';
 import { LayerType } from '../lib/layers';
-import { ReLU } from '../lib/activation-funcs/activation-funcs';
+import { activationFuncs } from '../lib/activation-funcs';
+const { ReLU } = activationFuncs;
 const config = {
@@ -9,0 +11,0 @@ ann: {
@@ -0,1 +1,5 @@
+export { Signals } from './lib/signals';
+export { Teacher } from './lib/teacher';
+export { LayerType } from './lib/layers';
+export { default, ANN } from './lib/ann';
+export { activationFuncs } from './lib/activation-funcs';
import { ActivationFunction } from './types';
import * as activationFuncs from './activation-funcs';
export default activationFuncs;
export { activationFuncs, ActivationFunction };