const _ = require("lodash");
const methods = require("../methods/methods");
const Group = require("./group");
const Node = require("./node");
/**
* Layers are pre-built architectures that allow you to combine different network architectures into one network.
*
* Always start your network with a `Dense` layer and always end it with a `Dense` layer. You can connect layers with each other just like you can connect [Nodes](Node) and [Groups](Group) with each other.
*
* @constructs Layer
*
* @prop {Node[]} output Output nodes
* @prop {Node[]} nodes Nodes within the layer
* @prop {Group[]|Node[]} connections.in Incoming connections
* @prop {Group[]|Node[]} connections.out Outgoing connections
* @prop {Group[]|Node[]} connections.self Self connections
*
* @example <caption>Custom architecture built with layers</caption>
* let { Layer } = require("@liquid-carrot/carrot");
*
* let input = new Layer.Dense(1);
* let hidden1 = new Layer.LSTM(5);
* let hidden2 = new Layer.GRU(1);
* let output = new Layer.Dense(1);
*
* // connect however you want
* input.connect(hidden1);
* hidden1.connect(hidden2);
* hidden2.connect(output);
*
* let network = architect.Construct([input, hidden1, hidden2, output]);
*/
function Layer() {
  const self = this;

  self.output = null;
  self.nodes = [];
  self.connections = {
    in: [],
    out: [],
    self: [],
    // (BETA)
    incoming: [],
    outgoing: []
  };

  /**
   * Activates all the nodes in the layer.
   *
   * @function activate
   * @memberof Layer
   *
   * @param {object[]} [inputs] Array with length equal to amount of nodes
   * @returns {number[]} Layer output values
   */
  self.activate = function(inputs) {
    if (inputs != undefined && inputs.length !== self.nodes.length) throw new Error('Array with values should be same as the amount of nodes!');

    const values = [];
    for (let index = 0; index < self.nodes.length; index++) {
      const activation = (inputs == undefined) ? self.nodes[index].activate() : self.nodes[index].activate(inputs[index]);
      values.push(activation);
    }
    return values;
  };

  /**
   * Propagates all the nodes in the layer.
   *
   * @function propagate
   * @memberof Layer
   *
   * @param {number[]} [targets] Ideal/target values - _required for output layers_
   * @param {Object} [options]
   * @param {number} [options.rate=0.3] Sets the [learning rate](https://towardsdatascience.com/understanding-learning-rates-and-how-it-improves-performance-in-deep-learning-d0d4059c1c10) of the backpropagation process
   * @param {number} [options.momentum=0] [Momentum](https://www.willamette.edu/~gorr/classes/cs449/momrate.html). Adds a fraction of the previous weight update to the current one.
   * @param {boolean} [options.update=true]
   */
  self.propagate = function(targets, options) {
    if (targets != undefined && targets.length !== self.nodes.length) throw new Error('Array with values should be same as the amount of nodes!');

    // Propagate in reverse activation order.
    for (let index = self.nodes.length - 1; index >= 0; index--) {
      if (targets == undefined) self.nodes[index].propagate(options);
      else self.nodes[index].propagate(targets[index], options);
    }
  };

  /**
   * Connects the nodes in this layer to nodes in another group, layer, or just a node.
   *
   * @function connect
   * @memberof Layer
   *
   * @param {Group|Node|Layer} target Node(s) to form connections with
   * @param {connection} method [Connection Methods](connection)
   * @param {number} weight An initial weight to build the connections with
   *
   * @returns {Connection[]} An array of connections between the nodes in this layer and target
   */
  self.connect = function(target, method, weight) {
    // Layers decide their own inbound wiring through their `input` method;
    // plain groups/nodes are connected straight from this layer's output group.
    if (target instanceof Group || target instanceof Node) return self.output.connect(target, method, weight);
    else if (target instanceof Layer) return target.input(self, method, weight);
  };

  /**
   * Make nodes from this layer gate the given connection(s).
   *
   * @see [Synaptic Gating on Wikipedia](https://en.wikipedia.org/wiki/Synaptic_gating)
   *
   * @function gate
   * @memberof Layer
   *
   * @param {Connection[]} connections Connections to gate
   * @param {gating_method} method [Gating Method](gating)
   */
  self.gate = function(connections, method) {
    self.output.gate(connections, method);
  };

  /**
   * Sets the value of a property for every node in the layer.
   *
   * @function set
   * @memberof Layer
   *
   * @param {object[]} values An object with (all optional) bias, squash, and type properties to overwrite in the node
   */
  self.set = function(values) {
    for (let i = 0; i < self.nodes.length; i++) {
      const node = self.nodes[i];

      // Groups know how to distribute the values to their members themselves.
      if (node instanceof Node) Object.assign(node, { ...values });
      else if (node instanceof Group) node.set(values);
    }
  };

  /**
   * Disconnects all nodes from this layer from another given group/node.
   *
   * @function disconnect
   * @memberof Layer
   *
   * @param {Group|Node|Layer} target A Group, Node, or Layer to disconnect from
   * @param {boolean} [twosided=false] Flag indicating incoming connections should be removed too
   */
  self.disconnect = function(target, twosided) {
    twosided = twosided || false;

    if (target instanceof Group) {
      for (let i = 0; i < self.nodes.length; i++) {
        for (let j = 0; j < target.nodes.length; j++) {
          self.nodes[i].disconnect(target.nodes[j], twosided);

          // Outgoing bookkeeping is always pruned; incoming only when two-sided.
          if (twosided) {
            self.connections.in = self.connections.in.filter(connection => !(connection.from === target.nodes[j] && connection.to === self.nodes[i]));
          }
          self.connections.out = self.connections.out.filter(connection => !(connection.from === self.nodes[i] && connection.to === target.nodes[j]));
        }
      }
    } else if (target instanceof Node) {
      for (let i = 0; i < self.nodes.length; i++) {
        self.nodes[i].disconnect(target, twosided);

        if (twosided) {
          self.connections.in = self.connections.in.filter(connection => !(connection.from === target && connection.to === self.nodes[i]));
        }
        self.connections.out = self.connections.out.filter(connection => !(connection.from === self.nodes[i] && connection.to === target));
      }
    }
  };

  /**
   * Clears the context of every node in this layer.
   *
   * @function clear
   * @memberof Layer
   */
  self.clear = function() {
    for (let index = 0; index < self.nodes.length; index++) {
      self.nodes[index].clear();
    }
  };
}
/**
* Creates a regular (dense) layer.
*
* @param {number} size Amount of nodes to build the layer with
*
* @returns {Layer} Plain layer
*
* @example
* let { Layer } = require("@liquid-carrot/carrot");
*
* let layer = new Layer.Dense(size);
*/
Layer.Dense = function(size) {
  const layer = new Layer();

  // A dense layer is simply one group of `size` nodes; that group is both
  // the layer's node list entry and its output.
  const group = new Group(size);
  layer.nodes.push(group);
  layer.output = group;

  // Inbound wiring hook used by Layer#connect; defaults to all-to-all.
  layer.input = (from, method, weight) => {
    const source = from instanceof Layer ? from.output : from;
    return source.connect(group, method || methods.connection.ALL_TO_ALL, weight);
  };

  return layer;
};
/**
* Creates an LSTM layer.
*
* LSTM layers are useful for detecting and predicting patterns over long time lags. This is a recurrent layer.
*
* Note: architect.LSTM currently performs better than an equivalent network built with LSTM Layers.
*
* @param {number} size Amount of nodes to build the layer with
*
* @returns {Layer} LSTM layer
*
* @example
* let { Layer } = require("@liquid-carrot/carrot");
*
* let layer = new Layer.LSTM(size);
*/
Layer.LSTM = function(size) {
  const layer = new Layer();

  // Gates, cell, and output block, declared in activation order.
  const input_gate = new Group(size);
  const forget_gate = new Group(size);
  const memory_cell = new Group(size);
  const output_gate = new Group(size);
  const output_block = new Group(size);

  // Every gate starts with a positive bias.
  [input_gate, forget_gate, output_gate].forEach((gate) => gate.set({ bias: 1 }));

  // Internal wiring: the memory cell feeds all three gates, loops back onto
  // itself (one-to-one), and projects into the output block (all-to-all).
  memory_cell.connect(input_gate, methods.connection.ALL_TO_ALL);
  memory_cell.connect(forget_gate, methods.connection.ALL_TO_ALL);
  memory_cell.connect(output_gate, methods.connection.ALL_TO_ALL);
  const self_loop = memory_cell.connect(memory_cell, methods.connection.ONE_TO_ONE);
  const to_output = memory_cell.connect(output_block, methods.connection.ALL_TO_ALL);

  // The forget gate controls the cell's self-connection; the output gate
  // controls what leaves the cell.
  forget_gate.gate(self_loop, methods.gating.SELF);
  output_gate.gate(to_output, methods.gating.OUTPUT);

  layer.nodes = [input_gate, forget_gate, memory_cell, output_gate, output_block];
  layer.output = output_block;

  // Inbound wiring hook used by Layer#connect: the source feeds the cell and
  // all three gates, and the input gate gates the source->cell connections.
  layer.input = (from, method, weight) => {
    const source = from instanceof Layer ? from.output : from;
    const wiring = method || methods.connection.ALL_TO_ALL;

    const to_cell = source.connect(memory_cell, wiring, weight);
    const connections = [
      to_cell,
      source.connect(input_gate, wiring, weight),
      source.connect(output_gate, wiring, weight),
      source.connect(forget_gate, wiring, weight)
    ];
    input_gate.gate(to_cell, methods.gating.INPUT);

    return connections;
  };

  return layer;
};
/**
* Creates a GRU layer.
*
* The GRU layer is similar to the LSTM layer, however it has no memory cell and only two gates. It is also a recurrent layer that is excellent for timeseries prediction.
*
* @param {number} size Amount of nodes to build the layer with
*
* @returns {Layer} GRU layer
*
* @example
* let { Layer } = require("@liquid-carrot/carrot");
*
* let layer = new Layer.GRU(size);
*/
Layer.GRU = function(size) {
  // Create the layer
  const layer = new Layer();

  const update_gate = new Group(size);
  const inverse_update_gate = new Group(size);
  const reset_gate = new Group(size);
  const memory_cell = new Group(size);
  const output = new Group(size);
  const previous_output = new Group(size);

  // `previous_output` holds the last output verbatim (identity, constant).
  previous_output.set({
    bias: 0,
    squash: methods.activation.IDENTITY,
    type: 'constant'
  });
  memory_cell.set({
    squash: methods.activation.TANH
  });
  // Computes (1 - update) so the two update paths are complementary.
  inverse_update_gate.set({
    bias: 0,
    squash: methods.activation.INVERSE,
    type: 'constant'
  });
  update_gate.set({
    bias: 1
  });
  reset_gate.set({
    bias: 0
  });

  // Update gate calculation
  previous_output.connect(update_gate, methods.connection.ALL_TO_ALL);
  // Inverse update gate calculation
  update_gate.connect(inverse_update_gate, methods.connection.ONE_TO_ONE, 1);
  // Reset gate calculation
  previous_output.connect(reset_gate, methods.connection.ALL_TO_ALL);
  // Memory calculation: the reset gate gates the previous output's
  // contribution to the candidate memory.
  const reset = previous_output.connect(memory_cell, methods.connection.ALL_TO_ALL);
  reset_gate.gate(reset, methods.gating.OUTPUT);
  // Output calculation: blend previous output and candidate memory,
  // weighted by the update gate and its inverse respectively.
  const update1 = previous_output.connect(output, methods.connection.ALL_TO_ALL);
  const update2 = memory_cell.connect(output, methods.connection.ALL_TO_ALL);
  update_gate.gate(update1, methods.gating.OUTPUT);
  inverse_update_gate.gate(update2, methods.gating.OUTPUT);
  // Previous output calculation
  output.connect(previous_output, methods.connection.ONE_TO_ONE, 1);

  layer.nodes = [update_gate, inverse_update_gate, reset_gate, memory_cell, output, previous_output];
  layer.output = output;

  // Inbound wiring hook used by Layer#connect.
  layer.input = function(from, method, weight) {
    if (from instanceof Layer) from = from.output;
    method = method || methods.connection.ALL_TO_ALL;
    // BUG FIX: these previously referenced undefined camelCase identifiers
    // (updateGate/resetGate/memoryCell), so connecting any layer into a GRU
    // layer threw a ReferenceError at runtime.
    const connections = [
      from.connect(update_gate, method, weight),
      from.connect(reset_gate, method, weight),
      from.connect(memory_cell, method, weight)
    ];
    return connections;
  };

  return layer;
};
/**
* Creates a Memory layer.
*
* The Memory layer makes networks remember a number of previous inputs in an absolute way. For example, if you set the memory option to 3, it will remember the last 3 inputs in the same state as they were inputted.
*
* @param {number} size Amount of nodes to build the layer with
* @param {number} memory Number of previous inputs to remember
*
* @returns {Layer} Layer with nodes that store previous inputs
*
* @example
* let { Layer } = require("@liquid-carrot/carrot");
*
* let layer = new Layer.Memory(size, memory);
*/
Layer.Memory = function(size, memory) {
  const layer = new Layer();

  // Build `memory` constant identity blocks chained one-to-one, so each
  // activation shifts the stored inputs one step down the chain.
  let previous;
  for (let step = 0; step < memory; step++) {
    const block = new Group(size);
    block.set({
      squash: methods.activation.IDENTITY,
      bias: 0,
      type: 'constant'
    });

    if (previous != undefined) previous.connect(block, methods.connection.ONE_TO_ONE, 1);

    layer.nodes.push(block);
    previous = block;
  }

  // Oldest memory first, and each block's nodes reversed to match.
  layer.nodes.reverse();
  for (let index = 0; index < layer.nodes.length; index++) {
    layer.nodes[index].nodes.reverse();
  }

  // The layer's output must be a single group, so gather every memory node
  // into one flat group.
  const output_group = new Group(0);
  for (const group of layer.nodes) {
    output_group.nodes = output_group.nodes.concat(group.nodes);
  }
  layer.output = output_group;

  // Inbound wiring hook used by Layer#connect.
  // NOTE: `method` and `weight` are accepted for interface parity, but a
  // memory layer always wires its input ONE_TO_ONE with weight 1.
  layer.input = function(from, method, weight) {
    if (from instanceof Layer) from = from.output;

    const newest = layer.nodes[layer.nodes.length - 1];
    if (from.nodes.length !== newest.nodes.length) throw new Error('Previous layer size must be same as memory size');

    return from.connect(newest, methods.connection.ONE_TO_ONE, 1);
  };

  return layer;
};
module.exports = Layer;