architecture/layer.js

const _ = require("lodash");
const methods = require("../methods/methods");
const Group = require("./group");
const Node = require("./node");
/**
 * Layers are pre-built architectures that allow you to combine different network architectures into one network.
 *
 * Always start your network with a `Dense` layer and always end it with a `Dense` layer. You can connect layers with each other just like you can connect [Nodes](Node) and [Groups](Group) with each other.
 *
 * @constructs Layer
 *
 * @prop {Node[]} output Output nodes
 * @prop {Node[]} nodes Nodes within the layer
 * @prop {Group[]|Node[]} connections.in Incoming connections
 * @prop {Group[]|Node[]} connections.out Outgoing connections
 * @prop {Group[]|Node[]} connections.self Self connections
 *
 * @example <caption>Custom architecture built with layers</caption>
 * let { architect, Layer } = require("@liquid-carrot/carrot");
 *
 * let input = new Layer.Dense(1);
 * let hidden1 = new Layer.LSTM(5);
 * let hidden2 = new Layer.GRU(1);
 * let output = new Layer.Dense(1);
 *
 * // connect however you want
 * input.connect(hidden1);
 * hidden1.connect(hidden2);
 * hidden2.connect(output);
 *
 * let network = architect.Construct([input, hidden1, hidden2, output]);
 */
function Layer() {
  const self = this;

  self.output = null;
  self.nodes = [];
  self.connections = {
    in: [],
    out: [],
    self: [],
    // (BETA)
    incoming: [],
    outgoing: []
  };
  /**
   * Activates all the nodes in the layer
   *
   * @function activate
   * @memberof Layer
   *
   * @param {number[]} [inputs] Array of input values, one for each node in the layer
   * @returns {number[]} Layer output values
   */
  self.activate = function(inputs) {
    const values = [];
    if(inputs != undefined && inputs.length !== self.nodes.length) throw new Error('Array of values should be the same length as the amount of nodes!');
    for(let index = 0; index < self.nodes.length; index++) {
      const activation = (inputs == undefined) ? self.nodes[index].activate() : self.nodes[index].activate(inputs[index]);
      values.push(activation);
    }
    return values;
  };
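
  // Usage sketch (illustrative, not part of the original source): activating a
  // hand-assembled layer whose `nodes` array holds plain Nodes. One input value is
  // forwarded to each node; with no argument, each node activates from its incoming
  // connections instead.
  //
  //   const layer = new Layer();
  //   layer.nodes = [new Node(), new Node()];
  //   const values = layer.activate([0.5, 1]); // input values pass straight through
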
  /**
   * Propagates all the nodes in the layer
   *
   * @function propagate
   * @memberof Layer
   *
   * @param {number[]} [targets] Ideal/target values - _required for output layers_
   * @param {Object} [options]
   * @param {number} [options.rate=0.3] Sets the [learning rate](https://towardsdatascience.com/understanding-learning-rates-and-how-it-improves-performance-in-deep-learning-d0d4059c1c10) of the backpropagation process
   * @param {number} [options.momentum=0] [Momentum](https://www.willamette.edu/~gorr/classes/cs449/momrate.html). Adds a fraction of the previous weight update to the current one.
   * @param {boolean} [options.update=true]
   */
  self.propagate = function(targets, options) {
    if(targets != undefined && targets.length !== self.nodes.length) throw new Error('Array of target values should be the same length as the amount of nodes!');
    for(let index = self.nodes.length - 1; index >= 0; index--) {
      if(targets == undefined) self.nodes[index].propagate(options);
      else self.nodes[index].propagate(targets[index], options);
    }
  };
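
  // Usage sketch (illustrative): propagating target values back through the same
  // hand-assembled layer from the activate example, with a hypothetical learning rate.
  //
  //   layer.activate([0.5, 1]);
  //   layer.propagate([0, 1], { rate: 0.3 }); // one target value per node
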
  /**
   * Connects the nodes in this layer to nodes in another group, layer, or a single node
   *
   * @function connect
   * @memberof Layer
   *
   * @param {Group|Node|Layer} target Node(s) to form connections with
   * @param {connection} method [Connection Methods](connection)
   * @param {number} weight An initial weight to build the connections with
   *
   * @returns {Connection[]} An array of connections between the nodes in this layer and target
   */
  self.connect = function(target, method, weight) {
    if(target instanceof Group || target instanceof Node) return self.output.connect(target, method, weight);
    else if(target instanceof Layer) return target.input(self, method, weight);
  };
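
  // Usage sketch (illustrative): connecting two built-in layers. When the target is a
  // Layer, the call is delegated to the target's input() function, which defaults to
  // ALL_TO_ALL when no method is given.
  //
  //   const source = new Layer.Dense(3);
  //   const destination = new Layer.Dense(2);
  //   const connections = source.connect(destination);
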
  /**
   * Makes the nodes in this layer's output group gate the given connection(s)
   *
   * @see [Synaptic Gating on Wikipedia](https://en.wikipedia.org/wiki/Synaptic_gating)
   *
   * @function gate
   * @memberof Layer
   *
   * @param {Connection[]} connections Connections to gate
   * @param {gating_method} method [Gating Method](gating)
   */
  self.gate = function(connections, method) {
    self.output.gate(connections, method);
  };
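
  // Usage sketch (illustrative): a layer gating the connections between two other
  // layers, mirroring how the LSTM layer below gates its memory connections. Assumes
  // the gating methods object required as `methods` at the top of this module.
  //
  //   const from = new Layer.Dense(2);
  //   const to = new Layer.Dense(2);
  //   const gater = new Layer.Dense(2);
  //   const conns = from.connect(to);
  //   gater.gate(conns, methods.gating.OUTPUT); // gater nodes gate the receiving nodes
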
  /**
   * Sets the value of a property for every node in this layer
   *
   * @function set
   * @memberof Layer
   *
   * @param {object} values An object with (all optional) bias, squash, and type properties to overwrite in the nodes
   */
  self.set = function(values) {
    for(let i = 0; i < self.nodes.length; i++) {
      const node = self.nodes[i];
      if(node instanceof Node) Object.assign(node, { ...values });
      else if(node instanceof Group) node.set(values);
    }
  };
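
  // Usage sketch (illustrative): overwriting properties for every node in a layer, the
  // same way the GRU and Memory constructors below configure their groups. Assumes the
  // activation methods object required as `methods` at the top of this module.
  //
  //   const layer = new Layer.Dense(4);
  //   layer.set({ bias: 0, squash: methods.activation.IDENTITY });
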
  /**
   * Disconnects all nodes in this layer from another given group/node
   *
   * @function disconnect
   * @memberof Layer
   *
   * @param {Group|Node|Layer} target A Group, Node, or Layer to disconnect from
   * @param {boolean} [twosided=false] Flag indicating whether to also remove connections coming in from the target
   */
  self.disconnect = function(target, twosided) {
    twosided = twosided || false;
    if(target instanceof Group) {
      for(let i = 0; i < self.nodes.length; i++) {
        for(let j = 0; j < target.nodes.length; j++) {
          self.nodes[i].disconnect(target.nodes[j], twosided);
          if(twosided) self.connections.in = self.connections.in.filter(connection => !(connection.from === target.nodes[j] && connection.to === self.nodes[i]));
          self.connections.out = self.connections.out.filter(connection => !(connection.from === self.nodes[i] && connection.to === target.nodes[j]));
        }
      }
    } else if(target instanceof Node) {
      for(let i = 0; i < self.nodes.length; i++) {
        self.nodes[i].disconnect(target, twosided);
        if(twosided) self.connections.in = self.connections.in.filter(connection => !(connection.from === target && connection.to === self.nodes[i]));
        self.connections.out = self.connections.out.filter(connection => !(connection.from === self.nodes[i] && connection.to === target));
      }
    }
  };
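
  // Usage sketch (illustrative): removing the connections to a Group. Passing `true`
  // also drops the connections coming back from the target. Uses the Group class
  // required at the top of this module.
  //
  //   const layer = new Layer.Dense(2);
  //   const group = new Group(2);
  //   layer.connect(group);
  //   layer.disconnect(group, true);
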
  /**
   * Clears the context of all nodes in this layer
   *
   * @function clear
   * @memberof Layer
   */
  self.clear = function() {
    for(let index = 0; index < self.nodes.length; index++) {
      self.nodes[index].clear();
    }
  };
}
/**
 * Creates a regular (dense) layer.
 *
 * @param {number} size Amount of nodes to build the layer with
 *
 * @returns {Layer} Plain layer
 *
 * @example
 * let { Layer } = require("@liquid-carrot/carrot");
 *
 * let layer = new Layer.Dense(size);
 */
Layer.Dense = function(size) {
  // Create the layer
  const layer = new Layer();

  // Init required nodes (in activation order)
  const block = new Group(size);

  layer.nodes.push(block);
  layer.output = block;

  layer.input = function(from, method, weight) {
    if(from instanceof Layer) from = from.output;
    method = method || methods.connection.ALL_TO_ALL;
    return from.connect(block, method, weight);
  };

  return layer;
};
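
// Usage sketch (illustrative): the Dense layer's input() hook is what other layers and
// groups call into; a connection method and initial weight can be passed through.
// Assumes the connection methods object required as `methods` in this module.
//
//   const hidden = new Layer.Dense(4);
//   const output = new Layer.Dense(1);
//   hidden.connect(output, methods.connection.ALL_TO_ALL, 0.1);
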
/**
 * Creates an LSTM layer.
 *
 * LSTM layers are useful for detecting and predicting patterns over long time lags. This is a recurrent layer.
 *
 * Note: architect.LSTM currently performs better than an equivalent network built with LSTM Layers.
 *
 * @param {number} size Amount of nodes to build the layer with
 *
 * @returns {Layer} LSTM layer
 *
 * @example
 * let { Layer } = require("@liquid-carrot/carrot");
 *
 * let layer = new Layer.LSTM(size);
 */
Layer.LSTM = function(size) {
  // Create the layer
  const layer = new Layer();

  // Init required nodes (in activation order)
  const input_gate = new Group(size);
  const forget_gate = new Group(size);
  const memory_cell = new Group(size);
  const output_gate = new Group(size);
  const output_block = new Group(size);

  input_gate.set({
    bias: 1
  });
  forget_gate.set({
    bias: 1
  });
  output_gate.set({
    bias: 1
  });

  // Set up internal connections
  memory_cell.connect(input_gate, methods.connection.ALL_TO_ALL);
  memory_cell.connect(forget_gate, methods.connection.ALL_TO_ALL);
  memory_cell.connect(output_gate, methods.connection.ALL_TO_ALL);
  const forget = memory_cell.connect(memory_cell, methods.connection.ONE_TO_ONE);
  const output = memory_cell.connect(output_block, methods.connection.ALL_TO_ALL);

  // Set up gates
  forget_gate.gate(forget, methods.gating.SELF);
  output_gate.gate(output, methods.gating.OUTPUT);

  // Add to nodes array
  layer.nodes = [input_gate, forget_gate, memory_cell, output_gate, output_block];

  // Define output
  layer.output = output_block;

  layer.input = function(from, method, weight) {
    if(from instanceof Layer) from = from.output;
    method = method || methods.connection.ALL_TO_ALL;

    const input = from.connect(memory_cell, method, weight);
    const connections = [
      input,
      from.connect(input_gate, method, weight),
      from.connect(output_gate, method, weight),
      from.connect(forget_gate, method, weight)
    ];

    input_gate.gate(input, methods.gating.INPUT);

    return connections;
  };

  return layer;
};
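
// Usage sketch (illustrative): an LSTM layer between two Dense layers, as in the
// example at the top of this file, with the recurrent context cleared between
// independent sequences via Layer.clear above. Assumes the `architect` export from
// the same package.
//
//   const { architect, Layer } = require("@liquid-carrot/carrot");
//   const input = new Layer.Dense(1);
//   const hidden = new Layer.LSTM(6);
//   const output = new Layer.Dense(1);
//   input.connect(hidden);
//   hidden.connect(output);
//   const network = architect.Construct([input, hidden, output]);
//   hidden.clear(); // reset this layer's recurrent context before a new sequence
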
/**
 * Creates a GRU layer.
 *
 * The GRU layer is similar to the LSTM layer, however it has no memory cell and only two gates. It is also a recurrent layer that is excellent for timeseries prediction.
 *
 * @param {number} size Amount of nodes to build the layer with
 *
 * @returns {Layer} GRU layer
 *
 * @example
 * let { Layer } = require("@liquid-carrot/carrot");
 *
 * let layer = new Layer.GRU(size);
 */
Layer.GRU = function(size) {
  // Create the layer
  const layer = new Layer();

  const update_gate = new Group(size);
  const inverse_update_gate = new Group(size);
  const reset_gate = new Group(size);
  const memory_cell = new Group(size);
  const output = new Group(size);
  const previous_output = new Group(size);

  previous_output.set({
    bias: 0,
    squash: methods.activation.IDENTITY,
    type: 'constant'
  });
  memory_cell.set({
    squash: methods.activation.TANH
  });
  inverse_update_gate.set({
    bias: 0,
    squash: methods.activation.INVERSE,
    type: 'constant'
  });
  update_gate.set({
    bias: 1
  });
  reset_gate.set({
    bias: 0
  });

  // Update gate calculation
  previous_output.connect(update_gate, methods.connection.ALL_TO_ALL);

  // Inverse update gate calculation
  update_gate.connect(inverse_update_gate, methods.connection.ONE_TO_ONE, 1);

  // Reset gate calculation
  previous_output.connect(reset_gate, methods.connection.ALL_TO_ALL);

  // Memory calculation
  const reset = previous_output.connect(memory_cell, methods.connection.ALL_TO_ALL);
  reset_gate.gate(reset, methods.gating.OUTPUT); // gate

  // Output calculation
  const update1 = previous_output.connect(output, methods.connection.ALL_TO_ALL);
  const update2 = memory_cell.connect(output, methods.connection.ALL_TO_ALL);
  update_gate.gate(update1, methods.gating.OUTPUT);
  inverse_update_gate.gate(update2, methods.gating.OUTPUT);

  // Previous output calculation
  output.connect(previous_output, methods.connection.ONE_TO_ONE, 1);

  // Add to nodes array
  layer.nodes = [update_gate, inverse_update_gate, reset_gate, memory_cell, output, previous_output];

  layer.output = output;

  layer.input = function(from, method, weight) {
    if(from instanceof Layer) from = from.output;
    method = method || methods.connection.ALL_TO_ALL;

    const connections = [
      from.connect(update_gate, method, weight),
      from.connect(reset_gate, method, weight),
      from.connect(memory_cell, method, weight)
    ];

    return connections;
  };

  return layer;
};
/**
 * Creates a Memory layer.
 *
 * The Memory layer makes networks remember a number of previous inputs in an absolute way. For example, if you set the memory option to 3, it will remember the last 3 inputs in the same state as they were inputted.
 *
 * @param {number} size Amount of nodes to build the layer with
 * @param {number} memory Number of previous inputs to remember
 *
 * @returns {Layer} Layer with nodes that store previous inputs
 *
 * @example
 * let { Layer } = require("@liquid-carrot/carrot");
 *
 * let layer = new Layer.Memory(size, memory);
 */
Layer.Memory = function(size, memory) {
  // Create the layer
  const layer = new Layer();

  // Build one constant group per remembered time step, chained one to one
  let previous;
  for (let index = 0; index < memory; index++) {
    const block = new Group(size);

    block.set({
      squash: methods.activation.IDENTITY,
      bias: 0,
      type: 'constant'
    });

    if (previous != undefined) previous.connect(block, methods.connection.ONE_TO_ONE, 1);

    layer.nodes.push(block);
    previous = block;
  }

  layer.nodes.reverse();

  // Because the output can only be one group, fit all memory nodes into one group
  const output_group = new Group(0);
  for (let index = 0; index < layer.nodes.length; index++) {
    layer.nodes[index].nodes.reverse();
    output_group.nodes = output_group.nodes.concat(layer.nodes[index].nodes);
  }
  layer.output = output_group;

  layer.input = function(from, method, weight) {
    if (from instanceof Layer) from = from.output;
    method = method || methods.connection.ALL_TO_ALL;

    if (from.nodes.length !== layer.nodes[layer.nodes.length - 1].nodes.length) throw new Error('Previous layer size must be the same as the memory layer size');

    return from.connect(layer.nodes[layer.nodes.length - 1], methods.connection.ONE_TO_ONE, 1);
  };

  return layer;
};
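
// Usage sketch (illustrative): a Memory layer that keeps the last 3 inputs of a
// 2-node Dense layer; the previous layer's size must match the memory layer's size.
//
//   const input = new Layer.Dense(2);
//   const memory = new Layer.Memory(2, 3);
//   input.connect(memory); // delegated to memory.input(), connected ONE_TO_ONE
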
module.exports = Layer;