@@ -32,6 +32,7 @@ export default class NeuralNetwork {
3232
3333 static get defaults ( ) {
3434 return {
35+ leakyReluAlpha : 0.01 ,
3536 binaryThresh : 0.5 ,
3637 hiddenLayers : [ 3 ] , // array of ints for the sizes of the hidden layers in the network
3738 activation : 'sigmoid' // Supported activation types ['sigmoid', 'relu', 'leaky-relu', 'tanh']
@@ -249,7 +250,7 @@ export default class NeuralNetwork {
249250
250251 _runInputLeakyRelu ( input ) {
251252 this . outputs [ 0 ] = input ; // set output state of input layer
252-
253+ let alpha = this . leakyReluAlpha ;
253254 let output = null ;
254255 for ( let layer = 1 ; layer <= this . outputLayer ; layer ++ ) {
255256 for ( let node = 0 ; node < this . sizes [ layer ] ; node ++ ) {
@@ -260,7 +261,7 @@ export default class NeuralNetwork {
260261 sum += weights [ k ] * input [ k ] ;
261262 }
262263 //leaky relu — NOTE(review): (sum < 0 ? 0 : alpha * sum) is NOT leaky ReLU; it zeroes negative sums and scales positive ones by alpha (i.e. alpha * ReLU(sum)). Leaky ReLU is f(x) = x for x >= 0, alpha * x for x < 0, so this should read (sum < 0 ? alpha * sum : sum). The derivative used in _calculateDeltasLeakyRelu (output > 0 ? error : alpha * error) is already consistent with the correct form.
263- this . outputs [ layer ] [ node ] = ( sum < 0 ? 0 : 0.01 * sum ) ;
264+ this . outputs [ layer ] [ node ] = ( sum < 0 ? 0 : alpha * sum ) ;
264265 }
265266 output = input = this . outputs [ layer ] ;
266267 }
@@ -557,6 +558,7 @@ export default class NeuralNetwork {
557558 * @param target
558559 */
559560 _calculateDeltasLeakyRelu ( target ) {
561+ let alpha = this . leakyReluAlpha ;
560562 for ( let layer = this . outputLayer ; layer >= 0 ; layer -- ) {
561563 for ( let node = 0 ; node < this . sizes [ layer ] ; node ++ ) {
562564 let output = this . outputs [ layer ] [ node ] ;
@@ -572,7 +574,7 @@ export default class NeuralNetwork {
572574 }
573575 }
574576 this . errors [ layer ] [ node ] = error ;
575- this . deltas [ layer ] [ node ] = output > 0 ? error : 0.01 * error ;
577+ this . deltas [ layer ] [ node ] = output > 0 ? error : alpha * error ;
576578 }
577579 }
578580 }
@@ -933,6 +935,7 @@ export default class NeuralNetwork {
933935 */
934936 toFunction ( ) {
935937 const activation = this . activation ;
938+ const leakyReluAlpha = this . leakyReluAlpha ;
936939 let needsVar = false ;
937940 function nodeHandle ( layers , layerNumber , nodeKey ) {
938941 if ( layerNumber === 0 ) {
@@ -962,7 +965,7 @@ export default class NeuralNetwork {
962965 }
963966 case 'leaky-relu' : {
964967 needsVar = true ;
965- return `((v=${ result . join ( '' ) } )<0?0:0.01 *v)` ;
968 return `((v=${ result . join ( '' ) } )<0?0:${ leakyReluAlpha } *v)` ; // NOTE(review): same inverted leaky ReLU as in _runInputLeakyRelu — the generated expression should be ((v=...)<0?${leakyReluAlpha}*v:v)
966969 }
967970 case 'tanh' :
968971 return `Math.tanh(${ result . join ( '' ) } )` ;
@@ -988,4 +991,4 @@ export default class NeuralNetwork {
988991
989992 return new Function ( 'input' , `${ needsVar ? 'var v;' : '' } return ${ result } ;` ) ;
990993 }
991- }
994+ }
0 commit comments