Skip to content

Commit

Permalink
Merge upstream fork
Browse files Browse the repository at this point in the history
  • Loading branch information
zafarali committed Dec 1, 2016
2 parents 45bbd8a + f9df52a commit 62f8f30
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 6 deletions.
2 changes: 1 addition & 1 deletion src/feedforwardNeuralNetwork.js
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ class FeedforwardNeuralNetwork {
layerOptions = layerOptions || new Array(layersSize.length-1);

// output layer must be a sigmoid to give probabilities
layerOptions.push({nonLinearity:'sigmoid'})
layerOptions.push({nonLinearity: 'sigmoid'})

if(layerOptions.length !== layersSize.length){
throw Error('Must have the same number of layer options as layer size');
Expand Down
9 changes: 4 additions & 5 deletions src/layer.js
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ function sigmoidGradient(value) {
* @returns {number}
**/

function tanh(value){
/**
 * Hyperbolic tangent activation function.
 * Thin wrapper around the native implementation.
 * @param {number} x - pre-activation input value
 * @returns {number} tanh(x), a value in the open interval (-1, 1)
 **/
function tanh(x) {
    const result = Math.tanh(x);
    return result;
}

Expand All @@ -149,8 +149,9 @@ function tanh(value){
* @returns {number}
**/

function tanhGradient(value){
return 1-Math.pow(value, 2);

/**
 * Gradient of the tanh activation.
 * NOTE(review): computes 1 - value^2, which equals tanh'(z) only when
 * `value` is the already-activated output tanh(z) — presumably how the
 * layer backward pass calls it; confirm against the caller.
 * @param {number} activated - activation output to differentiate at
 * @returns {number} 1 - activated^2
 **/
function tanhGradient(activated) {
    return 1 - activated * activated;
}

/**
Expand Down Expand Up @@ -180,8 +181,6 @@ function reluGradient(value) {
} else {
return 1;
}
}


/**
* Function that calculates the leaky rectified linear unit (leaky RELU)
Expand Down

0 comments on commit 62f8f30

Please sign in to comment.