Skip to content

Commit

Permalink
Calculate bias...? I think
Browse files Browse the repository at this point in the history
  • Loading branch information
Rhydian Jenkins committed Dec 23, 2023
1 parent ebe443d commit 2f6ed17
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 6 deletions.
2 changes: 1 addition & 1 deletion server/src/neural_network/network.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use indicatif::ProgressBar;
use serde::Serialize;

// Network topology constants.
// 28x28 grayscale pixels per MNIST-style input image.
pub const NUM_RAW_INPUTS: usize = 784;
// Size of the single hidden layer (reduced from 64 in this commit).
const NUM_HIDDEN_NEURONS: usize = 16;
// One output neuron per digit class (0-9).
const NUM_OUTPUTS: usize = 10;

#[derive(Clone, Debug, Serialize)]
Expand Down
17 changes: 12 additions & 5 deletions server/src/neural_network/perceptron.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use std::f64::consts::E;
// use std::f64::consts::E;

use serde::Serialize;

Expand Down Expand Up @@ -30,7 +30,7 @@ impl Perceptron {
.iter()
.zip(self.weights.iter())
.map(|(input, weight)| input * weight)
.sum::<f64>();
.sum();

sigmoid(weighted_sum + self.bias)
}
Expand All @@ -47,17 +47,24 @@ impl Perceptron {
})
.collect::<Vec<f64>>();

// TODO update bias of prev layer
        // TODO zipping error signals with weights of prev layer makes `new_weights` 10 long
let perceptron_output = self.activate(prev_layer_results);
self.bias = calculate_new_bias(self.bias, average_error, perceptron_output);

debug_assert_eq!(new_weights.len(), self.weights.len());

self.weights = new_weights;
}
}

/// Computes an updated bias for a perceptron from one training step.
///
/// Scales the averaged error by the sigmoid derivative evaluated at the
/// perceptron's output (`out * (1 - out)`) and by `LEARNING_RATE`, then
/// adds the result to the previous bias.
///
/// NOTE(review): the update is additive; this is correct only if
/// `average_error` follows the `target - output` sign convention —
/// confirm against the caller.
fn calculate_new_bias(prev_bias: f64, average_error: f64, perceptron_output: f64) -> f64 {
    // Derivative of the sigmoid expressed in terms of its own output.
    let derivative = perceptron_output * (1.0 - perceptron_output);

    prev_bias + LEARNING_RATE * average_error * derivative
}

/// Logistic sigmoid activation: maps any real input into the open
/// interval (0, 1), with `sigmoid(0) == 0.5`.
///
/// Uses `f64::exp` directly rather than `E.powf(-x)` — it is the
/// idiomatic form and avoids the generic `powf` path.
fn sigmoid(x: f64) -> f64 {
    1.0 / (1.0 + (-x).exp())
}

#[cfg(test)]
Expand Down

0 comments on commit 2f6ed17

Please sign in to comment.