Merge pull request #20 from BradenEverson/graph
Clippy fixes
BradenEverson authored Feb 19, 2024
2 parents 5b5ba76 + 8bdec6e commit a3feb17
Showing 11 changed files with 53 additions and 53 deletions.
4 changes: 2 additions & 2 deletions examples/mnist.rs
@@ -2,7 +2,7 @@ use unda::{core::{data::{input::Input, matrix::Matrix}, network::Sequential, lay

fn main() {
let mut inputs: Vec<&dyn Input> = vec![];
let outputs: Vec<Vec<f32>>;

let mut true_outputs: Vec<Vec<f32>> = vec![];

let inputs_undyn: Vec<Matrix>;
@@ -12,7 +12,7 @@ fn main() {
(inputs_undyn, outputs_uncat) = MnistEntry::generate_mnist();
println!("Done Generating MNIST");

outputs = to_categorical(outputs_uncat);
let outputs: Vec<Vec<f32>> = to_categorical(outputs_uncat);
for i in 0..600{
inputs.push(&inputs_undyn[i]);
true_outputs.push(outputs[i].clone());
6 changes: 3 additions & 3 deletions src/core/data/input.rs
@@ -116,8 +116,8 @@ impl Input for Matrix3D {
}
}

impl Into<Box<dyn Input>> for Vec<f32> {
fn into(self) -> Box<dyn Input> {
Box::new(self)
impl From<Vec<f32>> for Box<dyn Input> {
fn from(val: Vec<f32>) -> Self {
Box::new(val)
}
}
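
Note: the input.rs change above follows Clippy's from_over_into lint. Implementing From is preferred over a hand-written Into because the standard library's blanket impl then supplies the matching Into for free, while the reverse is not true. A minimal standalone sketch of the pattern, using hypothetical Celsius/Fahrenheit types rather than unda's Input trait:

struct Celsius(f64);
struct Fahrenheit(f64);

impl From<Celsius> for Fahrenheit {
    fn from(c: Celsius) -> Self {
        Fahrenheit(c.0 * 9.0 / 5.0 + 32.0)
    }
}

fn main() {
    // The blanket impl `impl<T, U: From<T>> Into<U> for T` supplies .into() here.
    let f: Fahrenheit = Celsius(100.0).into();
    assert_eq!(f.0, 212.0);
}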
10 changes: 5 additions & 5 deletions src/core/data/matrix.rs
@@ -310,7 +310,7 @@ impl Matrix{
}
pub fn new_empty(rows: usize, cols: usize) -> Matrix{
Matrix{
rows: rows,
rows,
columns: cols,
data: vec![vec![0.0; cols]; rows]
}
@@ -329,8 +329,8 @@ impl Matrix{
pub fn sample_noise(&self, noise: &Range<f32>, rng: &mut Box<dyn RngCore>) -> Matrix {
let noise_dist: Distributions = Distributions::Ranged(noise.clone());

let res = self.clone() + noise_dist.sample(rng);
res

self.clone() + noise_dist.sample(rng)
}

/*pub fn add(&mut self, other: &Matrix) -> Matrix {
@@ -346,7 +346,7 @@ impl Matrix{
res
}*/
pub fn dot_multiply(&mut self, other: &Matrix) -> Matrix {
if self.rows != other.rows || self.columns != self.columns{
if self.rows != other.rows || self.columns != other.columns{
panic!("Invalid matrix dot multiplaction, mismatched dimensions:\n{}x{}\n{}x{}",
self.rows,
self.columns,
@@ -386,7 +386,7 @@ impl Matrix{
.into_iter()
.map(|row| row
.into_iter()
.map(|value| function(value))
.map(function)
.collect())
.collect())
}
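Note: the matrix.rs hunks above cover several lints at once: redundant_field_names (rows instead of rows: rows), let_and_return (dropping the res binding in sample_noise), and redundant_closure in map_with, plus a genuine fix to the dot_multiply dimension check, which previously compared self.columns against itself. A small self-contained sketch of the redundant_closure case, with a hypothetical square helper standing in for the mapped function:

fn square(x: f32) -> f32 { x * x }

fn main() {
    let v = vec![1.0_f32, 2.0, 3.0];
    // The closure |x| square(x) only forwards its argument, so the function
    // item can be passed to map directly.
    let with_closure: Vec<f32> = v.iter().copied().map(|x| square(x)).collect();
    let direct: Vec<f32> = v.iter().copied().map(square).collect();
    assert_eq!(with_closure, direct);
}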
2 changes: 1 addition & 1 deletion src/core/data/matrix3d.rs
@@ -239,7 +239,7 @@ impl Matrix3D{
.into_iter()
.map(|row| row
.into_iter()
.map(|value| function(value))
.map(function)
.collect())
.collect())
.collect())
2 changes: 1 addition & 1 deletion src/core/layer/conv.rs
@@ -149,7 +149,7 @@ impl Layer for Convolutional {
errors_mat.to_param().iter().for_each(|error| {
self.loss += error.powi(2);
});
self.loss = self.loss / errors_mat.to_param().len() as f32;
self.loss /= errors_mat.to_param().len() as f32;

println!("{}", gradients_mat);

4 changes: 2 additions & 2 deletions src/core/layer/dense.rs
@@ -177,7 +177,7 @@ impl Layer for Dense{
self.loss += error.powi(2);
});

self.loss = self.loss / errors_mat.to_param().len() as f32;
self.loss /= errors_mat.to_param().len() as f32;

self.time += 1;

@@ -210,7 +210,7 @@ impl Layer for Dense{
}

fn get_activation(&self) -> Option<Activations> {
Some(self.activation_fn.clone())
Some(self.activation_fn)
}
fn shape(&self) -> (usize, usize, usize){
(self.weights.columns, 1, 1)
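Note: the dense.rs edits use Clippy's assign_op_pattern (self.loss /= n rather than self.loss = self.loss / n) and clone_on_copy; the latter assumes Activations derives Copy, which the *activation dereference in layers.rs below also suggests. A sketch of both under that assumption, with hypothetical Layer and Act types:

#[derive(Clone, Copy, Debug, PartialEq)]
enum Act { Sigmoid, Relu }

struct Layer { activation: Act, loss: f32 }

impl Layer {
    fn get_activation(&self) -> Option<Act> {
        Some(self.activation) // Copy type: no .clone() needed
    }
    fn normalize_loss(&mut self, samples: usize) {
        self.loss /= samples as f32; // assign_op_pattern form
    }
}

fn main() {
    let mut layer = Layer { activation: Act::Relu, loss: 8.0 };
    layer.normalize_loss(4);
    assert_eq!(layer.loss, 2.0);
    assert_eq!(layer.get_activation(), Some(Act::Relu));
}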
12 changes: 6 additions & 6 deletions src/core/layer/layers.rs
@@ -70,14 +70,14 @@ pub enum InputTypes{

impl LayerTypes{
pub fn to_layer(&self, prev_rows: usize, rand: &mut Box<dyn RngCore>) -> Box<dyn Layer> {
return match self {
LayerTypes::DENSE(rows, activation, learning) => Box::new(Dense::new(prev_rows, *rows, activation.clone(), learning.clone(), rand)),
match self {
LayerTypes::DENSE(rows, activation, learning) => Box::new(Dense::new(prev_rows, *rows, *activation, *learning, rand)),
LayerTypes::CONV(shape, kernels, stride, filters, activation, learning) => Box::new(Convolutional::new(*filters, *kernels, *shape, *stride, *activation, *learning, rand))
//LayerTypes::CONV(shape, stride, learning) => Box::new()
};
}
}
pub fn get_size(&self) -> usize{
return match self{
match self{
LayerTypes::DENSE(rows, _, _) => *rows,
LayerTypes::CONV(shape, _, _, _, _, _) => shape.0 * shape.1,
}
@@ -86,13 +86,13 @@ impl LayerTypes{

impl InputTypes {
pub fn to_layer(&self) -> LayerTypes {
return match self {
match self {
InputTypes::DENSE(size) => LayerTypes::DENSE(*size, Activations::SIGMOID, 1.0),
InputTypes::CONV(shape, kernel_shape, stride, filters) => LayerTypes::CONV(*shape, *kernel_shape, *stride, *filters, Activations::SIGMOID, 1.0)
}
}
pub fn get_size(&self) -> usize {
return match self {
match self {
InputTypes::DENSE(size) => *size,
InputTypes::CONV(shape, _, _, _) => shape.0 * shape.1,
}
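Note: the layers.rs changes are Clippy's needless_return lint: a match that ends a function is already its value, so the return keyword and trailing semicolon go away, and the Copy-able activation and learning values are dereferenced with * instead of cloned. A tiny sketch of the tail-expression style with a hypothetical Shape enum:

enum Shape { Square(usize), Rect(usize, usize) }

impl Shape {
    // No `return` and no trailing semicolon: the match is the function's value.
    fn area(&self) -> usize {
        match self {
            Shape::Square(s) => s * s,
            Shape::Rect(w, h) => w * h,
        }
    }
}

fn main() {
    assert_eq!(Shape::Square(3).area(), 9);
    assert_eq!(Shape::Rect(2, 5).area(), 10);
}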
38 changes: 19 additions & 19 deletions src/core/layer/methods/activations.rs
@@ -21,13 +21,13 @@ pub enum Activations{
}
impl Activations{
fn get_function(&self) -> Option<Activation>{
return match self{
match self{
Activations::SIGMOID => Some(SIGMOID),
Activations::TANH => Some(TANH),
Activations::RELU => Some(RELU),
Activations::LEAKYRELU => Some(LEAKY_RELU),
_ => None
};
}
}
pub fn apply_fn(&self, mut data: Matrix) -> Matrix {
match self{
@@ -45,16 +45,16 @@ impl Activations{
//println!("\n\n{:?}", data.to_param());
let res = Matrix::from_sized(exp_logits.iter().map(|x| x / sum_exp).collect::<Vec<f32>>(), data.rows, data.columns);
//println!("{}", res);
return res;
res
},
Activations::ELU(alpha) => {
let data_elu = data.to_param()
.iter()
.map(|&x| elu(*alpha, x)).collect();
return Matrix::from_sized(data_elu, data.rows, data.columns);
Matrix::from_sized(data_elu, data.rows, data.columns)
}

};
}
}
pub fn apply_derivative(&self, mut data: Matrix) -> Matrix {
match self{
@@ -68,41 +68,41 @@ impl Activations{
//.map(|(s,ds)| s * ds)
//.collect();
//println!("{:?}", data.to_param());
let res = Matrix::from_sized(vec![1.0; data.rows * data.columns], data.rows, data.columns);

//println!("{}", res);
return res;
Matrix::from_sized(vec![1.0; data.rows * data.columns], data.rows, data.columns)
},
Activations::ELU(alpha) => {
let data_elu = data.to_param()
.iter()
.map(|&x| d_elu(*alpha, x)).collect();
return Matrix::from_sized(data_elu, data.rows, data.columns);
Matrix::from_sized(data_elu, data.rows, data.columns)
}
};
}
}
}


pub(super) const SIGMOID: Activation = Activation {
function: &|x| {
let res = 1.0 / (1.0 + E.powf(-x));
return res;

1.0 / (1.0 + E.powf(-x))
},
derivative: &|x| (1.0 / (1.0 + E.powf(-x))) * (1.0 - (1.0 / (1.0 + E.powf(-x))))
};

pub(super) const TANH: Activation = Activation {
function: &|x| {
let res = f32::tanh(x);
return res;

f32::tanh(x)
},
derivative: &|x| 1.0 - f32::tanh(x).powf(2.0)
};

pub(super) const RELU: Activation = Activation {
function: &|x| {
let res = x.max(0.0);
return res;

x.max(0.0)
},
derivative: &|x| {
!x.is_negative() as i32 as f32
@@ -114,29 +114,29 @@ pub(super) const LEAKY_RELU: Activation = Activation{
if x > 0.0{
return x;
}
return 0.001 * x;
0.001 * x
},
derivative: &|x| {
if x.max(0.0) == x{
return 1.0
}
return 0.001;
0.001
}
};

fn d_elu(alpha: f32, x: f32) -> f32 {
if x > 0.0 {
return 1.0;
}
return alpha * E.powf(x);
alpha * E.powf(x)

}

fn elu(alpha: f32, x: f32) -> f32 {
if x.max(0.0) == x{
return x;
}
return alpha * (E.powf(x) - 1.0);
alpha * (E.powf(x) - 1.0)
}
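
Note: most of the activations.rs edits are the let_and_return and needless_return lints: binding an expression to res only to return it on the next line, or writing return before the last expression of an if branch, adds nothing in Rust. A minimal sketch with a hypothetical free function mirroring the SIGMOID closure:

use std::f32::consts::E;

// Before: let res = 1.0 / (1.0 + E.powf(-x)); return res;
// After: the expression itself is the tail of the function.
fn sigmoid(x: f32) -> f32 {
    1.0 / (1.0 + E.powf(-x))
}

fn main() {
    assert_eq!(sigmoid(0.0), 0.5);
}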


8 changes: 4 additions & 4 deletions src/core/layer/methods/mod.rs
@@ -15,19 +15,19 @@ mod test {
fn test_sigmoid() {
let sigmoid_fn = SIGMOID.function;
let res = sigmoid_fn(0.8);
assert_eq!(res, 0.689974481128);
assert_eq!(res, 0.689_974_5);
let res = sigmoid_fn(1.0);
assert_eq!(res, 0.73105857863);
assert_eq!(res, 0.731_058_6);
let res = sigmoid_fn(0.0);
assert_eq!(res, 0.5);
}
#[test]
fn test_sigmoid_der() {
let sigmoid_der = SIGMOID.derivative;
let res = sigmoid_der(0.8);
assert_eq!(res, 0.21390969652);
assert_eq!(res, 0.213_909_7);
let res = sigmoid_der(1.0);
assert_eq!(res, 0.196611933241);
assert_eq!(res, 0.196_611_93);
let res = sigmoid_der(0.0);
assert_eq!(res, 0.25);
}
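Note: the test literals change under Clippy's excessive_precision lint. An f32 carries roughly 7 significant decimal digits, so the extra digits in the old literals were discarded at parse time anyway, and the lint only proposes a shorter literal when it parses to the exact same value, which is why the assertions keep passing. A quick check of that equivalence:

fn main() {
    // Each pair parses to the identical f32 bit pattern, so nothing is lost
    // by writing the shorter, digit-grouped literal.
    assert_eq!(0.689974481128_f32, 0.689_974_5_f32);
    assert_eq!(0.73105857863_f32, 0.731_058_6_f32);
}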
10 changes: 5 additions & 5 deletions src/core/network.rs
@@ -99,7 +99,7 @@ impl Sequential {
///On the backend, all this is really doing is creating a LayerTypes enum with
///dummy Activation and LearningRate values
pub fn set_input(&mut self, input: InputTypes){
if self.layer_sizes.len() > 0 {
if !self.layer_sizes.is_empty() {
self.layer_sizes[0] = input.get_size();
self.uncompiled_layers[0] = input.to_layer();
} else {
@@ -245,7 +245,7 @@ impl Sequential {
let mut res = vec![];
let parsed = Matrix::from(data[data.len()-1].to_param_2d());

if let None = self.layers[self.layers.len()-1].get_activation() {
if self.layers[self.layers.len()-1].get_activation().is_none() {
panic!("Output layer is not a dense layer");
}

@@ -362,7 +362,7 @@ impl Sequential {
.collect::<FuturesUnordered<_>>();
let res = all_gradients.await;
for gradient_pair in res.iter() {
self.update_gradients(&gradient_pair);
self.update_gradients(gradient_pair);
}
}
println!("]");
@@ -376,7 +376,7 @@ impl Sequential {
let mut minibatch: Vec<(Box<dyn Input>, Vec<f32>)>;

let mut iterations: usize;
while inputs.len() > 0 {
while !inputs.is_empty() {
minibatch = vec![];
iterations = inputs.len().min(self.batch_size);
for _ in 0..iterations {
@@ -439,7 +439,7 @@ impl Sequential {
}
pub fn deserialize_unda_fmt_string(format_string: String) -> Self {
let mut net: Self = Self::new(0);
let parse_triton = format_string.split("#");
let parse_triton = format_string.split('#');
for layer in parse_triton {
let new_layer: Box<dyn Layer> = SerializedLayer::from_string(layer.to_string()).from();
net.layers.push(new_layer);
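Note: the network.rs hunks bundle several more lints: len_zero (!v.is_empty() instead of v.len() > 0), redundant_pattern_matching (.is_none() instead of if let None = ...), needless_borrow when passing gradient_pair, and single_char_pattern (split('#') over split("#"), since a char pattern is typically cheaper to match than a one-character string). A compact, self-contained sketch of those idioms:

fn main() {
    let layer_sizes: Vec<Option<usize>> = vec![Some(3), None];

    // len_zero: is_empty() states the intent directly.
    if !layer_sizes.is_empty() {
        println!("{} layer sizes recorded", layer_sizes.len());
    }

    // redundant_pattern_matching: is_none() instead of `if let None = ...`.
    if layer_sizes[1].is_none() {
        println!("last layer has no size yet");
    }

    // single_char_pattern: a char pattern instead of a one-character &str.
    let fields: Vec<&str> = "D|4|3".split('|').collect();
    assert_eq!(fields, ["D", "4", "3"]);
}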
10 changes: 5 additions & 5 deletions src/core/serialize/ser_layer.rs
@@ -20,13 +20,13 @@ impl SerializedLayer {
pub fn from(&self) -> Box<dyn Layer> {
match self.name {
'D' => {
let weights_f32: Vec<f32> = self.weights.split(" ").into_iter().map(|val| val.parse().unwrap()).collect();
let bias_f32: Vec<f32> = self.bias.split(" ").into_iter().map(|val| val.parse().unwrap()).collect();
let weights_f32: Vec<f32> = self.weights.split(' ').map(|val| val.parse().unwrap()).collect();
let bias_f32: Vec<f32> = self.bias.split(' ').map(|val| val.parse().unwrap()).collect();
let dense_layer: Dense = Dense::new_ser(self.rows, self.cols, weights_f32, bias_f32);
return Box::new(dense_layer)
Box::new(dense_layer)
},
_ => panic!("Not a supported type"),
};
}
}
fn flatten_string(data: &Vec<Vec<f32>>) -> String {
data.to_param()
@@ -38,7 +38,7 @@ impl SerializedLayer {
format!("{}|{}|{}|{}|{}", self.name, self.rows, self.cols, self.weights, self.bias)
}
pub fn from_string(data: String) -> Self {
let mut parse_res = data.split("|");
let mut parse_res = data.split('|');
let name: char = parse_res.next().unwrap().chars().next().unwrap();
let rows: usize = str::parse(parse_res.next().unwrap()).unwrap();
let cols: usize = str::parse(parse_res.next().unwrap()).unwrap();
