Skip to content

Commit

Permalink
Merge pull request #8 from MikhailKravets/0.2.0
Browse files Browse the repository at this point in the history
Update the dependencies
  • Loading branch information
MikhailKravets authored Aug 18, 2023
2 parents 7896e13 + 61cd8fe commit c4df80b
Show file tree
Hide file tree
Showing 12 changed files with 137 additions and 109 deletions.
46 changes: 46 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,49 @@ Cargo.lock
**/*.rs.bk
.idea
*.iml
test.flow

# Created by https://www.toptal.com/developers/gitignore/api/rust,rust-analyzer,visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=rust,rust-analyzer,visualstudiocode

### Rust ###
# Generated by Cargo
# will have compiled files and executables
debug/
target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb

### rust-analyzer ###
# Can be generated by build systems other than cargo (ex: bazelbuild/rust_rules)
rust-project.json


### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets

# Local History for Visual Studio Code
.history/

# Built Visual Studio Code Extensions
*.vsix

### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide

# End of https://www.toptal.com/developers/gitignore/api/rust,rust-analyzer,visualstudiocode
18 changes: 9 additions & 9 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "neuroflow"
version = "0.1.3"
version = "0.2.0"

description = "The neural network library implemented in Rust"
repository = "https://github.com/MikhailKravets/NeuroFlow"
Expand All @@ -13,16 +13,16 @@ authors = ["Mikhail Kravets <[email protected]>"]
exclude = ["examples/*", "tests/*"]

[badges]
travis-ci = { repository = "MikhailKravets/NeuroFlow", branch = "master" }
codecov = { repository = "MikhailKravets/NeuroFlow", branch = "master", service = "github" }

[dependencies]
rand = "0.3"
serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0"
bincode = "^0.9"
csv = "^1.0.0-beta.5"
rand = "~0.8"
serde = "~1.0"
serde_derive = "~1.0"
serde_json = "~1.0"
bincode = "~1.3"
csv = "~1.2"
rand_distr = "0.4.3"

[dev-dependencies]
time = "0.1"
time = "0.1"
41 changes: 21 additions & 20 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,19 +1,20 @@
<div align="center">
<img src="https://raw.githubusercontent.com/MikhailKravets/DataFlow/master/logo.png"><br><br>
<img src="https://raw.githubusercontent.com/MikhailKravets/NeuroFlow/master/logo.png"><br><br>
</div>

[![Build status](https://travis-ci.org/MikhailKravets/NeuroFlow.svg?branch=master)](https://travis-ci.org/MikhailKravets/NeuroFlow)
[![codecov](https://codecov.io/gh/MikhailKravets/NeuroFlow/branch/master/graph/badge.svg)](https://codecov.io/gh/MikhailKravets/NeuroFlow)
[![crates](https://img.shields.io/crates/v/neuroflow.svg)](https://crates.io/crates/neuroflow)

NeuroFlow is fast neural networks (deep learning) Rust crate.
It relies on three pillars: speed, reliability, and speed again.
> NeuroFlow is fast Neural Networks (deep learning) Rust crate.
> It relies on three pillars: speed, reliability, and speed again.
> Hello, everyone! Work on the crate is currently suspended because I am a little too busy to work on it :( Thank you all
...I would write if this library was going to be the second PyTorch from the Rust world.
However, this repository found its place in the educational area and can be
used by young Rustaceans to enter the world of Neural Networks.

## How to use

Let's try to approximate very simple function `0.5*sin(e^x) - cos(e^(-x))`.
Let's try to approximate a very simple function `0.5*sin(e^x) - cos(e^(-x))`.

```rust
extern crate neuroflow;
Expand All @@ -25,16 +26,16 @@ use neuroflow::activators::Type::Tanh;

fn main(){
/*
Define neural network with 1 neuron in input layers. Network contains 4 hidden layers.
And, such as our function returns single value, it is reasonable to have 1 neuron in the output layer.
Define a neural network with 1 neuron in input layers. The network contains 4 hidden layers.
And, such as our function returns a single value, it is reasonable to have 1 neuron in the output layer.
*/
let mut nn = FeedForward::new(&[1, 7, 8, 8, 7, 1]);

/*
Define DataSet.
DataSet is the Type that significantly simplifies work with neural network.
Majority of its functionality is still under development :(
DataSet is the Type that significantly simplifies work with neural networks.
The majority of its functionality is still under development :(
*/
let mut data: DataSet = DataSet::new();
let mut i = -3.0;
Expand All @@ -45,7 +46,7 @@ fn main(){
i += 0.05;
}

// Here, we set necessary parameters and train neural network by our DataSet with 50 000 iterations
// Here, we set the necessary parameters and train the neural network by our DataSet with 50 000 iterations
nn.activation(Tanh)
.learning_rate(0.01)
.train(&data, 50_000);
Expand Down Expand Up @@ -79,8 +80,8 @@ neural networks from files.
/*
In order to save neural network into file call function save from neuroflow::io module.
First argument is link on the saving neural network;
Second argument is path to the file.
The first argument is the link to the saving neural network;
The second argument is the path to the file.
*/
neuroflow::io::save(&mut nn, "test.flow").unwrap();

Expand All @@ -89,7 +90,7 @@ neural networks from files.
of load function from neuroflow::io module.
We must specify the type of new_nn variable.
The only argument of load function is the path to file containing
The only argument of the load function is the path to a file containing
the neural network
*/
let mut new_nn: FeedForward = neuroflow::io::load("test.flow").unwrap();
Expand All @@ -99,7 +100,7 @@ neural networks from files.

Classic XOR problem (with no classic input of data)

Let's create file named `TerribleTom.csv` in the root of project. This file should have following innards:
Let's create a file named `TerribleTom.csv` at the root of the project. This file should have the following innards:

```
0,0,-,0
Expand All @@ -108,7 +109,7 @@ Let's create file named `TerribleTom.csv` in the root of project. This file shou
1,1,-,0
```

where `-` is the delimiter that separates input vector from its desired output vector.
where `-` is the delimiter that separates the input vector from its desired output vector.

```rust
extern crate neuroflow;
Expand All @@ -120,9 +121,9 @@ use neuroflow::activators::Type::Tanh;

fn main(){
/*
Define neural network with 2 neurons in input layers,
Define a neural network with 2 neurons in input layers,
1 hidden layer (with 2 neurons),
1 neuron in output layer
1 neuron in the output layer
*/
let mut nn = FeedForward::new(&[2, 2, 1]);

Expand Down Expand Up @@ -156,7 +157,7 @@ for [1.000, 1.000], [0.000] -> [0.000]
Insert the following line into your project's cargo.toml dependencies block
```toml
[dependencies]
neuroflow = "0.1.3"
neuroflow = "~0.2"
```

Then, in the project's root file
Expand All @@ -168,5 +169,5 @@ extern crate neuroflow;
MIT License

### Attribution
The origami bird from logo is made by [Freepik](https://www.freepik.com/)
The origami bird from the logo is made by [Freepik](https://www.freepik.com/)

16 changes: 4 additions & 12 deletions examples/approximation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,22 +3,14 @@ extern crate time;
extern crate rand;

use neuroflow::FeedForward;
use neuroflow::data::{DataSet, Extractable};
use neuroflow::data::DataSet;

use rand::distributions::IndependentSample;
use rand::distributions::range::Range;
use rand::distributions::normal::Normal;

use neuroflow::activators;
use neuroflow::activators::tanh;
use neuroflow::activators::Type::Tanh;
use neuroflow::activators::Type::Sigmoid;
use neuroflow::estimators;



fn main(){
let mut nn = FeedForward::new(&[1, 7, 8, 8, 7, 1]);
let mut nn = FeedForward::new(&[1, 5, 3, 1]);
let mut data: DataSet = DataSet::new();
let mut i = -3.0;

Expand All @@ -30,8 +22,8 @@ fn main(){
let prev = time::now_utc();

nn.activation(Tanh)
.learning_rate(0.01)
.train(&data, 50_000);
.learning_rate(0.007)
.train(&data, 60_000);

let mut res;

Expand Down
47 changes: 21 additions & 26 deletions examples/classification.rs
Original file line number Diff line number Diff line change
@@ -1,61 +1,54 @@
extern crate neuroflow;
extern crate time;
extern crate rand;
extern crate rand_distr;

use neuroflow::FeedForward;
use neuroflow::data::{DataSet, Extractable};

use rand::distributions::IndependentSample;
use rand::distributions::range::Range;
use rand::distributions::normal::Normal;
use rand::distributions::Uniform;
use rand_distr::Normal;

use neuroflow::activators;
use neuroflow::activators::tanh;
use neuroflow::activators::Type::Tanh;
use neuroflow::activators::Type::Sigmoid;
use neuroflow::estimators;
use rand::{thread_rng, Rng};


fn main(){
let allowed_error = 0.08; // Max allowed error is 8%
fn main() -> Result<(), Box<dyn std::error::Error>> {
let mut nn = FeedForward::new(&[2, 3, 4, 3]);
let mut sample;
let mut training_set: Vec<(Vec<f64>, Vec<f64>)> = Vec::new();
let training_amount = (20f64 * estimators::widrows(&[3, 4, 3], 0.8)) as i32;

let c1 = Normal::new(1f64, 0.5);
let c2 = Normal::new(2f64, 1.0);
let c3 = Normal::new(3f64, 0.35);
let mut rng = thread_rng();
let c1 = Normal::new(1f64, 0.5)?;
let c2 = Normal::new(2f64, 1.0)?;
let c3 = Normal::new(3f64, 0.35)?;

let mut k = 0;
for _ in 0..training_amount{
if k == 0{
training_set.push((vec![c1.ind_sample(&mut rand::thread_rng()), c1.ind_sample(&mut rand::thread_rng())],
vec![1f64, 0f64, 0f64]));
training_set.push((vec![rng.sample(c1), rng.sample(c1)], vec![1f64, 0f64, 0f64]));
k += 1;
}
else if k == 1 {
training_set.push((vec![c2.ind_sample(&mut rand::thread_rng()), c2.ind_sample(&mut rand::thread_rng())],
vec![0f64, 1f64, 0f64]));
training_set.push((vec![rng.sample(c2), rng.sample(c2)], vec![0f64, 1f64, 0f64]));
k += 1;
}
else if k == 2 {
training_set.push((vec![c3.ind_sample(&mut rand::thread_rng()), c3.ind_sample(&mut rand::thread_rng())],
vec![0f64, 0f64, 1f64]));
training_set.push((vec![rng.sample(c3), rng.sample(c3)], vec![0f64, 0f64, 1f64]));
k += 1;
}
else {
k = 0;
}
}

let rnd_range = Range::new(0, training_set.len());
let rnd_range = Uniform::new(0, training_set.len());

let prev = time::now_utc();
nn.activation(neuroflow::activators::Type::Tanh);

for _ in 0..50_000{
k = rnd_range.ind_sample(&mut rand::thread_rng());
k = rng.sample(rnd_range);
nn.fit(&training_set[k].0, &training_set[k].1);
}

Expand All @@ -72,26 +65,28 @@ fn main(){
}

{
sample = [c1.ind_sample(&mut rand::thread_rng()), c1.ind_sample(&mut rand::thread_rng())];
sample = [rng.sample(c1), rng.sample(c1)];
let res = nn.calc(&sample);
println!("for: [{:?}], [1, 0, 0] -> {:?}", sample, res);
assert!(check(&res, 0));
}

{
sample = [c2.ind_sample(&mut rand::thread_rng()), c2.ind_sample(&mut rand::thread_rng())];
sample = [rng.sample(c2), rng.sample(c2)];
let res = nn.calc(&sample);
println!("for: [{:?}], [0, 1, 0] -> {:?}", sample, res);
assert!(check(res, 1));
assert!(check(&res, 1));
}

{
sample = [c3.ind_sample(&mut rand::thread_rng()), c3.ind_sample(&mut rand::thread_rng())];
sample = [rng.sample(c3), rng.sample(c3)];
let res = nn.calc(&sample);
println!("for: [{:?}], [0, 0, 1] -> {:?}", sample, res);
assert!(check(res, 2));
assert!(check(&res, 2));
}

println!("\nSpend time: {}", (time::now_utc() - prev));
assert!(true);

Ok(())
}
Binary file modified logo.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading

0 comments on commit c4df80b

Please sign in to comment.