started covering project with tests; NeuralNetwork.class is now covered
RusFortunat committed Dec 11, 2024
1 parent af8aaf6 commit 13963cf
Showing 5 changed files with 210 additions and 28 deletions.
6 changes: 4 additions & 2 deletions pom.xml
@@ -51,12 +51,14 @@
      <scope>runtime</scope>
      <optional>true</optional>
    </dependency>
-   <dependency>
+
+   <!-- for testing; jupiter.junit is inside -->
+   <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-test</artifactId>
      <scope>test</scope>
    </dependency>
- </dependencies>
+ </dependencies>

  <build>
    <plugins>
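The spring-boot-starter-test dependency transitively brings in JUnit Jupiter together with Spring's test support (plus AssertJ, Hamcrest, and Mockito), which is what the @SpringBootTest and @Autowired wiring in the new NeuralNetworkTest relies on. As a minimal illustration of what the starter enables, here is a hypothetical sketch of a context smoke test; the class is not part of this commit:

// Hypothetical smoke test, not in this commit: boots the Spring context
// via the starter's test support and fails if any bean cannot be created.
package com.guessNumbersWithAI;

import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;

@SpringBootTest
class ContextLoadsTest {

    @Test
    void contextLoads() {
        // passes as long as the application context starts successfully
    }
}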
@@ -41,7 +41,7 @@ public String hanglePostMapping(@ModelAttribute("NeuralNetwork") NeuralNetwork o
        double[] inputVector = drawnImages.processRawInput();

        // load network parameters from the file
-       Resource resource= resourceLoader.getResource(
+       Resource resource = resourceLoader.getResource(
                "classpath:net_params_size784_256_10_lr0.001_trainEps100.txt");
        InputStream inputStream = resource.getInputStream();
        ourNeuralNetwork.loadNetworkParameters(inputStream);
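Since resource.getInputStream() in the controller opens a classpath stream that is not explicitly closed in the lines shown, one option is to wrap the same calls in try-with-resources. The sketch below is not part of this commit: the helper name is illustrative, it assumes the ResourceLoader and NeuralNetwork objects shown in the diff, and it declares throws Exception (as the new test class does) because loadNetworkParameters's checked exceptions are not visible here.

// Sketch only (not part of this commit): load the trained parameters with
// try-with-resources so the classpath stream is always closed.
static void loadTrainedParameters(ResourceLoader resourceLoader,
                                  NeuralNetwork network) throws Exception {
    Resource resource = resourceLoader.getResource(
            "classpath:net_params_size784_256_10_lr0.001_trainEps100.txt");
    try (InputStream inputStream = resource.getInputStream()) {
        network.loadNetworkParameters(inputStream);
    }
}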
100 changes: 90 additions & 10 deletions src/main/java/com/guessNumbersWithAI/model/NeuralNetwork.java
@@ -5,15 +5,18 @@

package com.guessNumbersWithAI.model;

+import org.springframework.stereotype.Component;
+
import java.io.File;
import java.io.InputStream;
import java.util.Scanner;

+@Component
public class NeuralNetwork {

-   private int inputSize;
-   private int hiddenSize;
-   private int outputSize;
+   private int inputSize = 28*28; // MNIST training images are of 28x28 pixel size
+   private int hiddenSize = 256; // arbitrary, should not be too small or too big
+   private int outputSize = 10; // 0-9 digits that network will be guessing
    private double[] outputVector;

    // the neural network parameters that will be loaded from the file
@@ -29,9 +32,6 @@ public class NeuralNetwork {
    // I will let the user choose which neural network to use and load parameters later
    public NeuralNetwork() {

-       this.inputSize = 28*28; // MNIST training images are of 28x28 pixel size
-       this.hiddenSize = 256; // arbitrary, should not be too small or too big
-       this.outputSize = 10; // 0-9 digits that network will be guessing
        this.outputVector = new double[outputSize];

        this.firstLayerWeights = new double[hiddenSize][inputSize];
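With the default sizes above (inputSize = 784, hiddenSize = 256, outputSize = 10), the arrays allocated here hold 784*256 + 256 = 200,960 parameters in the first layer and 256*10 + 10 = 2,570 in the second, 203,530 in total, which gives a quick sanity check on the size of the loaded parameter file. A small illustrative helper, not part of the class:

// Illustrative only: total learned parameters of the two-layer network.
static long countParameters(int inputSize, int hiddenSize, int outputSize) {
    long firstLayer = (long) hiddenSize * inputSize + hiddenSize;    // weights + biases
    long secondLayer = (long) outputSize * hiddenSize + outputSize;  // weights + biases
    return firstLayer + secondLayer;  // 203_530 for 784, 256, 10
}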
@@ -87,16 +87,17 @@ public void forward(double[] input) throws RuntimeException{
        }

        // if one of the values is negative, shift the entire vector
-       System.out.println("output vector before softmax:");
        double totalSum = 0;
        for(int i = 0; i < outputSize; i++){
            if(smallestValue < 0) outputVector[i] += Math.abs(smallestValue);
            totalSum += outputVector[i];
        }

        // normalize the output vector
-       for(int i = 0; i < outputSize; i++){
-           outputVector[i] =outputVector[i] / totalSum;
+       if(totalSum != 0){
+           for(int i = 0; i < outputSize; i++){
+               outputVector[i] =outputVector[i] / totalSum;
+           }
        }

        // now let's magnify the difference between the output values,
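To make the shift-and-normalize step above concrete: for a raw output of {-1.0, 0.0, 3.0} the smallest value is -1.0, so each entry is shifted by 1.0 to {0.0, 1.0, 4.0}; the sum is 5.0, and dividing by it yields {0.0, 0.2, 0.8}. The totalSum != 0 guard added in this commit prevents a division by zero when every entry is zero. A standalone sketch of just this step (the method name is illustrative and not part of the class):

import java.util.Arrays;

// Illustrative sketch of forward()'s shift-and-normalize step:
// shift so the minimum is zero, then divide by the guarded total sum.
static double[] shiftAndNormalize(double[] raw) {
    double smallestValue = Arrays.stream(raw).min().orElse(0.0);
    double[] out = raw.clone();
    double totalSum = 0;
    for (int i = 0; i < out.length; i++) {
        if (smallestValue < 0) out[i] += Math.abs(smallestValue);
        totalSum += out[i];
    }
    if (totalSum != 0) {
        for (int i = 0; i < out.length; i++) out[i] /= totalSum;
    }
    return out;  // {-1.0, 0.0, 3.0} -> {0.0, 0.2, 0.8}
}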
@@ -190,6 +191,86 @@ public void setOutputVector(double[] outputVector) {
this.outputVector = outputVector;
}

public int getInputSize() {
return inputSize;
}

public void setInputSize(int inputSize) {
this.inputSize = inputSize;
}

public int getHiddenSize() {
return hiddenSize;
}

public void setHiddenSize(int hiddenSize) {
this.hiddenSize = hiddenSize;
}

public int getOutputSize() {
return outputSize;
}

public void setOutputSize(int outputSize) {
this.outputSize = outputSize;
}

public double[][] getFirstLayerWeights() {
return firstLayerWeights;
}

public void setFirstLayerWeights(double[][] firstLayerWeights) {
this.firstLayerWeights = firstLayerWeights;
}

public double[] getFirstLayerBiases() {
return firstLayerBiases;
}

public void setFirstLayerBiases(double[] firstLayerBiases) {
this.firstLayerBiases = firstLayerBiases;
}

public double[][] getSecondLayerWeights() {
return secondLayerWeights;
}

public void setSecondLayerWeights(double[][] secondLayerWeights) {
this.secondLayerWeights = secondLayerWeights;
}

public double[] getSecondLayerBiases() {
return secondLayerBiases;
}

public void setSecondLayerBiases(double[] secondLayerBiases) {
this.secondLayerBiases = secondLayerBiases;
}

// set random parameters, for testing purposes
public void setRandomNetworkParameters(){
for(int i = 0; i < hiddenSize; i++){
for(int j = 0; j < inputSize; j++) {
firstLayerWeights[i][j] = Math.random();
}
}

for(int i = 0; i < hiddenSize; i++){
firstLayerBiases[i] = Math.random();
}

for(int i = 0; i < outputSize; i++){
for(int j = 0; j < hiddenSize; j++) {
secondLayerWeights[i][j] = Math.random();
}
}

for(int i = 0; i < outputSize; i++){
secondLayerBiases[i] = Math.random();
}
}


// printers, for debug purposes
public void printNetworkParameteres(){
System.out.println("firstLayerWeights:");
@@ -218,5 +299,4 @@ public void printNetworkParameteres(){
}
System.out.println("\n");
}

}
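The new setRandomNetworkParameters() above relies on Math.random(), so each test run exercises different weights. If reproducibility across runs ever matters, a seeded java.util.Random could be used instead; a hedged variant sketch, not part of this commit:

// Variant sketch (not in this commit): deterministic random parameters
// from a fixed seed, for reproducible test runs.
public void setRandomNetworkParameters(long seed) {
    java.util.Random rng = new java.util.Random(seed);
    for (int i = 0; i < hiddenSize; i++) {
        for (int j = 0; j < inputSize; j++) firstLayerWeights[i][j] = rng.nextDouble();
        firstLayerBiases[i] = rng.nextDouble();
    }
    for (int i = 0; i < outputSize; i++) {
        for (int j = 0; j < hiddenSize; j++) secondLayerWeights[i][j] = rng.nextDouble();
        secondLayerBiases[i] = rng.nextDouble();
    }
}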

This file was deleted.

115 changes: 115 additions & 0 deletions src/test/java/com/guessNumbersWithAI/NeuralNetworkTest.java
@@ -0,0 +1,115 @@
package com.guessNumbersWithAI;

import com.guessNumbersWithAI.model.NeuralNetwork;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;

import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.stream.DoubleStream;

import static org.junit.jupiter.api.Assertions.*;


@SpringBootTest
public class NeuralNetworkTest {

@Autowired
ResourceLoader resourceLoader;

@Autowired
NeuralNetwork neuralNetwork;

@BeforeEach
public void setupNeuralNetwork(){
neuralNetwork = new NeuralNetwork(); // reset all neural network parameters to zero
}

@DisplayName("Test forward() method")
@Test
public void testForward(){

// generate random input vector
double[] input = new double[28*28];
for(int i = 0; i < input.length; i++){
input[i] = Math.random();
}

// passing an input vector to a NeuralNetwork whose parameters are all zero should produce an all-zero output vector
neuralNetwork.forward(input);
double[] output = neuralNetwork.getOutputVector();
double sum = DoubleStream.of(output).sum();

assertEquals(0.0, sum, "passing input to empty network should return empty vector");


// set random parameters and check that forward() produces non-zero output vector
neuralNetwork.setRandomNetworkParameters();
neuralNetwork.forward(input);
output = neuralNetwork.getOutputVector();
assertNotEquals(0.0, DoubleStream.of(output).sum(),
"the output vector should not contain only zeros");


// output vector should not contain negative values
boolean contains = DoubleStream.of(output).anyMatch(x -> x < 0);
assertFalse(contains, "the output vector should not contain any negative elements");


// the element at the answer index should be the largest value in the output vector
double networkAnswer = output[neuralNetwork.getAnswer()]; // getAnswer() returns the index of the network's answer
Arrays.sort(output);
double answer = output[output.length-1];

assertEquals(answer, networkAnswer,
"The answer that forward() produces must be the element of output with max value");
}


@DisplayName("Test loading of trained neural network parameters")
@Test
public void testLoadNetworkParameters() throws Exception{

// provide non-existing file and test for I/O FileNotFoundException
Resource resource1 = resourceLoader.getResource(
"classpath:net_params.txt");
assertThrows(FileNotFoundException.class, () -> {InputStream inputStream = resource1.getInputStream();},
"trying to load non-existing file should throw I/O FileNotFoundException" );

// load trained neural network parameters
Resource resource2 = resourceLoader.getResource(
"classpath:net_params_size784_256_10_lr0.001_trainEps100.txt");
InputStream inputStream = resource2.getInputStream();
neuralNetwork.loadNetworkParameters(inputStream);

// check that arrays are not empty now
double[] firstLayerBiases = neuralNetwork.getFirstLayerBiases();
boolean firstLayerBiasesHasNonzeroElem = DoubleStream.of(firstLayerBiases).anyMatch(x -> x!=0);
assertTrue(firstLayerBiasesHasNonzeroElem,
"all elements cannot be zero in double[] firstLayerBiases");

double[][] firstLayerWeights = neuralNetwork.getFirstLayerWeights();
DoubleStream weightsStream1 = Arrays.stream(firstLayerWeights).flatMapToDouble(x -> Arrays.stream(x));
boolean firstLayerWeightsHasNonzeroElem = weightsStream1.anyMatch(x -> x!=0);
assertTrue(firstLayerWeightsHasNonzeroElem,
"all elements cannot be zero in double[][] firstLayerWeights");

double[] secondLayerBiases = neuralNetwork.getSecondLayerBiases();
boolean secondLayerBiasesHasNonzeroElem = DoubleStream.of(secondLayerBiases).anyMatch(x -> x!=0);
assertTrue(secondLayerBiasesHasNonzeroElem,
"all elements cannot be zero in double[] secondLayerBiases");

double[][] secondLayerWeights = neuralNetwork.getSecondLayerWeights();
DoubleStream weightsStream2 = Arrays.stream(secondLayerWeights).flatMapToDouble(x -> Arrays.stream(x));
boolean secondLayerWeightsHasNonzeroElem = weightsStream2.anyMatch(x -> x!=0);
assertTrue(secondLayerWeightsHasNonzeroElem,
"all elements cannot be zero in double[][] secondLayerWeights");
}

}
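A note on testForward() above: Arrays.sort(output) sorts the array returned by getOutputVector() in place, which is fine for the final assertion but mutates the vector. An equivalent check without mutation could use DoubleStream.max(); a sketch of that assertion only, assuming the same neuralNetwork and output variables and the imports already present in the test class:

// Sketch: non-mutating version of the "answer is the max element" check.
double networkAnswer = output[neuralNetwork.getAnswer()];
double maxValue = DoubleStream.of(output).max().getAsDouble();
assertEquals(maxValue, networkAnswer,
        "The answer that forward() produces must be the element of output with max value");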
