
Commit

adding dropout as a function of the layer rather than making it its own layer (wip)

It is always a dilemma, because adding functionality to a layer makes it less flexible to use.
JulioJerez committed Nov 15, 2023
1 parent 4e714cc commit 5518d7a
Showing 11 changed files with 24 additions and 167 deletions.
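
Summary of the diff below: the standalone ndBrainLayerDropOut layer (ndBrainLayerDropOut.h/.cpp and its references in ndBrainInc.h and ndBrainSaveLoad.cpp) is deleted, ndBrainLayerLinear gains an m_dropOut mask and an m_dropOutProbability member, the linear and convolutional layers get a virtual UpdateDropOut() hook that is still stubbed out with ndAssert(0), and the MNIST training demo no longer pushes explicit dropout layers. As a rough illustration of the direction (plain C++ with made-up names, not the ndBrain API), a layer that owns its dropout state might look like this:

// Illustrative only: a dense layer that owns its dropout mask, the direction
// this commit moves toward. The alternative is a separate dropout layer
// pushed after every stage, which is more composable but doubles the layer
// count and must be bypassed at inference time.
#include <random>
#include <vector>

class DenseWithDropOut
{
    public:
    DenseWithDropOut(int outputs, float keepProbability)
        :m_mask(outputs, 1.0f)              // all ones => dropout disabled
        ,m_keepProbability(keepProbability)
    {
    }

    // Called by the trainer once per mini batch; inverted dropout scales the
    // surviving units by 1/keepProbability so inference needs no rescaling.
    void UpdateDropOut(std::mt19937& rng)
    {
        std::bernoulli_distribution keep(m_keepProbability);
        for (float& m : m_mask)
        {
            m = keep(rng) ? 1.0f / m_keepProbability : 0.0f;
        }
    }

    // Applied after the usual weights * input + bias computation.
    void ApplyDropOut(std::vector<float>& output) const
    {
        for (size_t i = 0; i < output.size(); ++i)
        {
            output[i] *= m_mask[i];
        }
    }

    private:
    std::vector<float> m_mask;
    float m_keepProbability;
};
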
@@ -387,7 +387,8 @@ static void Cifar10TrainingSet()
const ndBrainLayerConvolutionalMaxPooling_2d* pooling;

#if 0
#define ACTIVATION_TYPE ndBrainLayerReluActivation
//#define ACTIVATION_TYPE ndBrainLayerReluActivation
#define DIGIT_ACTIVATION_TYPE ndBrainLayerLeakyReluActivation
#else
#define ACTIVATION_TYPE ndBrainLayerTanhActivation
#endif
@@ -273,6 +273,7 @@ static void MnistTrainingSet()
}

for (ndInt32 epoch = 0; epoch < 1000; ++epoch)
//for (ndInt32 epoch = 0; epoch < 1; ++epoch)
{
ndInt32 start = 0;
ndMemSet(failCount, ndUnsigned32(0), D_MAX_THREADS_COUNT);
@@ -410,37 +411,31 @@ static void MnistTrainingSet()

layers.PushBack(new ndBrainLayerConvolutional_2d(width, height, 1, 3, 16));
conv = (ndBrainLayerConvolutional_2d*)(layers[layers.GetCount() - 1]);
layers.PushBack(new ndBrainLayerDropOut(layers[layers.GetCount() - 1]->GetOutputSize()));
layers.PushBack(new DIGIT_ACTIVATION_TYPE(conv->GetOutputSize()));
layers.PushBack(new ndBrainLayerConvolutionalMaxPooling_2d(conv->GetOutputWidth(), conv->GetOutputHeight(), conv->GetOutputChannels()));
pooling = (ndBrainLayerConvolutionalMaxPooling_2d*)(layers[layers.GetCount() - 1]);

layers.PushBack(new ndBrainLayerConvolutional_2d(pooling->GetOutputWidth(), pooling->GetOutputHeight(), pooling->GetOutputChannels(), 3, 32));
conv = (ndBrainLayerConvolutional_2d*)(layers[layers.GetCount() - 1]);
layers.PushBack(new ndBrainLayerDropOut(layers[layers.GetCount() - 1]->GetOutputSize()));
layers.PushBack(new DIGIT_ACTIVATION_TYPE(conv->GetOutputSize()));
layers.PushBack(new ndBrainLayerConvolutionalMaxPooling_2d(conv->GetOutputWidth(), conv->GetOutputHeight(), conv->GetOutputChannels()));
pooling = (ndBrainLayerConvolutionalMaxPooling_2d*)(layers[layers.GetCount() - 1]);

layers.PushBack(new ndBrainLayerConvolutional_2d(pooling->GetOutputWidth(), pooling->GetOutputHeight(), pooling->GetOutputChannels(), 3, 32));
layers.PushBack(new ndBrainLayerDropOut(layers[layers.GetCount() - 1]->GetOutputSize()));
conv = (ndBrainLayerConvolutional_2d*)(layers[layers.GetCount() - 1]);
layers.PushBack(new DIGIT_ACTIVATION_TYPE(conv->GetOutputSize()));

#else

layers.PushBack(new ndBrainLayerLinear(trainingDigits->GetColumns(), neuronsPerLayers));
layers.PushBack(new ndBrainLayerDropOut(layers[layers.GetCount() - 1]->GetOutputSize()));
layers.PushBack(new ndBrainLayerTanhActivation(layers[layers.GetCount() - 1]->GetOutputSize()));
layers.PushBack(new DIGIT_ACTIVATION_TYPE(layers[layers.GetCount() - 1]->GetOutputSize()));

#endif

layers.PushBack(new ndBrainLayerLinear(layers[layers.GetCount() - 1]->GetOutputSize(), neuronsPerLayers));
layers.PushBack(new ndBrainLayerDropOut(layers[layers.GetCount() - 1]->GetOutputSize()));
layers.PushBack(new DIGIT_ACTIVATION_TYPE(layers[layers.GetCount() - 1]->GetOutputSize()));

layers.PushBack(new ndBrainLayerLinear(layers[layers.GetCount() - 1]->GetOutputSize(), neuronsPerLayers));
layers.PushBack(new ndBrainLayerDropOut(layers[layers.GetCount() - 1]->GetOutputSize()));
layers.PushBack(new DIGIT_ACTIVATION_TYPE(layers[layers.GetCount() - 1]->GetOutputSize()));

layers.PushBack(new ndBrainLayerLinear(layers[layers.GetCount() - 1]->GetOutputSize(), trainingLabels->GetColumns()));
1 change: 0 additions & 1 deletion newton-4.00/sdk/dBrain/ndBrainInc.h
@@ -38,7 +38,6 @@
#include <ndBrainOptimizer.h>
#include <ndBrainThreadPool.h>
#include <ndBrainLayerLinear.h>
#include <ndBrainLayerDropOut.h>
#include <ndBrainReplayBuffer.h>
#include <ndBrainOptimizerSgd.h>
#include <ndBrainOptimizerAdam.h>
5 changes: 5 additions & 0 deletions newton-4.00/sdk/dBrain/ndBrainLayerConvolutional_2d.cpp
@@ -231,6 +231,11 @@ void ndBrainLayerConvolutional_2d::AdamUpdate(const ndBrainLayer& u, const ndBra
}
}

void ndBrainLayerConvolutional_2d::UpdateDropOut()
{
ndAssert(0);
}

void ndBrainLayerConvolutional_2d::MakePrediction(const ndBrainVector& input, ndBrainVector& output) const
{
ndAssert(input.GetCount() == GetInputSize());
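
The convolutional UpdateDropOut() above is only a placeholder (ndAssert(0)); the commit is marked wip. If it is filled in later, one common choice for convolutional layers is per-channel ("spatial") dropout, where an entire feature map is kept or zeroed together. A hedged sketch in plain C++, assuming a channels-by-height-by-width output layout (illustrative names, not the SDK's):

#include <random>
#include <vector>

// Re-sample one mask entry per output channel, then scale every element of a
// kept channel by 1/keepProbability (inverted dropout). The output buffer is
// assumed to hold one height*width plane per channel, channel by channel.
inline void ApplySpatialDropOut(std::vector<float>& output,
    int channels, int height, int width, float keepProbability, std::mt19937& rng)
{
    std::bernoulli_distribution keep(keepProbability);
    const int planeSize = height * width;
    for (int c = 0; c < channels; ++c)
    {
        const float scale = keep(rng) ? 1.0f / keepProbability : 0.0f;
        for (int i = 0; i < planeSize; ++i)
        {
            output[c * planeSize + i] *= scale;
        }
    }
}
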
1 change: 1 addition & 0 deletions newton-4.00/sdk/dBrain/ndBrainLayerConvolutional_2d.h
@@ -45,6 +45,7 @@ class ndBrainLayerConvolutional_2d : public ndBrainLayer
virtual const char* GetLabelId() const;
virtual void Blend(const ndBrainLayer& src, ndBrainFloat blend);

virtual void UpdateDropOut();
virtual void InitWeightsXavierMethod();
virtual void InitWeights(ndBrainFloat weighVariance, ndBrainFloat biasVariance);
virtual void AdamUpdate(const ndBrainLayer& u, const ndBrainLayer& v, ndBrainFloat epsilon);
105 changes: 0 additions & 105 deletions newton-4.00/sdk/dBrain/ndBrainLayerDropOut.cpp

This file was deleted.

47 changes: 0 additions & 47 deletions newton-4.00/sdk/dBrain/ndBrainLayerDropOut.h

This file was deleted.

1 change: 0 additions & 1 deletion newton-4.00/sdk/dBrain/ndBrainLayerLeakyReluActivation.cpp
@@ -52,7 +52,6 @@ ndBrainLayer* ndBrainLayerLeakyReluActivation::Load(const ndBrainLoad* const loa
char buffer[1024];
loadSave->ReadString(buffer);

ndAssert(0);
loadSave->ReadString(buffer);
ndInt32 inputs = loadSave->ReadInt();

11 changes: 11 additions & 0 deletions newton-4.00/sdk/dBrain/ndBrainLayerLinear.cpp
@@ -28,14 +28,20 @@ ndBrainLayerLinear::ndBrainLayerLinear(ndInt32 inputs, ndInt32 outputs)
:ndBrainLayer()
,m_bias()
,m_weights(outputs, inputs)
,m_dropOut()
,m_dropOutProbability(ndBrainFloat(1.0f))
{
m_dropOut.SetCount(outputs);
m_bias.SetCount(outputs);
m_dropOut.Set(ndBrainFloat(1.0f));
}

ndBrainLayerLinear::ndBrainLayerLinear(const ndBrainLayerLinear& src)
:ndBrainLayer(src)
,m_bias(src.m_bias)
,m_weights(src.m_weights)
,m_dropOut(src.m_dropOut)
,m_dropOutProbability(src.m_dropOutProbability)
{
}

@@ -253,6 +259,11 @@ ndBrainLayer* ndBrainLayerLinear::Load(const ndBrainLoad* const loadSave)
return layer;
}

void ndBrainLayerLinear::UpdateDropOut()
{
ndAssert(0);
}

void ndBrainLayerLinear::MakePrediction(const ndBrainVector& input, ndBrainVector& output) const
{
m_weights.Mul(input, output);
3 changes: 3 additions & 0 deletions newton-4.00/sdk/dBrain/ndBrainLayerLinear.h
@@ -44,6 +44,7 @@ class ndBrainLayerLinear : public ndBrainLayer
virtual ndBrainVector* GetBias();
virtual ndBrainMatrix* GetWeights();

virtual void UpdateDropOut();
virtual void InitWeightsXavierMethod();
virtual void InitWeights(ndBrainFloat weighVariance, ndBrainFloat biasVariance);

@@ -72,6 +73,8 @@

ndBrainVector m_bias;
ndBrainMatrix m_weights;
ndBrainVector m_dropOut;
ndBrainFloat m_dropOutProbability;
};


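
Note that the new state is inert for now: the constructor fills m_dropOut with 1.0 and m_dropOutProbability defaults to 1.0, so predictions are unchanged until UpdateDropOut() stops asserting (the 1.0 defaults suggest the member is a keep probability rather than a drop probability, and the sketch below treats it that way). One plausible way the stub could refresh the mask, written as a free function so no SDK internals are assumed (mask stands in for m_dropOut, keepProbability for m_dropOutProbability):

#include <cstdlib>

// Sketch of the mask refresh the stub could perform (illustrative, not the
// committed implementation).
static void RefreshDropOutMask(float* const mask, int count, float keepProbability)
{
    for (int i = 0; i < count; ++i)
    {
        // uniform sample in [0, 1); std::rand keeps the sketch dependency free
        const float r = float(std::rand()) / float(RAND_MAX);
        mask[i] = (r < keepProbability) ? 1.0f / keepProbability : 0.0f;
    }
}

MakePrediction would then presumably multiply its output by the mask after the existing m_weights.Mul(input, output) step; with the all-ones default that multiply is a no-op.
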
5 changes: 0 additions & 5 deletions newton-4.00/sdk/dBrain/ndBrainSaveLoad.cpp
@@ -23,7 +23,6 @@
#include "ndBrain.h"
#include "ndBrainSaveLoad.h"
#include "ndBrainLayerLinear.h"
#include "ndBrainLayerDropOut.h"
#include "ndBrainLayerReluActivation.h"
#include "ndBrainLayerTanhActivation.h"
#include "ndBrainLayerConvolutional_2d.h"
@@ -104,10 +103,6 @@ ndBrain* ndBrainLoad::Load() const
{
layer = ndBrainLayerLinear::Load(this);
}
if (!strcmp(layerType, "ndBrainLayerDropOut"))
{
layer = ndBrainLayerDropOut::Load(this);
}
else if (!strcmp(layerType, "ndBrainLayerReluActivation"))
{
layer = ndBrainLayerReluActivation::Load(this);
