added backendfiles
balamma committed Dec 3, 2021
1 parent f6641ea commit 506b1f2
Showing 1,406 changed files with 237,922 additions and 0 deletions.
12 changes: 12 additions & 0 deletions experiment/simulation/backend/Dockerfile
@@ -0,0 +1,12 @@
FROM ubuntu:18.04

RUN apt update && \
    apt install -y octave python3 python3-pip

RUN octave --eval 'pkg install -forge nnet'

COPY . /

RUN pip3 install -r requirements.txt

CMD python3 server.py
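
For reference, a minimal sketch of building and running this image from the experiment/simulation/backend directory (the som-backend tag is an assumption, and the port to publish with docker run -p depends on how server.py is configured, which is not shown here):

    # build the image and start the backend container (illustrative only)
    docker build -t som-backend .
    docker run --rm som-backend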
117 changes: 117 additions & 0 deletions experiment/simulation/backend/exp_clnn/clnnMappingCore.m
@@ -0,0 +1,117 @@
function [] = clnnMappingCore(iterationStepSize)
global filename;

%X: Matrix of input vectors (N x d):
%   Each input vector is a row vector.
%N: Number of input vectors.
%d: Dimension of each input vector.

%gridLength: Number of neurons per side (assuming a square grid of neurons).
%numIterations: Total number of iterations / time-steps.
% In this incremental version these quantities are not passed as arguments;
% they are restored from the saved state file loaded below.

% SOM/Kohonen map: K neurons arranged in a square LxL layout (K = L^2),
% mapping a square 2-D input region.

load(strcat(filename, ".dat"));
maxIterationIndex = min(currentIterationIndex+iterationStepSize-1, numIterations);
% Echo the iteration range to the console (no terminating semicolons).
currentIterationIndex
maxIterationIndex

% For each iteration
for i=currentIterationIndex:maxIterationIndex
ri=[];
disp('Iteration number:')
disp(i)

rnd = randperm(N);

% For N input vectors.
for p=1:N
j=rnd(p);

% Compute distance.
dist = (W - repmat(X(j,:),K,1) ).^2 ;
tsumdist = sum(dist,2);

% Find winning neuron.
[mindist, ind] = min(tsumdist);

% 2D index of winning neuron.
ri(j,1) = I(ind);
ri(j,2) = J(ind);


% Gaussian neighbourhood function, scaled by the learning rate etaT:
% the update weight for each neuron decays with its squared grid
% distance from the winning neuron.
dist = 1/(sqrt(2*piConst)*sigT).*exp( sum(( ([I( : ), J( : )]- repmat([ri(j,1), ri(j,2)], K,1)) .^2) ,2)/(-2*sigT)) * etaT;


% Move each weight vector towards the current input, in proportion to
% its neighbourhood weight.
W = W + repmat(dist(:),1,d).*(repmat(X(j,:),K,1) - W);

% Clear temp variables.
clear j dist tsumdist mindist ind;

end

clear ri rnd;
% Update neighbourhood function.
sigT = sig0*exp(-i/tau1);

% Update the learning rate.
etaT = eta0*exp(-i/tau2);
end

currentIterationIndex = i+1;
clear iterationStepSize;

% Connecting the adjacent nodes in the 2D map and plotting.
%figure;
axes('FontSize', 25);

subplot(1,2,1);
plot(X(:,1), X(:,2), 'ko', "markersize", 15);
xlabel('x-coordinate of data');
ylabel('y-coordinate of data');
title('Data distribution');
xlim([-0.1 1.1]);
ylim([-0.1 1.1]);
axis 'equal'

subplot(1,2,2);
for k=1:K
wdist = indx - repmat(indx(k,:),K,1);
wdistSqr = sum(wdist.*wdist, 2);
neighbourIndex = find(wdistSqr == 1);

%k
%neighbourIndex
%pause
numNeighbours = length(neighbourIndex);

for kk=1:numNeighbours
wtemp(1,:) = W(k,:);
ind = neighbourIndex(kk);
wtemp(2,:) = W(ind,:);
plot(wtemp(:,1), wtemp(:,2), 'k');
hold on;
clear wtemp ind;
end
end

clear k wdist wdistSqr neighbourIndex numNeighbours kk wtemp ind;
% Index of the last completed iteration (used in the plot title).
k=currentIterationIndex-1;
plot(W(:,1), W(:,2), 'r*', "markersize", 15);
hold off;
xlabel('x-coordinate of weight vector');
ylabel('y-coordinate of weight vector');
title(['SOM after iteration #' num2str(k)]);
xlim([-0.1 1.1]);
ylim([-0.1 1.1]);
axis 'equal'


% Save figure;
print(strcat(filename, ".png"));

% Save state of variables.
save(strcat(filename, ".dat"));
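
This core resumes training from a saved workspace rather than taking the data as arguments, so a state file must be seeded before the first call. A minimal, hypothetical driver sketch (the real initialisation script lives elsewhere in this commit and may differ; the variable names below simply mirror what the load/save in clnnMappingCore.m expects):

    % Hypothetical seed script for clnnMappingCore (illustrative only).
    global filename;
    filename = "som_state";            % assumed state-file prefix

    X = rand(500, 2);                  % example 2-D inputs in the unit square
    [N, d] = size(X);
    L = 10;  K = L^2;                  % 10x10 grid of neurons
    numIterations = 100;

    W = rand(K, d);                    % random initial weights
    [I, J] = ind2sub([L, L], 1:K);     % neuron indices in (i,j) form
    indx = [I(:), J(:)];

    piConst = pi;                      % constant used in the neighbourhood function
    sig0 = floor(K/5);  sigT = sig0;   % initial neighbourhood width
    tau1 = numIterations;              % decay constant for sigT
    eta0 = 0.1;  etaT = eta0;          % initial learning rate
    tau2 = 2*numIterations;            % decay constant for etaT

    currentIterationIndex = 1;
    save(strcat(filename, ".dat"));    % seed the state file

    clnnMappingCore(20);               % advance the map by 20 iterations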

143 changes: 143 additions & 0 deletions experiment/simulation/backend/exp_clnn/clnn_mapping_2D_2D.m
@@ -0,0 +1,143 @@
function [] = clnn_mapping_2D_2D(X, gridLength, numIterations)

%X: Matrix of input vectors (N X d):
% Each input vector is a row vector.
%N: Number of input vectors.
%d: Dimension of each input vector.

%gridLength: Number of neurons per side (assuming a square grid of neurons).
%numIterations: Total number of iterations / time-steps


% SOM/Kohonen map: K neurons arranged in a square LxL layout (K = L^2),
% mapping a square 2-D input region.



[N,d] = size(X);
X = X/max(max(abs(X)));
L = gridLength;
K = L^2;
T = numIterations;


figure;
plot(X(:,1), X(:,2), 'k*');
pause



%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Variables and setup

% Approximate value of pi (note: this assignment shadows Octave's built-in pi).
pi = 3.1416;

% Initialize weights
% Each weight vector is a row vector.
% Weight index is a number, which can be converted into (i,j) form.
W = rand(K,d);

% Neuron index in (i,j) form.
[I,J] = ind2sub([L, L], 1:K);

indx(:,1) = I(:);
indx(:,2) = J(:);


% Number of neighbourhood neurons which need to be updated.
sig0 = floor(K/5);

% This number should be updated after every iteration/epoch.
sigT = sig0;

% Constant for updating sigT.
tau1 = T;

% Learning rate.
eta0 = 0.1;
etaT = eta0;

% Constant for updating etaT.
tau2 = 2*T;

% dT: Plotting interval, in iterations/time-steps.
% For example, if dT=10, the plot is refreshed once every 10 iterations.
dT = 20;



% For each iteration
for i=1:T
ri=[];
disp('Iteration number:')
disp(i)

rnd = randperm(N);

% For N input vectors.
for p=1:N
j=rnd(p);

% Compute distance.
dist = (W - repmat(X(j,:),K,1) ).^2 ;
tsumdist = sum(dist,2);

% Find winning neuron.
[mindist, ind] = min(tsumdist);

% 2D index of winning neuron.
ri(j,1) = I(ind);
ri(j,2) = J(ind);


% Gaussian neighbourhood function, scaled by the learning rate etaT:
% the update weight for each neuron decays with its squared grid
% distance from the winning neuron.
dist = 1/(sqrt(2*pi)*sigT).*exp( sum(( ([I( : ), J( : )]- repmat([ri(j,1), ri(j,2)], K,1)) .^2) ,2)/(-2*sigT)) * etaT;


% Move each weight vector towards the current input, in proportion to
% its neighbourhood weight.
W = W + repmat(dist(:),1,d).*(repmat(X(j,:),K,1) - W);

% Plotting weights


end

% Update neighbourhood function.
sigT = sig0*exp(-i/tau1);

% Update the learning rate.
etaT = eta0*exp(-i/tau2);

if mod(i,dT) == dT-1
for k=1:K
wdist = indx - repmat(indx(k,:),K,1);
wdistSqr = sum(wdist.*wdist, 2);
neighbourIndex = find(wdistSqr == 1);

%k
%neighbourIndex
%pause
numNeighbours = length(neighbourIndex);

for kk=1:numNeighbours
wtemp(1,:) = W(k,:);
ind = neighbourIndex(kk);
wtemp(2,:) = W(ind,:);
plot(wtemp(:,1), wtemp(:,2), 'k');
hold on;
clear wtemp ind;
end
end
plot(W(:,1), W(:,2), 'r*');
hold off;
pause
end

% Plotting weights
%if mod(i,dT) == dT-1
% labels = num2str(C(:));
% text(ri(:,1), ri(:,2), labels);
% pause
%end
end
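
A minimal usage sketch for this self-contained variant (the data below is illustrative, not part of the commit): train a 10x10 map on 500 uniformly distributed 2-D points for 200 iterations, pressing a key at each pause to step through the plots.

    X = rand(500, 2);                  % illustrative 2-D data in the unit square
    clnn_mapping_2D_2D(X, 10, 200);    % 10x10 grid, 200 iterations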

