From 8a133300c260e836a6424ab240c82e8434b3aa68 Mon Sep 17 00:00:00 2001
From: rtalbi <dr_talbi@esi.dz>
Date: Fri, 17 Dec 2021 17:29:26 +0100
Subject: [PATCH] Non-privacy-preserving neural networks

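Replace the backpropagate_layer stub with a per-layer implementation,
add an NN::backpropagate method that builds one-hot labels for a
mini-batch and trains the layers in reverse order, and drop the
sgdWorkers parameter from the NN constructor.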
---
 ML/NN/NN.cpp | 53 +++++++++++++++++++++++++++++++++++++++++++++++--------
 ML/NN/NN.h   |  7 ++++++-
 2 files changed, 51 insertions(+), 9 deletions(-)

diff --git a/ML/NN/NN.cpp b/ML/NN/NN.cpp
index 85780c41..248c82f0 100644
--- a/ML/NN/NN.cpp
+++ b/ML/NN/NN.cpp
@@ -6,7 +6,7 @@
 
 
 NN::NN(double alpha, int epochs, int batchSize, float th, DatasetReader *dt, string logfile,
-       bool debug, string mainpath, int sgdWorkers) {
+       bool debug, string mainpath) {
 
 
     trainTime =0.0;
@@ -97,17 +97,54 @@ int NN::predict(Record *r, bool test ) {
 
 
 
-vector<float> backpropagate_layer(vector<float> XB) {
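+// Train each neuron of one layer on the batch and collect the targets
+// that will be propagated to the previous layer.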
+vector<vector<float>> NN::backpropagate_layer(vector<neuron*> layer, vector<vector<float>> XB, vector<vector<float>> ytrue) {
 
-
-    vector <float > diff;
-    return diff;
+    vector<vector<float>> new_output_layer;
+    for(int i = 0; i < layer.size(); i++) // todo: check that ytrue holds one target vector per neuron
+    {
+        layer[i]->train(XB, ytrue[i]);
+        vector<float> new_output_neuron = layer[i]->new_output;
+        new_output_layer.push_back(new_output_neuron);
+    }
+    return new_output_layer;
 
 }
 
-void NN::train ()
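+// Backpropagate one mini-batch: forward the records, build one-hot
+// targets, then train the layers from the output back to the input.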
+void NN::backpropagate(vector<Record *> XB) {
+
+    forward(XB); // todo: define a forward() overload that processes a whole batch of records
+    vector<vector<float>> R;     // feature vectors of the batch
+    vector<vector<float>> ytrue; // one-hot labels of the batch
+    int dim = XB[0]->values.size() - 1; // index of the class label in each record
+    for(int i=0; i<XB.size(); i++)
+    {
+
+        vector<float> r(XB[i]->values.begin(), XB[i]->values.end());
+        r.pop_back(); // drop the label, keeping features only
+        R.push_back(r);
+        std::vector<float> hot_label(2); // todo: the number of classes is hard-coded to 2
+        hot_label[(int) XB[i]->values[dim]] = 1;
+        hot_label[1 - (int) XB[i]->values[dim]] = 0;
+        ytrue.push_back(hot_label);
+
+    }
+
+
+
+    for(int j = network.size() - 1; j >= 0; j--)
+    {
+        vector<vector<float>> new_output_layer = backpropagate_layer(network[j], R, ytrue); // todo: pass the previous layer's output from the forward pass instead of the raw batch R
+        ytrue = new_output_layer;
+    }
+
+}
+void NN::train () // todo: use backpropagate() in the training loop below
 {
-    int counter =0;
+    int counter =0; // use the backpropagation function here
     int recordCounter =0;
     int sizeBatch=batchSize;
     int size = dt->train_size;
@@ -174,7 +211,7 @@ void NN::Test( ){
     this->testTime = duration.count(); //- removeTime;
     cout << this->testTime << endl;
     classOutput.close();
-}
+} // todo: use the forward function here
 
 
 
diff --git a/ML/NN/NN.h b/ML/NN/NN.h
index c5cb835f..6b260024 100644
--- a/ML/NN/NN.h
+++ b/ML/NN/NN.h
@@ -52,7 +52,12 @@ public :
     int predict (Record *r, bool test);
 
 public :
-    vector<float> backpropagate_layer(vector<float> XB);
+    vector<vector<float>> backpropagate_layer(vector<neuron*> layer, vector<vector<float>> XB, vector<vector<float>> ytrue);
+
+
+public :
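+    // runs backpropagation over one mini-batch of records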
+    void backpropagate(vector<Record *> XB);
 
 public :
     vector<float> forward_layer(vector<neuron*> layer, vector<float> x, bool test);
-- 
GitLab