diff --git a/ML/NN/NN.cpp b/ML/NN/NN.cpp
index 85780c411e965b1647ccc1bfa4c56da4db9916b0..248c82f099ae2633832698150a1db6e7e9dcbef3 100644
--- a/ML/NN/NN.cpp
+++ b/ML/NN/NN.cpp
@@ -6,7 +6,7 @@
 
 
 NN::NN(double alpha, int epochs, int batchSize, float th, DatasetReader *dt, string logfile,
-       bool debug, string mainpath, int sgdWorkers) {
+       bool debug, string mainpath) {
 
 
     trainTime =0.0;
@@ -97,17 +97,50 @@ int NN::predict(Record *r, bool test ) {
 
 
 
-vector<float> backpropagate_layer(vector<float> XB) {
+vector<vector<float>> NN::backpropagate_layer(vector<neuron*> layer, vector<vector<float>> XB,vector<vector<float>> ytrue) {
 
-
-    vector <float > diff;
-    return diff;
+    vector<vector<float>> new_output_layer;
+    for(size_t i=0; i < ytrue.size(); i++) // size_t avoids signed/unsigned comparison; assumes ytrue.size() <= layer.size()
+    {
+        layer[i]->train(XB, ytrue[i]);
+        vector <float > new_output_neuron = layer[i]->new_output;
+        new_output_layer.push_back(new_output_neuron);
+    }
+    return new_output_layer;
 
 }
 
-void NN::train ()
+void NN::backpropagate(vector<Record *> XB){
+    if (XB.empty()) return; // guard: XB[0] is dereferenced below, so an empty batch would crash
+    forward(XB); //todo define function for forwarding a batch of records
+    vector<vector<float>> R;
+    vector<vector<float>>ytrue;
+    int dim = XB[0]->values.size()-1;
+    for(int i=0; i<XB.size(); i++)
+    {
+
+        vector<float> r =  vector<float> (XB[i]->values.begin(), XB[i]->values.end());
+        r.pop_back();
+        R.push_back(r);
+        std::vector<float> hot_label(2); // hard coded the number of classes
+        hot_label[static_cast<int>(XB[i]->values[dim])] = 1;     // explicit cast: values holds floats, operator[] needs an index
+        hot_label[1 - static_cast<int>(XB[i]->values[dim])] = 0;
+        ytrue.push_back(hot_label);
+
+    }
+
+
+
+    for(int j= network.size()-1; j>=0; j-- )
+    {
+        vector<vector<float>> new_output_layer = backpropagate_layer(network[j], R, ytrue); // pass R: the declared signature takes (layer, XB, ytrue); todo replace R with the per-layer activations produced by forward()
+        ytrue = new_output_layer;
+    }
+
+}
+void NN::train () //
 {
-    int counter =0;
+    int counter =0; // use the backpropagation function here
     int recordCounter =0;
     int sizeBatch=batchSize;
     int size = dt->train_size;
@@ -174,7 +207,7 @@ void NN::Test( ){
     this->testTime = duration.count(); //- removeTime;
     cout << this->testTime << endl;
     classOutput.close();
-}
+} //todo use the forward function here
 
 
 
diff --git a/ML/NN/NN.h b/ML/NN/NN.h
index c5cb835f7bc3d84837f30e01b5c940f0c42aa255..6b260024fbc9d002619ebbbcfed4e2cc9802c559 100644
--- a/ML/NN/NN.h
+++ b/ML/NN/NN.h
@@ -52,7 +52,11 @@ public :
     int predict (Record *r, bool test);
 
 public :
-    vector<float> backpropagate_layer(vector<float> XB);
+    vector<vector<float>> backpropagate_layer(vector<neuron*> layer, vector<vector<float>> XB,vector<vector<float>> ytrue);
+
+
+public :
+    void backpropagate(vector<Record *> XB);
 
 public :
     vector<float> forward_layer(vector<neuron*> layer, vector<float> x, bool test);