Skip to content
Snippets Groups Projects
Commit 8a133300 authored by rtalbi's avatar rtalbi
Browse files

non-privacy-preserving neural networks

parent aed036a8
No related branches found
No related tags found
No related merge requests found
...@@ -6,7 +6,7 @@ ...@@ -6,7 +6,7 @@
NN::NN(double alpha, int epochs, int batchSize, float th, DatasetReader *dt, string logfile, NN::NN(double alpha, int epochs, int batchSize, float th, DatasetReader *dt, string logfile,
bool debug, string mainpath, int sgdWorkers) { bool debug, string mainpath) {
trainTime =0.0; trainTime =0.0;
...@@ -97,17 +97,50 @@ int NN::predict(Record *r, bool test ) { ...@@ -97,17 +97,50 @@ int NN::predict(Record *r, bool test ) {
// Back-propagates one layer: trains every neuron of `layer` against its
// target row in `ytrue` and collects the per-neuron `new_output` vectors,
// which serve as the targets for the previous (upstream) layer.
//
// layer  - neurons of the layer being updated
// XB     - batch of inputs fed to this layer during the forward pass
// ytrue  - one target vector per neuron; assumed to have exactly one entry
//          per neuron in `layer` -- TODO(review): confirm with callers
// returns one `new_output` vector per neuron, in layer order.
vector<vector<float>> NN::backpropagate_layer(vector<neuron*> layer, vector<vector<float>> XB,vector<vector<float>> ytrue) {
    vector<vector<float>> new_output_layer;
    new_output_layer.reserve(ytrue.size());          // one result per target row
    for (size_t i = 0; i < ytrue.size(); ++i) {      // size_t: no signed/unsigned mismatch
        // neuron::train updates the neuron's weights and refreshes new_output.
        layer[i]->train(XB, ytrue[i]);
        // Push the neuron's output directly; the original copied it into a
        // local temporary first, costing an extra vector copy per neuron.
        new_output_layer.push_back(layer[i]->new_output);
    }
    return new_output_layer;
}
void NN::train () void NN::backpropagate(vector<Record *> XB){
forward(XB); //todo define function for forwarding a batch of records
vector<vector<float>> R;
vector<vector<float>>ytrue;
int dim = XB[0]->values.size()-1;
for(int i=0; i<XB.size(); i++)
{
vector<float> r = vector<float> (XB[i]->values.begin(), XB[i]->values.end());
r.pop_back();
R.push_back(r);
std::vector<float> hot_label(2); // hard coded the number of classes
hot_label[XB[i]->values[dim]] =1 ;
hot_label[1-XB[i]->values[dim]] =0;
ytrue.push_back(hot_label);
}
for(int j= network.size()-1; j>=0; j-- )
{
vector<vector<float>> new_output_layer = backpropagate_layer(network[j],ytrue); //todo remove the record from this method and replace it with the previous input resulting from the forward
ytrue = new_output_layer;
}
}
void NN::train () //
{ {
int counter =0; int counter =0; // use the backprpagation function here
int recordCounter =0; int recordCounter =0;
int sizeBatch=batchSize; int sizeBatch=batchSize;
int size = dt->train_size; int size = dt->train_size;
...@@ -174,7 +207,7 @@ void NN::Test( ){ ...@@ -174,7 +207,7 @@ void NN::Test( ){
this->testTime = duration.count(); //- removeTime; this->testTime = duration.count(); //- removeTime;
cout << this->testTime << endl; cout << this->testTime << endl;
classOutput.close(); classOutput.close();
} } //todo use the forward function here
...@@ -52,7 +52,11 @@ public : ...@@ -52,7 +52,11 @@ public :
int predict (Record *r, bool test); int predict (Record *r, bool test);
public : public :
vector<float> backpropagate_layer(vector<float> XB); vector<vector<float>> backpropagate_layer(vector<neuron*> layer, vector<vector<float>> XB,vector<vector<float>> ytrue);
public :
void backpropagate(vector<Record *> XB);
public : public :
vector<float> forward_layer(vector<neuron*> layer, vector<float> x, bool test); vector<float> forward_layer(vector<neuron*> layer, vector<float> x, bool test);
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment