Commit 64b78c68 authored by rtalbi

non-privacy-preserving neural networks

parent 8a133300
@@ -58,13 +58,13 @@ NN::NN(double alpha, int epochs, int batchSize, float th, DatasetReader *dt, ...
 }
 
-vector<float> NN::forward_layer(vector<neuron*> layer, vector<float> x, bool test ){
-    vector<float> res;
+vector<vector<float>> NN::forward_layer(vector<neuron*> layer, vector<vector<float>> x, bool test ){
+    vector<vector<float>> res;
     for (int j=0; j < layer.size(); j++)
     {
         neuron *n = layer[j];
-        res.push_back(n->predict(x,test));
+        res.push_back(n->predict_batch(x,test));
     }
     return res;
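
The hunk above turns the per-sample forward pass into a per-batch one: each neuron now consumes the whole batch through predict_batch and contributes one row of layer output. A compilable sketch of the per-neuron contract this relies on (not part of the commit; the weight layout, sigmoid activation, and the previous_input cache are assumptions inferred from later hunks):

#include <cmath>
#include <vector>
using namespace std;

struct neuron {
    vector<float> weights;                  // one weight per input; assumed layout
    vector<vector<float>> previous_input;   // cached batch, reused by backprop
    vector<float> predict_batch(const vector<vector<float>>& xb, bool test) {
        if (!test) previous_input = xb;     // cache only during training
        vector<float> out;
        for (const auto& x : xb) {          // one activation per sample
            float z = 0.0f;
            for (size_t i = 0; i < weights.size() && i < x.size(); i++)
                z += weights[i] * x[i];
            out.push_back(1.0f / (1.0f + exp(-z)));  // sigmoid; assumed
        }
        return out;
    }
};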
@@ -72,36 +72,55 @@ vector<float> NN::forward_layer(vector<neuron*> layer, vector<float> x, bool test ...
 }
 
-int NN::predict(Record *r, bool test ) {
-    vector<float> x = vector<float> (r->values.begin(), r->values.end());
+vector<int> NN::predict(vector<Record *>R, bool test ) {
+    vector<vector<float>> XB;
+    for (int i=0; i < R.size(); i++)
+    {
+        Record *r = R[i];
+        vector<float> x = vector<float> (r->values.begin(), r->values.end());
+        XB.push_back(x);
+    }
     for (int i=0; i < network.size(); i++)
     {
-        x = forward_layer(network[i], x, test );
+        XB = forward_layer(network[i], XB, test );
     }
-    float max = -1.0;
-    int argmax =0;
-    for (int j=0; j < x.size(); j++)
+    vector<int> res;
+    for (int j=0; j < XB.size(); j++)
     {
-        if (x[j]>max)
-        {
-            max = x[j];
-            argmax = j;
-        }
+        vector<float> x = XB[j];
+        float max = -1.0;
+        int argmax =0;
+        for (int k=0; k < x.size(); k++) {
+            if (x[k]>max)
+            {
+                max = x[k];
+                argmax = k;
+            }
+        }
+        res.push_back(argmax);
     }
-    return argmax;
+    return res;
 }
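
The rewritten predict decodes each sample's final score vector with an argmax scan seeded at max = -1.0, which silently picks class 0 whenever every score is negative; comparing against the current best element avoids the sentinel entirely. A standalone sketch of that decode step (illustrative, not code from the commit), assuming scores[j] holds the output scores for sample j as in the hunk above:

#include <vector>
using namespace std;

vector<int> argmax_rows(const vector<vector<float>>& scores) {
    vector<int> labels;
    for (const auto& row : scores) {
        int argmax = 0;
        for (size_t k = 1; k < row.size(); k++)
            if (row[k] > row[argmax]) argmax = (int)k;  // no sentinel needed
        labels.push_back(argmax);
    }
    return labels;
}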
-vector<vector<float>> NN::backpropagate_layer(vector<neuron*> layer, vector<vector<float>> XB,vector<vector<float>> ytrue) {
+vector<vector<float>> NN::backpropagate_layer(vector<neuron*> layer, vector<vector<float>> ytrue) {
     vector<vector<float>> new_output_layer;
     for(int i=0; i < ytrue.size(); i++)
     {
+        vector<vector<float>> XB = layer[i]->previous_input;
         layer[i]->train(XB, ytrue[i]);
         vector <float > new_output_neuron = layer[i]->new_output;
         new_output_layer.push_back(new_output_neuron);
@@ -110,9 +129,11 @@ vector<vector<float>> NN::backpropagate_layer(...)
 }
 
 void NN::backpropagate(vector<Record *> XB){
-    forward(XB); //todo define function for forwarding a batch of records
+    vector<int> prediction = predict(XB, false);
     vector<vector<float>> R;
     vector<vector<float>>ytrue;
     int dim = XB[0]->values.size()-1;
@@ -133,7 +154,7 @@ void NN::backpropagate(vector<Record *> XB){
     for(int j= network.size()-1; j>=0; j-- )
     {
-        vector<vector<float>> new_output_layer = backpropagate_layer(network[j],ytrue); //todo remove the record from this method and replace it with the previous input resulting from the forward
+        vector<vector<float>> new_output_layer = backpropagate_layer(network[j],ytrue);
        ytrue = new_output_layer;
     }
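
These two hunks drop the XB parameter because every neuron already cached the batch it saw during the forward pass in previous_input, so the backward sweep only needs the target signal, which each layer rewrites for the layer below it. A compilable sketch of that contract (the behavior of neuron::train and new_output is assumed from the call sites, not shown in this commit):

#include <vector>
using namespace std;

struct neuron {
    vector<vector<float>> previous_input;  // cached by the forward pass
    vector<float> new_output;              // target signal for the layer below
    void train(const vector<vector<float>>& xb, const vector<float>& ytrue) {
        // real weight update elided; placeholder keeps the sketch compilable
        new_output = ytrue;
    }
};

// One backward step: each neuron trains on its own cached input and hands
// back the targets for the previous layer.
vector<vector<float>> backpropagate_layer(vector<neuron*>& layer,
                                          const vector<vector<float>>& ytrue) {
    vector<vector<float>> below;
    for (size_t i = 0; i < ytrue.size() && i < layer.size(); i++) {
        layer[i]->train(layer[i]->previous_input, ytrue[i]);
        below.push_back(layer[i]->new_output);
    }
    return below;
}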
@@ -145,6 +166,7 @@ void NN::train () //
     int sizeBatch=batchSize;
     int size = dt->train_size;
     Record * record;
+    vector<Record*> XB;
     extTrainBd = 0;
     map<int, vector <Record*>> workerBatches;
@@ -153,6 +175,41 @@ void NN::train () //
     for (int epochCpt = 0; epochCpt < epochs ; epochCpt ++ ) {
+        while (counter < size) {
+            if (size - counter < batchSize)
+                sizeBatch = size - counter;
+            for (recordCounter = 0; recordCounter < sizeBatch; recordCounter++) {
+                try {
+                    record = dt->getTrainRecord();
+                    XB.push_back(record);
+                    extTrainBd += record->values.size() + 1;
+                    counter++;
+                }
+                catch (std::exception const &e) {
+                    cout << e.what() << endl;
+                }
+            }
+            backpropagate(XB);
+            for (int i = 0; i < XB.size(); i++) {
+                delete XB[i];
+            }
+            XB.clear();
+        }
+        counter = 0;
 }
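
The new training loop slices the record stream into mini-batches and shrinks the final batch when train_size is not a multiple of batchSize. Note that sizeBatch is never reset to batchSize afterwards, so once a partial batch occurs, every batch in later epochs stays at the partial size. A self-contained sketch of the slicing logic using min(), which sidesteps that (names illustrative, not from the repository):

#include <algorithm>
#include <iostream>
#include <vector>
using namespace std;

int main() {
    int size = 10, batchSize = 4;          // 10 records -> batches of 4, 4, 2
    for (int counter = 0; counter < size; ) {
        int sizeBatch = min(batchSize, size - counter);  // recomputed per batch
        vector<int> XB;                    // stands in for vector<Record*>
        for (int r = 0; r < sizeBatch; r++) XB.push_back(counter++);
        cout << "batch of " << XB.size() << "\n";  // backpropagate(XB) goes here
        XB.clear();                        // the commit deletes records, then clears
    }
    return 0;
}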
@@ -164,6 +221,7 @@ void NN::train () //
 }
 
+//todo : transform this method so that prediction happens for a test batch
 void NN::Test( ){
     int counter =0;
@@ -178,16 +236,6 @@ void NN::Test( ){
     auto begin = chrono::high_resolution_clock::now();
     while (counter < size) {
-        try {
-            record = dt->getTestRecord();
-            //record->print();
-            extTestBd += sizeof(int)*record->values.size();
-        }
-        catch (std::exception const &e) {
-            //std::cout << "Exception: " << e.what() << "\n";
-        }
         counter++;
@@ -207,7 +255,7 @@ void NN::Test( ){
     this->testTime = duration.count(); //- removeTime;
     cout << this->testTime << endl;
     classOutput.close();
-} //todo use the forward function here
+}
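
The todo added above Test() points at batching the test loop the way train() now is. A hedged sketch of that shape reusing the new batched predict; the stub predict, the test_accuracy helper, and the assumption that the class label sits in the last slot of Record::values (suggested by backpropagate's values.size()-1) are all illustrative, not code from this repository:

#include <algorithm>
#include <vector>
using namespace std;

struct Record { vector<float> values; };

// Stub standing in for the new NN::predict(vector<Record*>, bool).
vector<int> predict(vector<Record*> R, bool test) {
    return vector<int>(R.size(), 0);
}

// Batched accuracy loop over the test set, one predict call per batch.
float test_accuracy(vector<Record*>& testSet, int batchSize) {
    int correct = 0;
    for (size_t start = 0; start < testSet.size(); start += (size_t)batchSize) {
        size_t end = min(start + (size_t)batchSize, testSet.size());
        vector<Record*> XB(testSet.begin() + start, testSet.begin() + end);
        vector<int> pred = predict(XB, true);
        for (size_t i = 0; i < XB.size(); i++)
            if (pred[i] == (int)XB[i]->values.back()) correct++;  // label in last slot (assumed)
    }
    return testSet.empty() ? 0.0f : (float)correct / testSet.size();
}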
@@ -49,17 +49,18 @@ public :
     NN (double alpha, int epochs, int batchSize, float th, DatasetReader * dt, string logfile, bool debug, string mainpath);
 public :
-    int predict (Record *r, bool test);
+    vector<int> predict(vector<Record *>R, bool test );
 public :
-    vector<vector<float>> backpropagate_layer(vector<neuron*> layer, vector<vector<float>> XB,vector<vector<float>> ytrue);
+    vector<vector<float>> backpropagate_layer(vector<neuron*> layer, vector<vector<float>> ytrue);
 public :
     void backpropagate(vector<Record *> XB);
 public :
-    vector<float> forward_layer(vector<neuron*> layer, vector<float> x, bool test);
+    vector<vector<float>> forward_layer(vector<neuron*> layer, vector<vector<float>> x, bool test );
 public :
     void train ();
......