// (removed: web-page listing artifacts that preceded the source)
// LinearDiscriminantTraining.cpp: implementation of the LinearDiscriminantTraining class.
//CHEN ChangFeng's training class
//////////////////////////////////////////////////////////////////////
#include "LinearDiscriminantTraining.h"
#include "FeatureSelection.h"
#include "KNNDiscriminantTraining.h"
#include <fstream>
#include <iostream>
#include <vector>
#include <time.h>
using namespace std;
//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////
LinearDiscriminantTraining::LinearDiscriminantTraining()
{
int i;
FeatureSelection f;
f.OpenSampleFile("digit0.dat");
f.CharacVectorGeneration();
for(i=0;i<(f.trainingGroup).size();i++)
trainingGroup0.push_back(f.trainingGroup[i]);
for(i=0;i<(f.testGroup).size();i++)
testGroup0.push_back(f.testGroup[i]);
for(i=0;i<f.trainingGroup[0].size();i++)
w0.push_back(1);
f.OpenSampleFile("digit1.dat");
f.CharacVectorGeneration();
for(i=0;i<(f.trainingGroup).size();i++)
trainingGroup1.push_back(f.trainingGroup[i]);
for(i=0;i<(f.testGroup).size();i++)
testGroup1.push_back(f.testGroup[i]);
for(i=0;i<f.trainingGroup[0].size();i++)
w1.push_back(1);
f.OpenSampleFile("digit4.dat");
f.CharacVectorGeneration();
for(i=0;i<(f.trainingGroup).size();i++)
trainingGroup4.push_back(f.trainingGroup[i]);
for(i=0;i<(f.testGroup).size();i++)
testGroup4.push_back(f.testGroup[i]);
for(i=0;i<f.trainingGroup[0].size();i++)
w4.push_back(1);
f.OpenSampleFile("digit5.dat");
f.CharacVectorGeneration();
for(i=0;i<(f.trainingGroup).size();i++)
trainingGroup5.push_back(f.trainingGroup[i]);
for(i=0;i<(f.testGroup).size();i++)
testGroup5.push_back(f.testGroup[i]);
for(i=0;i<f.trainingGroup[0].size();i++)
w5.push_back(1);
f.OpenSampleFile("digit6.dat");
f.CharacVectorGeneration();
for(i=0;i<(f.trainingGroup).size();i++)
trainingGroup6.push_back(f.trainingGroup[i]);
for(i=0;i<(f.testGroup).size();i++)
testGroup6.push_back(f.testGroup[i]);
for(i=0;i<f.trainingGroup[0].size();i++)
w6.push_back(1);
f.OpenSampleFile("digit7.dat");
f.CharacVectorGeneration();
for(i=0;i<(f.trainingGroup).size();i++)
trainingGroup7.push_back(f.trainingGroup[i]);
for(i=0;i<(f.testGroup).size();i++)
testGroup7.push_back(f.testGroup[i]);
for(i=0;i<f.trainingGroup[0].size();i++)
w7.push_back(1);
f.OpenSampleFile("digit8.dat");
f.CharacVectorGeneration();
for(i=0;i<(f.trainingGroup).size();i++)
trainingGroup8.push_back(f.trainingGroup[i]);
for(i=0;i<(f.testGroup).size();i++)
testGroup8.push_back(f.testGroup[i]);
for(i=0;i<f.trainingGroup[0].size();i++)
w8.push_back(1);
f.OpenSampleFile("digit9.dat");
f.CharacVectorGeneration();
for(i=0;i<(f.trainingGroup).size();i++)
trainingGroup9.push_back(f.trainingGroup[i]);
for(i=0;i<(f.testGroup).size();i++)
testGroup9.push_back(f.testGroup[i]);
for(i=0;i<f.trainingGroup[0].size();i++)
w9.push_back(1);
classResult.open("RecongnizeResult.txt");
}
// Destructor: nothing to release explicitly — the vector members clean
// themselves up, and classResult is closed in TestingProcedure (the
// ofstream destructor closes it otherwise).
LinearDiscriminantTraining::~LinearDiscriminantTraining()
{
}
int LinearDiscriminantTraining::FeatureVectorProcess()
{
int i,j;
double h;
for(i=0;i<trainingGroup0.size();i++)
{
h =0;
for(j=1;j<trainingGroup0[i].size()-2;j++)
{
h += trainingGroup0[i][j];
}
for(j=1;j<trainingGroup0[i].size()-2;j++)
{
trainingGroup0[i][j] = trainingGroup0[i][j]/h;
}
}
for(i=0;i<trainingGroup1.size();i++)
{
h =0;
for(j=1;j<trainingGroup1[i].size()-2;j++)
{
h += trainingGroup1[i][j];
}
for(j=1;j<trainingGroup1[i].size()-2;j++)
{
trainingGroup1[i][j] = trainingGroup1[i][j]/h;
}
}
for(i=0;i<trainingGroup4.size();i++)
{
h =0;
for(j=1;j<trainingGroup4[i].size()-2;j++)
{
h += trainingGroup4[i][j];
}
for(j=1;j<trainingGroup4[i].size()-2;j++)
{
trainingGroup4[i][j] = trainingGroup4[i][j]/h;
}
}
for(i=0;i<trainingGroup5.size();i++)
{
h =0;
for(j=1;j<trainingGroup5[i].size()-2;j++)
{
h += trainingGroup0[i][j];
}
for(j=1;j<trainingGroup5[i].size()-2;j++)
{
trainingGroup5[i][j] = trainingGroup5[i][j]/h;
}
}
for(i=0;i<trainingGroup6.size();i++)
{
h =0;
for(j=1;j<trainingGroup6[i].size()-2;j++)
{
h += trainingGroup6[i][j];
}
for(j=1;j<trainingGroup6[i].size()-2;j++)
{
trainingGroup6[i][j] = trainingGroup6[i][j]/h;
}
}
for(i=0;i<trainingGroup7.size();i++)
{
h =0;
for(j=1;j<trainingGroup7[i].size()-2;j++)
{
h += trainingGroup7[i][j];
}
for(j=1;j<trainingGroup7[i].size()-2;j++)
{
trainingGroup7[i][j] = trainingGroup7[i][j]/h;
}
}
for(i=0;i<trainingGroup8.size();i++)
{
h =0;
for(j=1;j<trainingGroup8[i].size()-2;j++)
{
h += trainingGroup8[i][j];
}
for(j=1;j<trainingGroup8[i].size()-2;j++)
{
trainingGroup8[i][j] = trainingGroup8[i][j]/h;
}
}
for(i=0;i<trainingGroup9.size();i++)
{
h =0;
for(j=1;j<trainingGroup9[i].size()-2;j++)
{
h += trainingGroup9[i][j];
}
for(j=1;j<trainingGroup9[i].size()-2;j++)
{
trainingGroup9[i][j] = trainingGroup9[i][j]/h;
}
}
return 0;
}
int LinearDiscriminantTraining::TrainingProcedure()
{
int f,i;
// FeatureVectorProcess();
while(f<8*TRAININGNUM)
{
f =0;
f +=TrainingSingleClass(trainingGroup0,0,w0);
f +=TrainingSingleClass(trainingGroup1,1,w1);
f +=TrainingSingleClass(trainingGroup4,4,w4);
f +=TrainingSingleClass(trainingGroup5,5,w5);
f +=TrainingSingleClass(trainingGroup6,6,w6);
f +=TrainingSingleClass(trainingGroup7,7,w7);
f +=TrainingSingleClass(trainingGroup8,8,w8);
f +=TrainingSingleClass(trainingGroup9,9,w9);
}
cout <<"Finish Training"<<endl<<endl;
for(i=0;i<w0.size();i++)
{
cout<<w0[i]<<" ";
}
cout << endl;
for(i=0;i<w1.size();i++)
{
cout<<w1[i]<<" ";
}
cout << endl;
for(i=0;i<w4.size();i++)
{
cout<<w4[i]<<" ";
}
cout << endl;
for(i=0;i<w5.size();i++)
{
cout<<w5[i]<<" ";
}
cout << endl;
for(i=0;i<w6.size();i++)
{
cout<<w6[i]<<" ";
}
cout << endl;
for(i=0;i<w7.size();i++)
{
cout<<w7[i]<<" ";
}
cout << endl;
for(i=0;i<w8.size();i++)
{
cout<<w8[i]<<" ";
}
cout << endl;
for(i=0;i<w9.size();i++)
{
cout<<w9[i]<<" ";
}
cout << endl;
return 0;
}
// One training pass over `trainingGroup` for the class whose weight
// vector is `w` (class label `class_sign`, currently unused here).
// For each sample, the discriminant score is computed against all
// eight class weight vectors; a sample is correct only when its own
// score strictly beats the other seven (its own entry among the eight
// ties with `own`, so at most size-1 strict wins are possible).
// Misclassified samples nudge `w` toward the sample by STEP_LENGTH
// (fixed-increment correction). Returns the number of samples already
// classified correctly.
// BUG FIX: the original accumulated the dot products in `int`
// variables (`h`, `g`, `s`), truncating every double product to an
// integer; scores are now accumulated in doubles.
int LinearDiscriminantTraining::TrainingSingleClass(vector < vector<double> >& trainingGroup
,int class_sign,vector < double >& w)
{
    int i, j, t, m, n = 0;
    double own, s;
    vector<double>* allW[8] = { &w0, &w1, &w4, &w5, &w6, &w7, &w8, &w9 };
    vector<double> scores;
    for (i = 0; i < (int)trainingGroup.size(); i++)
    {
        // Score of this sample under its own class's weights.
        own = 0;
        for (j = 0; j < (int)trainingGroup[i].size(); j++)
            own += trainingGroup[i][j] * w[j];
        // Scores under every class's weights (including this class).
        for (t = 0; t < 8; t++)
        {
            s = 0;
            for (j = 0; j < (int)trainingGroup[i].size(); j++)
                s += trainingGroup[i][j] * (*allW[t])[j];
            scores.push_back(s);
        }
        // Count how many of the eight scores `own` strictly beats.
        m = 0;
        for (t = 0; t < (int)scores.size(); t++)
            if (own > scores[t])
                m++;
        if (m < (int)scores.size() - 1)
        {
            // Misclassified: move the decision boundary toward this sample.
            for (j = 0; j < (int)w.size(); j++)
                w[j] += trainingGroup[i][j] * STEP_LENGTH;
        }
        else
            n++;
        scores.clear();
    }
    return n;
}
double LinearDiscriminantTraining::TestingProcedure()
{
double h = 0;
double s;
time_t begin, end;
float TrainingTime =0, TestingTime =0;
begin=time(NULL); ///
TrainingProcedure();
end=time(NULL); ///
TrainingTime= difftime(begin,end); ///
begin=time(NULL);
s =TestingSingleClass(testGroup0,0);
end=time(NULL);
TestingTime += difftime(begin,end); ///
h +=s;
s = s/TESTNUM;
classResult <<"The reconginize precision of 0:"<<s<<" "<<endl;
begin =time(NULL);
s =TestingSingleClass(testGroup1,1);
end =time(NULL);
TestingTime += difftime(begin,end); ///
h +=s;
s = s/TESTNUM;
classResult <<"The reconginize precision of 1:"<<s<<" "<<endl;
begin =time(NULL);
s =TestingSingleClass(testGroup4,4);
end =time(NULL);
TestingTime += difftime(begin,end); ///
h +=s;
s = s/TESTNUM;
classResult <<"The reconginize precision of 4:"<<s<<" "<<endl;
begin =time(NULL);
s =TestingSingleClass(testGroup5,5);
end =time(NULL);
TestingTime += difftime(begin,end); ///
h +=s;
s =s/TESTNUM;
classResult <<"The reconginize precision of 5:"<<s<<" "<<endl;
begin =time(NULL);
s=TestingSingleClass(testGroup6,6);
end =time(NULL);
TestingTime += difftime(begin,end); ///
h +=s;
s =s/TESTNUM;
classResult <<"The reconginize precision of 6:"<<s<<" "<<endl;
begin =time(NULL);
s =TestingSingleClass(testGroup7,7);
end =time(NULL);
TestingTime += difftime(begin,end); ///
h +=s;
s =s/TESTNUM;
classResult <<"The reconginize precision of 7:"<<s<<" "<<endl;
begin =time(NULL);
s =TestingSingleClass(testGroup8,8);
end =time(NULL);
TestingTime += difftime(begin,end); ///
h +=s;
s =s/TESTNUM;
classResult <<"The reconginize precision of 8:"<<s<<" "<<endl;
begin =time(NULL);
s=TestingSingleClass(testGroup9,9);
end =time(NULL);
TestingTime += difftime(begin,end); ///
h +=s;
s =s/TESTNUM;
classResult <<"The reconginize precision of 9:"<<s<<" "<<endl;
classResult<<endl;
classResult <<"The total recongnize precision: ";
classResult << h/(8*TESTNUM)<<endl;
classResult <<"The training time is: "<<TrainingTime<<endl;
classResult <<"The test time is: "<<TestingTime<<endl;
classResult.close();
return h/(8*TESTNUM);
}
// Classifies every sample of `trainingGroup`: each sample is scored
// against all eight class weight vectors and assigned the label of the
// highest-scoring class ("<=" keeps the original tie-breaking, where
// the later class wins a tie). Predicted labels are written to the
// result file; returns how many samples matched `class_sign`.
// BUG FIXES vs. original:
//  - dot products were accumulated in `int` variables, truncating
//    every double product; now accumulated in doubles.
//  - `get_figure` was only initialized once, so when every score was
//    negative (best stayed at its 0 baseline) the previous sample's
//    prediction leaked through; it is now reset per sample.
int LinearDiscriminantTraining::TestingSingleClass(vector < vector<double> >& trainingGroup
,int class_sign)
{
    int i, j, t, m = 0, get_figure;
    double s, best;
    vector<double>* allW[8] = { &w0, &w1, &w4, &w5, &w6, &w7, &w8, &w9 };
    int labels[8] = { 0, 1, 4, 5, 6, 7, 8, 9 };
    for (i = 0; i < (int)trainingGroup.size(); i++)
    {
        best = 0;        // same 0 baseline as the original comparisons
        get_figure = 0;
        for (t = 0; t < 8; t++)
        {
            s = 0;
            for (j = 0; j < (int)trainingGroup[i].size(); j++)
                s += trainingGroup[i][j] * (*allW[t])[j];
            if (best <= s)
            {
                best = s;
                get_figure = labels[t];
            }
        }
        classResult << get_figure << " ";
        if (get_figure == class_sign)
            m++;
    }
    classResult << endl;
    return m;
}
int main()
{
int i,j;
FeatureSelection s;
ofstream out;
out.open("out.txt");
s.OpenSampleFile("digit0.dat");
s.CharacVectorGeneration();
for(i=0;i<(s.trainingGroup).size();i++)
{
cout << i+1 << " ";
for(j=0;j<(s.trainingGroup)[i].size();j++)
{
cout << (s.trainingGroup)[i][j] <<" ";
}
cout <<endl;
}
for(i=0;i<(s.testGroup).size();i++)
{
cout << i+31 << " ";
for(j=0;j<(s.testGroup)[i].size();j++)
{
cout << s.testGroup[i][j] <<" ";
}
cout <<endl;
}
cout <<endl <<endl;
KNNDiscriminantTraining l;
l.TestingProcedure();
cout <<"Finish all..."<<endl;
getchar();
return 0;
}
// (removed: web-page artifacts — keyboard-shortcut help text appended by the code-listing site)