// svm.cs
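// nu-SVR: distribute a total weight of C*nu*l/2 over the alpha pairs, capping each
// component at C, then solve with the nu-variant solver over 2*l variables.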
double sum = C * param.nu * l / 2;
for (i = 0; i < l; i++)
{
alpha2[i] = alpha2[i + l] = System.Math.Min(sum, C);
sum -= alpha2[i];
linear_term[i] = - prob.y[i];
y[i] = 1;
linear_term[i + l] = prob.y[i];
y[i + l] = - 1;
}
Solver_NU s = new Solver_NU();
s.Solve(2 * l, new SVR_Q(prob, param), linear_term, y, alpha2, C, C, param.eps, si, param.shrinking);
System.Console.Out.Write("epsilon = " + (- si.r) + "\n");
for (i = 0; i < l; i++)
alpha[i] = alpha2[i] - alpha2[i + l];
}
//
// decision_function
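// Holds the coefficients alpha and the bias rho of a single trained decision function.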
//
internal class decision_function
{
internal double[] alpha;
internal double rho;
}
internal static decision_function svm_train_one(svm_problem prob, svm_parameter param, double Cp, double Cn)
{
double[] alpha = new double[prob.l];
Solver.SolutionInfo si = new Solver.SolutionInfo();
switch (param.svm_type)
{
case svm_parameter.C_SVC:
solve_c_svc(prob, param, alpha, si, Cp, Cn);
break;
case svm_parameter.NU_SVC:
solve_nu_svc(prob, param, alpha, si);
break;
case svm_parameter.ONE_CLASS:
solve_one_class(prob, param, alpha, si);
break;
case svm_parameter.EPSILON_SVR:
solve_epsilon_svr(prob, param, alpha, si);
break;
case svm_parameter.NU_SVR:
solve_nu_svr(prob, param, alpha, si);
break;
}
System.Console.Out.Write("obj = " + si.obj + ", rho = " + si.rho + "\n");
// output SVs
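// nSV: support vectors (alpha != 0); nBSV: bounded support vectors (alpha at its upper bound).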
int nSV = 0;
int nBSV = 0;
for (int i = 0; i < prob.l; i++)
{
if (System.Math.Abs(alpha[i]) > 0)
{
++nSV;
if (prob.y[i] > 0)
{
if (System.Math.Abs(alpha[i]) >= si.upper_bound_p)
++nBSV;
}
else
{
if (System.Math.Abs(alpha[i]) >= si.upper_bound_n)
++nBSV;
}
}
}
System.Console.Out.Write("nSV = " + nSV + ", nBSV = " + nBSV + "\n");
decision_function f = new decision_function();
f.alpha = alpha;
f.rho = si.rho;
return f;
}
// Platt's binary SVM Probabilistic Output: an improvement from Lin et al.
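// sigmoid_train fits P(y=1|f) = 1/(1+exp(A*f+B)) to the decision values by minimizing
// the negative log-likelihood with regularized targets (hiTarget/loTarget), using
// Newton's method on (A, B) with a backtracking line search.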
private static void sigmoid_train(int l, double[] dec_values, double[] labels, double[] probAB)
{
double A, B;
double prior1 = 0, prior0 = 0;
int i;
for (i = 0; i < l; i++)
if (labels[i] > 0)
prior1 += 1;
else
prior0 += 1;
int max_iter = 100; // Maximal number of iterations
double min_step = 1e-10; // Minimal step taken in line search
double sigma = 1e-3; // For numerically strict PD of Hessian
double eps = 1e-5;
double hiTarget = (prior1 + 1.0) / (prior1 + 2.0);
double loTarget = 1 / (prior0 + 2.0);
double[] t = new double[l];
double fApB, p, q, h11, h22, h21, g1, g2, det, dA, dB, gd, stepsize;
double newA, newB, newf, d1, d2;
int iter;
// Initial point and initial function value
A = 0.0; B = System.Math.Log((prior0 + 1.0) / (prior1 + 1.0));
double fval = 0.0;
for (i = 0; i < l; i++)
{
if (labels[i] > 0)
t[i] = hiTarget;
else
t[i] = loTarget;
fApB = dec_values[i] * A + B;
if (fApB >= 0)
fval += t[i] * fApB + System.Math.Log(1 + System.Math.Exp(- fApB));
else
fval += (t[i] - 1) * fApB + System.Math.Log(1 + System.Math.Exp(fApB));
}
for (iter = 0; iter < max_iter; iter++)
{
// Update Gradient and Hessian (use H' = H + sigma I)
h11 = sigma; // numerically ensures strict PD
h22 = sigma;
h21 = 0.0; g1 = 0.0; g2 = 0.0;
for (i = 0; i < l; i++)
{
fApB = dec_values[i] * A + B;
if (fApB >= 0)
{
p = System.Math.Exp(- fApB) / (1.0 + System.Math.Exp(- fApB));
q = 1.0 / (1.0 + System.Math.Exp(- fApB));
}
else
{
p = 1.0 / (1.0 + System.Math.Exp(fApB));
q = System.Math.Exp(fApB) / (1.0 + System.Math.Exp(fApB));
}
d2 = p * q;
h11 += dec_values[i] * dec_values[i] * d2;
h22 += d2;
h21 += dec_values[i] * d2;
d1 = t[i] - p;
g1 += dec_values[i] * d1;
g2 += d1;
}
// Stopping Criteria
if (System.Math.Abs(g1) < eps && System.Math.Abs(g2) < eps)
break;
// Finding Newton direction: -inv(H') * g
det = h11 * h22 - h21 * h21;
dA = (- (h22 * g1 - h21 * g2)) / det;
dB = (- ((- h21) * g1 + h11 * g2)) / det;
gd = g1 * dA + g2 * dB;
stepsize = 1; // Line Search
while (stepsize >= min_step)
{
newA = A + stepsize * dA;
newB = B + stepsize * dB;
// New function value
newf = 0.0;
for (i = 0; i < l; i++)
{
fApB = dec_values[i] * newA + newB;
if (fApB >= 0)
newf += t[i] * fApB + System.Math.Log(1 + System.Math.Exp(- fApB));
else
newf += (t[i] - 1) * fApB + System.Math.Log(1 + System.Math.Exp(fApB));
}
// Check sufficient decrease
if (newf < fval + 0.0001 * stepsize * gd)
{
A = newA; B = newB; fval = newf;
break;
}
else
stepsize = stepsize / 2.0;
}
if (stepsize < min_step)
{
System.Console.Error.Write("Line search fails in two-class probability estimates\n");
break;
}
}
if (iter >= max_iter)
System.Console.Error.Write("Reaching maximal iterations in two-class probability estimates\n");
probAB[0] = A; probAB[1] = B;
}
private static double sigmoid_predict(double decision_value, double A, double B)
{
double fApB = decision_value * A + B;
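// Evaluate 1/(1+exp(fApB)) in a form that only exponentiates a non-positive argument,
// avoiding overflow for large |fApB|.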
if (fApB >= 0)
return System.Math.Exp(- fApB) / (1.0 + System.Math.Exp(- fApB));
else
return 1.0 / (1 + System.Math.Exp(fApB));
}
// Method 2 from the multiclass_prob paper by Wu, Lin, and Weng
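// Given pairwise probability estimates r[i][j] ~ P(y=i | y=i or j, x), solves for class
// probabilities p (summing to 1) by minimizing p'Qp, where Q[t][t] = sum_{j!=t} r[j][t]^2
// and Q[t][j] = -r[j][t]*r[t][j], using the iterative update below.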
private static void multiclass_probability(int k, double[][] r, double[] p)
{
int t;
int iter = 0, max_iter = 100;
double[][] Q = new double[k][];
for (int i = 0; i < k; i++)
{
Q[i] = new double[k];
}
double[] Qp = new double[k];
double pQp, eps = 0.001;
for (t = 0; t < k; t++)
{
p[t] = 1.0 / k; // Valid if k = 1
Q[t][t] = 0;
for (int j = 0; j < t; j++)
{
Q[t][t] += r[j][t] * r[j][t];
Q[t][j] = Q[j][t];
}
for (int j = t + 1; j < k; j++)
{
Q[t][t] += r[j][t] * r[j][t];
Q[t][j] = (- r[j][t]) * r[t][j];
}
}
for (iter = 0; iter < max_iter; iter++)
{
// stopping condition: recalculate Qp and pQp for numerical accuracy
pQp = 0;
for (t = 0; t < k; t++)
{
Qp[t] = 0;
for (int j = 0; j < k; j++)
Qp[t] += Q[t][j] * p[j];
pQp += p[t] * Qp[t];
}
double max_error = 0;
for (t = 0; t < k; t++)
{
double error = System.Math.Abs(Qp[t] - pQp);
if (error > max_error)
max_error = error;
}
if (max_error < eps)
break;
for (t = 0; t < k; t++)
{
double diff = (- Qp[t] + pQp) / Q[t][t];
p[t] += diff;
pQp = (pQp + diff * (diff * Q[t][t] + 2 * Qp[t])) / (1 + diff) / (1 + diff);
for (int j = 0; j < k; j++)
{
Qp[j] = (Qp[j] + diff * Q[t][j]) / (1 + diff);
p[j] /= (1 + diff);
}
}
}
if (iter >= max_iter)
System.Console.Error.Write("Exceeds max_iter in multiclass_prob\n");
}
// Cross-validation decision values for probability estimates
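// Runs 5-fold cross-validation on the training data, collects decision values on the
// held-out folds, and fits the sigmoid parameters (A, B) on those out-of-fold values.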
private static void svm_binary_svc_probability(svm_problem prob, svm_parameter param, double Cp, double Cn, double[] probAB)
{
int i;
int nr_fold = 5;
int[] perm = new int[prob.l];
double[] dec_values = new double[prob.l];
// random shuffle
for (i = 0; i < prob.l; i++)
perm[i] = i;
for (i = 0; i < prob.l; i++)
{
int j = i + (int) (SupportClass.Random.NextDouble() * (prob.l - i));
int tmp = perm[i]; perm[i] = perm[j]; perm[j] = tmp;
}
for (i = 0; i < nr_fold; i++)
{
int begin = i * prob.l / nr_fold;
int end = (i + 1) * prob.l / nr_fold;
int j, k;
svm_problem subprob = new svm_problem();
subprob.l = prob.l - (end - begin);
subprob.x = new svm_node[subprob.l][];
subprob.y = new double[subprob.l];
k = 0;
for (j = 0; j < begin; j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
for (j = end; j < prob.l; j++)
{
subprob.x[k] = prob.x[perm[j]];
subprob.y[k] = prob.y[perm[j]];
++k;
}
int p_count = 0, n_count = 0;
for (j = 0; j < k; j++)
if (subprob.y[j] > 0)
p_count++;
else
n_count++;
if (p_count == 0 && n_count == 0)
for (j = begin; j < end; j++)
dec_values[perm[j]] = 0;
else if (p_count > 0 && n_count == 0)
for (j = begin; j < end; j++)
dec_values[perm[j]] = 1;
else if (p_count == 0 && n_count > 0)
for (j = begin; j < end; j++)
dec_values[perm[j]] = - 1;
else
{
svm_parameter subparam = (svm_parameter) param.Clone();
subparam.probability = 0;
subparam.C = 1.0;
subparam.nr_weight = 2;
subparam.weight_label = new int[2];
subparam.weight = new double[2];
subparam.weight_label[0] = + 1;
subparam.weight_label[1] = - 1;
subparam.weight[0] = Cp;
subparam.weight[1] = Cn;
svm_model submodel = svm_train(subprob, subparam);
for (j = begin; j < end; j++)
{
double[] dec_value = new double[1];
svm_predict_values(submodel, prob.x[perm[j]], dec_value);
dec_values[perm[j]] = dec_value[0];
// ensure +1/-1 label order; this is why the CV subroutine is not reused here
dec_values[perm[j]] *= submodel.label[0];
}
}
}
sigmoid_train(prob.l, dec_values, prob.y, probAB);
}
// Return parameter of a Laplace distribution
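// Estimates the scale sigma of a Laplace model for the regression residuals:
// 5-fold cross-validated predictions give residuals ymv, and sigma is the mean
// absolute residual after discarding outliers.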
private static double svm_svr_probability(svm_problem prob, svm_parameter param)
{
int i;
int nr_fold = 5;
double[] ymv = new double[prob.l];
double mae = 0;
svm_parameter newparam = (svm_parameter) param.Clone();
newparam.probability = 0;
svm_cross_validation(prob, newparam, nr_fold, ymv);
for (i = 0; i < prob.l; i++)
{
ymv[i] = prob.y[i] - ymv[i];
mae += System.Math.Abs(ymv[i]);
}
mae /= prob.l;
double std = System.Math.Sqrt(2 * mae * mae);
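// sqrt(2)*mae is the standard deviation of a Laplace distribution with scale mae;
// residuals larger than 5*std are treated as outliers and excluded below.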
int count = 0;
mae = 0;
for (i = 0; i < prob.l; i++)
if (System.Math.Abs(ymv[i]) > 5 * std)
count = count + 1;
else
mae += System.Math.Abs(ymv[i]);
mae /= (prob.l - count);
System.Console.Error.Write("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + mae + "\n");
return mae;
}
//
// Interface functions
//
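// svm_train: one-class SVM and regression produce a single decision function;
// classification first groups the training data by class label.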
public static svm_model svm_train(svm_problem prob, svm_parameter param)
{
svm_model model = new svm_model();
model.param = param;
if (param.svm_type == svm_parameter.ONE_CLASS || param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR)
{
// regression or one-class-svm
model.nr_class = 2;
model.label = null;
model.nSV = null;
model.probA = null; model.probB = null;
model.sv_coef = new double[1][];
if (param.probability == 1 && (param.svm_type == svm_parameter.EPSILON_SVR || param.svm_type == svm_parameter.NU_SVR))
{
model.probA = new double[1];
model.probA[0] = svm_svr_probability(prob, param);
}
decision_function f = svm_train_one(prob, param, 0, 0);
model.rho = new double[1];
model.rho[0] = f.rho;
int nSV = 0;
int i;
for (i = 0; i < prob.l; i++)
if (System.Math.Abs(f.alpha[i]) > 0)
++nSV;
model.l = nSV;
model.SV = new svm_node[nSV][];
model.sv_coef[0] = new double[nSV];
int j = 0;
for (i = 0; i < prob.l; i++)
if (System.Math.Abs(f.alpha[i]) > 0)
{
model.SV[j] = prob.x[i];
model.sv_coef[0][j] = f.alpha[i];
++j;
}
}
else
{
// classification
// find out the number of classes
int l = prob.l;
int max_nr_class = 16;
int nr_class = 0;
int[] label = new int[max_nr_class];
int[] count = new int[max_nr_class];
int[] index = new int[l];
int i;
for (i = 0; i < l; i++)
{
int this_label = (int) prob.y[i];
int j;
for (j = 0; j < nr_class; j++)
if (this_label == label[j])
{
++count[j];
break;
}
index[i] = j;
if (j == nr_class)
{
if (nr_class == max_nr_class)
{
max_nr_class *= 2;
int[] new_data = new int[max_nr_class];
Array.Copy(label, 0, new_data, 0, label.Length);
label = new_data;
new_data = new int[max_nr_class];
Array.Copy(count, 0, new_data, 0, count.Length);
count = new_data;
}
label[nr_class] = this_label;
count[nr_class] = 1;
++nr_class;
}
}
// group training data of the same class
int[] start = new int[nr_class];
start[0] = 0;
for (i = 1; i < nr_class; i++)
start[i] = start[i - 1] + count[i - 1];