亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? svm.cpp

?? 支撐向量機(jī)庫(kù)文件
?? CPP
?? 第 1 頁 / 共 4 頁
字號(hào):
		linear_term[i] = param->p - prob->y[i];		y[i] = 1;		alpha2[i+l] = 0;		linear_term[i+l] = param->p + prob->y[i];		y[i+l] = -1;	}	Solver s;	s.Solve(2*l, SVR_Q(*prob,*param), linear_term, y,		alpha2, param->C, param->C, param->eps, si, param->shrinking);	double sum_alpha = 0;	for(i=0;i<l;i++)	{		alpha[i] = alpha2[i] - alpha2[i+l];		sum_alpha += fabs(alpha[i]);	}	info("nu = %f\n",sum_alpha/(param->C*l));	delete[] alpha2;	delete[] linear_term;	delete[] y;}static void solve_nu_svr(	const svm_problem *prob, const svm_parameter *param,	double *alpha, Solver::SolutionInfo* si){	int l = prob->l;	double C = param->C;	double *alpha2 = new double[2*l];	double *linear_term = new double[2*l];	schar *y = new schar[2*l];	int i;	double sum = C * param->nu * l / 2;	for(i=0;i<l;i++)	{		alpha2[i] = alpha2[i+l] = min(sum,C);		sum -= alpha2[i];		linear_term[i] = - prob->y[i];		y[i] = 1;		linear_term[i+l] = prob->y[i];		y[i+l] = -1;	}	Solver_NU s;	s.Solve(2*l, SVR_Q(*prob,*param), linear_term, y,		alpha2, C, C, param->eps, si, param->shrinking);	info("epsilon = %f\n",-si->r);	for(i=0;i<l;i++)		alpha[i] = alpha2[i] - alpha2[i+l];	delete[] alpha2;	delete[] linear_term;	delete[] y;}//// decision_function//struct decision_function{	double *alpha;	double rho;	};decision_function svm_train_one(	const svm_problem *prob, const svm_parameter *param,	double Cp, double Cn){	double *alpha = Malloc(double,prob->l);	Solver::SolutionInfo si;	switch(param->svm_type)	{		case C_SVC:			solve_c_svc(prob,param,alpha,&si,Cp,Cn);			break;		case NU_SVC:			solve_nu_svc(prob,param,alpha,&si);			break;		case ONE_CLASS:			solve_one_class(prob,param,alpha,&si);			break;		case EPSILON_SVR:			solve_epsilon_svr(prob,param,alpha,&si);			break;		case NU_SVR:			solve_nu_svr(prob,param,alpha,&si);			break;	}	info("obj = %f, rho = %f\n",si.obj,si.rho);	// output SVs	int nSV = 0;	int nBSV = 0;	for(int i=0;i<prob->l;i++)	{		if(fabs(alpha[i]) > 0)		{			++nSV;			if(prob->y[i] > 0)			{				if(fabs(alpha[i]) >= si.upper_bound_p)		
			++nBSV;			}			else			{				if(fabs(alpha[i]) >= si.upper_bound_n)					++nBSV;			}		}	}	info("nSV = %d, nBSV = %d\n",nSV,nBSV);	decision_function f;	f.alpha = alpha;	f.rho = si.rho;	return f;}//// svm_model//struct svm_model{	svm_parameter param;	// parameter	int nr_class;		// number of classes, = 2 in regression/one class svm	int l;			// total #SV	svm_node **SV;		// SVs (SV[l])	double **sv_coef;	// coefficients for SVs in decision functions (sv_coef[n-1][l])	double *rho;		// constants in decision functions (rho[n*(n-1)/2])	double *probA;          // pariwise probability information	double *probB;	// for classification only	int *label;		// label of each class (label[n])	int *nSV;		// number of SVs for each class (nSV[n])				// nSV[0] + nSV[1] + ... + nSV[n-1] = l	// XXX	int free_sv;		// 1 if svm_model is created by svm_load_model				// 0 if svm_model is created by svm_train};// Platt's binary SVM Probablistic Output: an improvement from Lin et al.void sigmoid_train(	int l, const double *dec_values, const double *labels, 	double& A, double& B){	double prior1=0, prior0 = 0;	int i;	for (i=0;i<l;i++)		if (labels[i] > 0) prior1+=1;		else prior0+=1;		int max_iter=100; 	// Maximal number of iterations	double min_step=1e-10;	// Minimal step taken in line search	double sigma=1e-3;	// For numerically strict PD of Hessian	double eps=1e-5;	double hiTarget=(prior1+1.0)/(prior1+2.0);	double loTarget=1/(prior0+2.0);	double *t=Malloc(double,l);	double fApB,p,q,h11,h22,h21,g1,g2,det,dA,dB,gd,stepsize;	double newA,newB,newf,d1,d2;	int iter; 		// Initial Point and Initial Fun Value	A=0.0; B=log((prior0+1.0)/(prior1+1.0));	double fval = 0.0;	for (i=0;i<l;i++)	{		if (labels[i]>0) t[i]=hiTarget;		else t[i]=loTarget;		fApB = dec_values[i]*A+B;		if (fApB>=0)			fval += t[i]*fApB + log(1+exp(-fApB));		else			fval += (t[i] - 1)*fApB +log(1+exp(fApB));	}	for (iter=0;iter<max_iter;iter++)	{		// Update Gradient and Hessian (use H' = H + sigma I)		h11=sigma; // numerically ensures strict PD		
h22=sigma;		h21=0.0;g1=0.0;g2=0.0;		for (i=0;i<l;i++)		{			fApB = dec_values[i]*A+B;			if (fApB >= 0)			{				p=exp(-fApB)/(1.0+exp(-fApB));				q=1.0/(1.0+exp(-fApB));			}			else			{				p=1.0/(1.0+exp(fApB));				q=exp(fApB)/(1.0+exp(fApB));			}			d2=p*q;			h11+=dec_values[i]*dec_values[i]*d2;			h22+=d2;			h21+=dec_values[i]*d2;			d1=t[i]-p;			g1+=dec_values[i]*d1;			g2+=d1;		}		// Stopping Criteria		if (fabs(g1)<eps && fabs(g2)<eps)			break;		// Finding Newton direction: -inv(H') * g		det=h11*h22-h21*h21;		dA=-(h22*g1 - h21 * g2) / det;		dB=-(-h21*g1+ h11 * g2) / det;		gd=g1*dA+g2*dB;		stepsize = 1; 		// Line Search		while (stepsize >= min_step)		{			newA = A + stepsize * dA;			newB = B + stepsize * dB;			// New function value			newf = 0.0;			for (i=0;i<l;i++)			{				fApB = dec_values[i]*newA+newB;				if (fApB >= 0)					newf += t[i]*fApB + log(1+exp(-fApB));				else					newf += (t[i] - 1)*fApB +log(1+exp(fApB));			}			// Check sufficient decrease			if (newf<fval+0.0001*stepsize*gd)			{				A=newA;B=newB;fval=newf;				break;			}			else				stepsize = stepsize / 2.0;		}		if (stepsize < min_step)		{			info("Line search fails in two-class probability estimates\n");			break;		}	}	if (iter>=max_iter)		info("Reaching maximal iterations in two-class probability estimates\n");	free(t);}double sigmoid_predict(double decision_value, double A, double B){	double fApB = decision_value*A+B;	if (fApB >= 0)		return exp(-fApB)/(1.0+exp(-fApB));	else		return 1.0/(1+exp(fApB)) ;}// Method 2 from the multiclass_prob paper by Wu, Lin, and Wengvoid multiclass_probability(int k, double **r, double *p){	int t;	int iter = 0, max_iter=100;	double **Q=Malloc(double *,k);	double *Qp=Malloc(double,k);	double pQp, eps=0.001;		for (t=0;t<k;t++)	{		p[t]=1.0/k;  // Valid if k = 1		Q[t]=Malloc(double,k);		Q[t][t]=0;		for (int j=0;j<t;j++)		{			Q[t][t]+=r[j][t]*r[j][t];			Q[t][j]=Q[j][t];		}		for (int j=t+1;j<k;j++)		{			Q[t][t]+=r[j][t]*r[j][t];			Q[t][j]=-r[j][t]*r[t][j];		}	}	for 
(iter=0;iter<max_iter;iter++)	{		// stopping condition, recalculate QP,pQP for numerical accuracy		pQp=0;		for (t=0;t<k;t++)		{			Qp[t]=0;			for (int j=0;j<k;j++)				Qp[t]+=Q[t][j]*p[j];			pQp+=p[t]*Qp[t];		}		double max_error=0;		for (t=0;t<k;t++)		{			double error=fabs(Qp[t]-pQp);			if (error>max_error)				max_error=error;		}		if (max_error<eps) break;				for (t=0;t<k;t++)		{			double diff=(-Qp[t]+pQp)/Q[t][t];			p[t]+=diff;			pQp=(pQp+diff*(diff*Q[t][t]+2*Qp[t]))/(1+diff)/(1+diff);			for (int j=0;j<k;j++)			{				Qp[j]=(Qp[j]+diff*Q[t][j])/(1+diff);				p[j]/=(1+diff);			}		}	}	if (iter>=max_iter)		info("Exceeds max_iter in multiclass_prob\n");	for(t=0;t<k;t++) free(Q[t]);	free(Q);	free(Qp);}// Cross-validation decision values for probability estimatesvoid svm_binary_svc_probability(	const svm_problem *prob, const svm_parameter *param,	double Cp, double Cn, double& probA, double& probB){	int i;	int nr_fold = 5;	int *perm = Malloc(int,prob->l);	double *dec_values = Malloc(double,prob->l);	// random shuffle	for(i=0;i<prob->l;i++) perm[i]=i;	for(i=0;i<prob->l;i++)	{		int j = i+rand()%(prob->l-i);		swap(perm[i],perm[j]);	}	for(i=0;i<nr_fold;i++)	{		int begin = i*prob->l/nr_fold;		int end = (i+1)*prob->l/nr_fold;		int j,k;		struct svm_problem subprob;		subprob.l = prob->l-(end-begin);		subprob.x = Malloc(struct svm_node*,subprob.l);		subprob.y = Malloc(double,subprob.l);					k=0;		for(j=0;j<begin;j++)		{			subprob.x[k] = prob->x[perm[j]];			subprob.y[k] = prob->y[perm[j]];			++k;		}		for(j=end;j<prob->l;j++)		{			subprob.x[k] = prob->x[perm[j]];			subprob.y[k] = prob->y[perm[j]];			++k;		}		int p_count=0,n_count=0;		for(j=0;j<k;j++)			if(subprob.y[j]>0)				p_count++;			else				n_count++;		if(p_count==0 && n_count==0)			for(j=begin;j<end;j++)				dec_values[perm[j]] = 0;		else if(p_count > 0 && n_count == 0)			for(j=begin;j<end;j++)				dec_values[perm[j]] = 1;		else if(p_count == 0 && n_count > 0)			for(j=begin;j<end;j++)				dec_values[perm[j]] = -1;		else		{			
svm_parameter subparam = *param;			subparam.probability=0;			subparam.C=1.0;			subparam.nr_weight=2;			subparam.weight_label = Malloc(int,2);			subparam.weight = Malloc(double,2);			subparam.weight_label[0]=+1;			subparam.weight_label[1]=-1;			subparam.weight[0]=Cp;			subparam.weight[1]=Cn;			struct svm_model *submodel = svm_train(&subprob,&subparam);			for(j=begin;j<end;j++)			{				svm_predict_values(submodel,prob->x[perm[j]],&(dec_values[perm[j]])); 				// ensure +1 -1 order; reason not using CV subroutine				dec_values[perm[j]] *= submodel->label[0];			}					svm_destroy_model(submodel);			svm_destroy_param(&subparam);			free(subprob.x);			free(subprob.y);		}	}			sigmoid_train(prob->l,dec_values,prob->y,probA,probB);	free(dec_values);	free(perm);}// Return parameter of a Laplace distribution double svm_svr_probability(	const svm_problem *prob, const svm_parameter *param){	int i;	int nr_fold = 5;	double *ymv = Malloc(double,prob->l);	double mae = 0;	svm_parameter newparam = *param;	newparam.probability = 0;	svm_cross_validation(prob,&newparam,nr_fold,ymv);	for(i=0;i<prob->l;i++)	{		ymv[i]=prob->y[i]-ymv[i];		mae += fabs(ymv[i]);	}			mae /= prob->l;	double std=sqrt(2*mae*mae);	int count=0;	mae=0;	for(i=0;i<prob->l;i++)	        if (fabs(ymv[i]) > 5*std)                         count=count+1;		else 		        mae+=fabs(ymv[i]);	mae /= (prob->l-count);	info("Prob. 
model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma= %g\n",mae);	free(ymv);	return mae;}//// Interface functions//svm_model *svm_train(const svm_problem *prob, const svm_parameter *param){	svm_model *model = Malloc(svm_model,1);	model->param = *param;	model->free_sv = 0;	// XXX	if(param->svm_type == ONE_CLASS ||	   param->svm_type == EPSILON_SVR ||	   param->svm_type == NU_SVR)	{		// regression or one-class-svm		model->nr_class = 2;		model->label = NULL;		model->nSV = NULL;		model->probA = NULL; model->probB = NULL;		model->sv_coef = Malloc(double *,1);		if(param->probability && 		   (param->svm_type == EPSILON_SVR ||		    param->svm_type == NU_SVR))		{			model->probA = Malloc(double,1);			model->probA[0] = svm_svr_probability(prob,param);		}		decision_function f = svm_train_one(prob,param,0,0);		model->rho = Malloc(double,1);		model->rho[0] = f.rho;		int nSV = 0;		int i;		for(i=0;i<prob->l;i++)			if(fabs(f.alpha[i]) > 0) ++nSV;		model->l = nSV;		model->SV = Malloc(svm_node *,nSV);		model->sv_coef[0] = Malloc(double,nSV);		int j = 0;		for(i=0;i<prob->l;i++)			if(fabs(f.alpha[i]) > 0)			{				model->SV[j] = prob->x[i];				model->sv_coef[0][j] = f.alpha[i];				++j;			}				free(f.alpha);	}	else	{		// classification		// find out the number of classes		int l = prob->l;		int max_nr_class = 16;		int nr_class = 0;		int *label = Malloc(int,max_nr_class);		int *count = Malloc(int,max_nr_class);		int *index = Malloc(int,l);		int i;		for(i=0;i<l;i++)		{			int this_label = (int)prob->y[i];			int j;			for(j=0;j<nr_class;j++)				if(this_label == label[j])				{					++count[j];					break;				}			index[i] = j;			if(j == nr_class)			{				if(nr_class == max_nr_class)				{					max_nr_class *= 2;					label = (int *)realloc(label,max_nr_class*sizeof(int));					count = (int *)realloc(count,max_nr_class*sizeof(int));				}				label[nr_class] = this_label;				count[nr_class] = 1;				++nr_class;			}		}		// group training data of the 
same class		int *start = Malloc(int,nr_class);		start[0] = 0;		for(i=1;i<nr_class;i++)			start[i] = start[i-1]+count[i-1];		svm_node **x = Malloc(svm_node *,l);				for(i=0;i<l;i++)		{			x[start[index[i]]] = prob->x[i];			++start[index[i]];		}				start[0] = 0;		for(i=1;i<nr_class;i++)			start[i] = start[i-1]+count[i-1];		// calculate weighted C		double *weighted_C = Malloc(double, nr_class);		for(i=0;i<nr_class;i++)			weighted_C[i] = param->C;		for(i=0;i<param->nr_weight;i++)		{				int j;			for(j=0;j<nr_class;j++)				if(param->weight_label[i] == label[j])					break;			if(j == nr_class)				fprintf(stderr,"warning: class label %d specified in weight is not found\n", param->weight_label[i]);			else				weighted_C[j] *= param->weight[i];		}		// train k*(k-1)/2 models				bool *nonzero = Malloc(bool,l);		for(i=0;i<l;i++)			nonzero[i] = false;		decision_function *f = Malloc(decision_function,nr_class*(nr_class-1)/2);		double *probA=NULL,*probB=NULL;		if (param->probability)		{			probA=Malloc(double,nr_class*(nr_class-1)/2);			probB=Malloc(double,nr_class*(nr_class-1)/2);		}		int p = 0;		for(i=0;i<nr_class;i++)			for(int j=i+1;j<nr_class;j++)			{				svm_problem sub_prob;				int si = start[i], sj = start[j];				int ci = count[i], cj = count[j];				sub_prob.l = ci+cj;				sub_prob.x = Malloc(svm_node *,sub_prob.l);				sub_prob.y = Malloc(double,sub_prob.l);				int k;				for(k=0;k<ci;k++)				{					sub_prob.x[k] = x[si+k];					sub_prob.y[k] = +1;				}				for(k=0;k<cj;k++)				{					sub_prob.x[ci+k] = x[sj+k];					sub_prob.y[ci+k] = -1;				}				if(param->probability)					svm_binary_svc_probability(&sub_prob,param,weighted_C[i],weighted_C[j],probA[p],probB[p]);				f[p] = svm_train_one(&sub_prob,param,weighted_C[i],weighted_C[j]);				for(k=0;k<ci;k++)					if(!nonzero[si+k] && fabs(f[p].alpha[k]) > 0)						nonzero[si+k] = true;				for(k=0;k<cj;k++)					if(!nonzero[sj+k] && fabs(f[p].alpha[ci+k]) > 0)						nonzero[sj+k] = true;				free(sub_prob.x);				free(sub_prob.y);				++p;			}		// 
build output		model->nr_class = nr_class;				model->label = Malloc(int,nr_class);		for(i=0;i<nr_class;i++)			model->label[i] = label[i];				model->rho = Malloc(double,nr_class*(nr_class-1)/2);		for(i=0;i<nr_class*(nr_class-1)/2;i++)			model->rho[i] = f[i].rho;		if(param->probability)		{			model->probA = Malloc(double,nr_class*(nr_class-1)/2);			model->probB = Malloc(double,nr_class*(nr_class-1)/2);			for(i=0;i<nr_class*(nr_class-1)/2;i++)			{				model->probA[i] = probA[i];				model->probB[i] = probB[i];			}		}		else		{			model->probA=NULL;			model->probB=NULL;		}		int total_sv = 0;		int *nz_count = Malloc(int,nr_class);		model->nSV = Malloc(int,nr_class);		for(i=0;i<nr_class;i++)		{			int nSV = 0;			for(int j=0;j<count[i];j++)				if(nonzero[start[i]+j])				{						++nSV;					++total_sv;				}			model->nSV[i] = nSV;			nz_count[i] = nSV;		}				info("Total nSV = %d\n",total_sv);		model->l = total_sv;		model->SV = Malloc(svm_node *,total_sv);		p = 0;		for(i=0;i<l;i++)			if(nonzero[i]) model->SV[p++] = x[i];		int *nz_start = Malloc(int,nr_class);		nz_start[0] = 0;		for(i=1;i<nr_class;i++)			nz_start[i] = nz_start[i-1]+nz_count[i-1];		model->sv_coef = Malloc(double *,nr_class-1);		for(i=0;i<nr_class-1;i++)			model->sv_coef[i] = Malloc(double,total_sv);

?? 快捷鍵說明

復(fù)制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號(hào) Ctrl + =
減小字號(hào) Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
亚洲国产日日夜夜| 一区二区三区在线高清| 国产精品久久久久久久久免费相片 | 在线区一区二视频| 日韩精品中文字幕一区二区三区| 国产精品麻豆一区二区| 香蕉加勒比综合久久| yourporn久久国产精品| 精品久久久久久无| 三级在线观看一区二区| 色婷婷av一区| 国产精品成人一区二区艾草| 精品一区中文字幕| 欧美精品18+| 亚洲综合无码一区二区| 波多野结衣一区二区三区 | 激情亚洲综合在线| 欧美日韩国产系列| 一区二区三区日韩精品| 99这里都是精品| 亚洲国产岛国毛片在线| 国产成人午夜99999| 久久久久久久综合色一本| 免费亚洲电影在线| 日韩视频免费观看高清完整版 | 欧美性xxxxx极品少妇| 国产精品久久久久婷婷| 成人免费精品视频| 中文字幕成人网| 韩国精品一区二区| 久久日韩粉嫩一区二区三区| 久久精品国产精品亚洲综合| 日韩欧美你懂的| 国产永久精品大片wwwapp| 久久嫩草精品久久久精品一| 国产激情91久久精品导航 | 亚洲午夜在线电影| 色欧美88888久久久久久影院| 成人欧美一区二区三区1314| 99国产精品久久久久久久久久久| 中文字幕一区二区三区四区不卡| bt欧美亚洲午夜电影天堂| 亚洲色欲色欲www在线观看| 色诱亚洲精品久久久久久| 樱桃国产成人精品视频| 欧美三级在线播放| 蜜桃久久精品一区二区| 久久综合狠狠综合久久综合88 | 色综合天天综合| 亚洲一区二区av在线| 7878成人国产在线观看| 黄页视频在线91| 欧美国产激情一区二区三区蜜月| 99国产欧美久久久精品| 午夜国产不卡在线观看视频| 精品国产91洋老外米糕| 不卡av免费在线观看| 一区二区三区日本| 精品国产乱码久久久久久影片| 国产91丝袜在线18| 亚洲成人综合在线| 久久久久久一级片| 日本精品裸体写真集在线观看 | 国产伦理精品不卡| 日韩理论片中文av| 日韩欧美色综合| 99久久精品国产观看| 视频在线观看91| 国产亚洲精品免费| 欧美日韩精品三区| 国产乱人伦偷精品视频不卡 | 六月丁香婷婷色狠狠久久| 亚洲国产高清aⅴ视频| 欧美日韩精品欧美日韩精品| 国产伦精品一区二区三区免费| 一区二区三区日韩欧美精品| 久久青草欧美一区二区三区| 欧美日韩亚洲不卡| 99精品国产一区二区三区不卡| 男女男精品视频| 亚洲品质自拍视频| 久久久久久久电影| 在线不卡免费欧美| 色乱码一区二区三区88| 国产精品亚洲综合一区在线观看| 亚洲成av人片在www色猫咪| 日本免费新一区视频| 亚洲色图在线看| 中文字幕第一区| 日韩精品一区二区三区swag| eeuss鲁片一区二区三区 | 亚洲福利视频导航| 中文字幕制服丝袜一区二区三区| 日韩欧美一卡二卡| 91精品啪在线观看国产60岁| 色综合天天综合色综合av | 韩国成人在线视频| 三级久久三级久久久| 亚洲精品老司机| 亚洲视频小说图片| 国产精品网站导航| 久久久久88色偷偷免费| 欧美电影免费提供在线观看| 91精品国产综合久久精品性色| 欧洲av一区二区嗯嗯嗯啊| 91美女视频网站| av在线播放成人| 波多野结衣91| 成人av免费在线| 成人白浆超碰人人人人| 大陆成人av片| aaa欧美色吧激情视频| 97久久精品人人做人人爽50路| 欧美久久久久久久久| 欧美亚洲综合网| 欧美日韩一区中文字幕| 在线观看www91| 欧美四级电影在线观看| 91精品国产综合久久香蕉的特点| 欧美卡1卡2卡| 6080国产精品一区二区| 欧美一区二区三区系列电影| 欧美高清hd18日本| 日韩一区二区免费高清| 欧美福利视频导航| 欧美精品vⅰdeose4hd| 日韩欧美的一区二区| 欧美电视剧在线看免费| 精品久久久久久久人人人人传媒| 精品国产伦一区二区三区观看方式| 精品欧美一区二区在线观看| 久久―日本道色综合久久| 国产精品久久久久久福利一牛影视 | 欧美无砖砖区免费| 欧美精品粉嫩高潮一区二区| 欧美丰满美乳xxx高潮www| 欧美成人综合网站| 国产欧美一区二区三区鸳鸯浴| 国产精品乱码人人做人人爱 | 国产超碰在线一区| 99久久综合99久久综合网站| 欧美偷拍一区二区| 日韩欧美黄色影院| 亚洲国产成人自拍| 一个色综合av| 久久99国产精品久久| 成人h精品动漫一区二区三区| 在线视频欧美精品| 91精品在线麻豆| 国产精品免费av| 五月婷婷综合激情| 国产精品2024| 欧美午夜精品久久久| 国产无人区一区二区三区| 亚洲一区在线电影| 国产一区二区三区久久久| 91久久线看在观草草青青| 欧美一级黄色录像| 亚洲美女区一区| 国产美女精品在线| 欧美日韩在线精品一区二区三区激情 | 欧美三级在线看| 
日本一区二区免费在线| 亚洲成av人片观看| 99精品视频免费在线观看| 欧美电影免费观看高清完整版在线 | 日日摸夜夜添夜夜添国产精品| 国产成人午夜99999| 欧美一区二区三区免费在线看 | 久久久久久久久一| 天天色综合成人网| 91麻豆免费看| 国产蜜臀av在线一区二区三区| 青草国产精品久久久久久| 91久久精品国产91性色tv| 欧美韩日一区二区三区四区| 另类成人小视频在线| 欧美亚洲国产一卡| 日韩毛片高清在线播放| 国产91精品在线观看| 日韩视频不卡中文| 午夜精品久久久久久久久久久| 91影院在线免费观看| 亚洲国产精品ⅴa在线观看| 国产在线精品视频| 亚洲精品一线二线三线| 日韩中文字幕一区二区三区| 欧美影院精品一区| 综合婷婷亚洲小说| 99在线视频精品| 中文字幕一区在线观看| 成人久久18免费网站麻豆| 国产日韩一级二级三级| 国产精品91xxx| 久久久综合网站| 国产九色精品成人porny | 亚洲婷婷综合久久一本伊一区| 高清国产一区二区| 国产亚洲综合色| 成人在线综合网站|