亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? bpn.cpp

?? 2002年
?? CPP
字號:
#include "bpn.h"

#include <math.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "smp_func.h"

using namespace std;

// Construct a layer with num_units neurons, fed by num_punits neurons
// of the previous layer (pass 0 for the input layer, which has no
// incoming weights).
LAYER::LAYER(int num_units, int num_punits){
	units = p_units = 0;	// Resize() overwrites these on success
	Resize(num_units, num_punits);
}

// Nothing to release explicitly: all storage lives in vector members.
LAYER::~LAYER(){
} 

// (Re)allocate per-layer storage for num_units neurons fed by
// num_punits neurons of the previous layer. Index 0 of every per-unit
// array is reserved for the bias unit. Returns false on invalid sizes.
bool LAYER::Resize(int num_units, int num_punits){
	if(num_units < 1 || num_punits < 0) return false;

	units = num_units;
	p_units = num_punits;

	Output.resize(units+1);
	Output[0] = 1;	// constant bias output
	Error.resize(units+1);

	// Only non-input layers carry incoming weight matrices.
	if(p_units > 0){
		Weight.resize(units+1);
		dWeight.resize(units+1);
		last_dWeight.resize(units+1);

		for(int row=1; row<=units; row++){
			Weight[row].resize(p_units+1);
			dWeight[row].resize(p_units+1);
			last_dWeight[row].resize(p_units+1);

			// Deltas start at zero; Weight is left for RandomWeights().
			for(int col=0; col<=p_units; col++){
				dWeight[row][col] = 0;
				last_dWeight[row][col] = 0;
			}
		}
	}

	// Default learning rate and momentum coefficient.
	Eta = 1;
	Alpha = 0.5;

	return true;
}

// Select this layer's activation function (logsig/tansig/purelin).
void LAYER::SetFuncType(FuncType functype){
	this->functype = functype;
}

// Logistic activation: 1 / (1 + e^(-Gain*x)).
REAL BPN::sigmoid(REAL Input){
	REAL z = -1*Gain * Input;
	return 1 / (1 + exp(z));
}

// Derivative of the logistic activation, expressed in terms of its
// output value: Gain * y * (1 - y).
REAL BPN::dsigmoid(REAL Out){
	return Gain * Out * (1-Out);
}

// Identity activation (MATLAB-style "purelin").
REAL BPN::purelinear(REAL Input){
	return Input;
}

// Derivative of the identity activation: constant 1.
REAL BPN::dpurelinear(REAL Out){
	return 1;
}

// Hyperbolic tangent computed as 1 - 2/(e^(2x)+1).
// Sqr() comes from smp_func.h.
REAL BPN::tanh(REAL Input){
	REAL e2x = Sqr(exp(Input));	// e^(2x)
	return (1 - 2/(e2x+1));
}

// Derivative of tanh, expressed in terms of its output value: 1 - y^2.
REAL BPN::dtanh(REAL Out){
	return (1 - Sqr(Out));
}

// (Re)build the layer array: Units[k] is the neuron count of layer k.
// Layer 0 is the input layer (no incoming weights). Silently ignores
// requests with fewer than two layers.
void BPN::Resize(int NUM_LAYERS, int* Units){
	if(NUM_LAYERS < 2) return;

	this->NUM_LAYERS = NUM_LAYERS;
	Layers.resize(NUM_LAYERS);

	Layers[0].Resize(Units[0], 0);
	for (int l=1; l<NUM_LAYERS; l++)
		Layers[l].Resize(Units[l], Units[l-1]);

	// Convenience aliases into Layers.
	InputLayer  = &Layers[0];
	OutputLayer = &Layers[NUM_LAYERS - 1];
}

// Construct a network with NUM_LAYERS layers.
// LAYER_SIZE: units per layer (NULL -> 1 unit in every layer).
// functype:   activation per non-input layer (NULL -> logsig everywhere).
// BIASED:     stored in BIAS as 1/0.
BPN::BPN(int NUM_LAYERS, int* LAYER_SIZE, FuncType* functype, bool BIASED){
	this->NUM_LAYERS = 0;
	Gain = 1;

	if(NUM_LAYERS < 2) return;	// need at least input + output layer

	this->NUM_LAYERS = NUM_LAYERS;
	this->BIAS = BIASED ? 1 : 0;

	int* Units = new int[NUM_LAYERS];
	for(int i = 0; i < NUM_LAYERS; i++){
		Units[i] = (LAYER_SIZE == NULL) ? 1 : LAYER_SIZE[i];
	}

	this->INPUT_SIZE  = Units[0];
	this->OUTPUT_SIZE = Units[NUM_LAYERS-1];

	Resize(NUM_LAYERS, Units);

	delete[] Units;	// BUGFIX: array form (plain `delete` on new[] is UB)

	for(int i=1; i<NUM_LAYERS; i++){
		Layers[i].functype = (functype == NULL) ? logsig : functype[i-1];
	}

	// Default normalization ranges: map [MIN,MAX] <-> [LO,HI].
	Input_HI = 1;
	Input_LO = -1;
	Target_HI = 1;
	Target_LO = -1;
	Input_MAX = 1;
	Input_MIN = -1;
	Target_MAX = 1;
	Target_MIN = -1;

	// Default training configuration.
	epoch = 10;
	batch_period = 1;
	traintype = adapt;

	//RandomWeights(-0.5,0.5);
}

// Nothing to release explicitly: Layers is a vector member.
BPN::~BPN(){
}

// Fill every weight (including the bias column 0) with a uniform
// random value in [Low, High].
void BPN::RandomWeights(REAL Low, REAL High)
{
	for (int l=1; l<NUM_LAYERS; l++) {
		for (int i=1; i<=Layers[l].units; i++) {
			for (int j=0; j<=Layers[l-1].units; j++) {
				REAL u = REAL(rand()) / RAND_MAX;	// uniform in [0,1]
				Layers[l].Weight[i][j] = u * (High-Low) + Low;
			}
		}
	}
}

// Random integer in [Low, High] (small modulo bias is tolerated).
int BPN::RandomInt(int Low, int High){
	return Low + rand() % (High - Low + 1);
}

// Random integer in [0, High].
int BPN::RandomInt(int High){
	return RandomInt(0,High);
}

// Copy an INPUT_SIZE-long sample into the input layer's outputs
// (slot 0 is reserved for the bias unit).
void BPN::SetInput(REAL* Input)
{ 
	for(int k=1; k<=INPUT_SIZE; k++)
		InputLayer->Output[k] = Input[k-1];
}

// Copy the output layer's activations into a caller-supplied array,
// skipping the bias slot 0.
void BPN::GetOutput(REAL* Output)
{
	int n = OutputLayer->units;
	for (int k=1; k<=n; k++)
		Output[k-1] = OutputLayer->Output[k];
}

void BPN::PropagateLayer(int Lower, int Upper)
{
	int  i,j;
	REAL Sum;

	for (i=1; i<=Layers[Upper].units; i++) {
		Sum = 0;
		for (j=0; j<=Layers[Lower].units; j++) {
			Sum += Layers[Upper].Weight[i][j] * Layers[Lower].Output[j];
		}
		switch(Layers[Upper].functype){
		case logsig:   Layers[Upper].Output[i] = sigmoid(Sum); break;
		case purelin:  Layers[Upper].Output[i] = purelinear(Sum); break;
		case tansig:   Layers[Upper].Output[i] = tanh(Sum); break;
		default:  Layers[Upper].Output[i] = purelinear(Sum);
		}
	}
}

// Run a full forward pass, layer by layer from input to output.
void BPN::PropagateNet()
{
	for (int l=1; l<NUM_LAYERS; l++)
		PropagateLayer(l-1, l);
}

// Compare the output layer against Desire: fill OutputLayer->Error with
// the delta (activation derivative times residual) and accumulate the
// summed squared error into the member Error (reset to 0 here first).
void BPN::ComputeOutputError(REAL* Desire)
{
	Error = 0;
	for (int i=1; i<=OutputLayer->units; i++) {
		REAL Out = OutputLayer->Output[i];
		REAL Err = Desire[i-1]-Out;

		REAL deriv;
		switch(OutputLayer->functype){
		case logsig:  deriv = dsigmoid(Out);    break;
		case tansig:  deriv = dtanh(Out);       break;
		case purelin: deriv = dpurelinear(Out); break;
		default:      deriv = dpurelinear(Out);
		}
		OutputLayer->Error[i] = deriv * Err;

		Error += 0.5 * Sqr(Err);
	}
}

// Inject externally-computed output errors (rather than deriving them
// from a desired-output vector as ComputeOutputError does).
// NOTE(review): unlike ComputeOutputError(), the member Error is NOT
// reset to 0 here, so squared errors accumulate across calls — confirm
// whether that is intentional before relying on GetError() after this.
void BPN::SetOutputError(REAL* Errors){
	int i;
	REAL Out, Err;
	for (i=1; i<=OutputLayer->units; i++) {
		Out = OutputLayer->Output[i];
		Err = Errors[i-1];

		// Scale by the activation derivative, as in ComputeOutputError().
		switch(OutputLayer->functype){
		case logsig:   
			OutputLayer->Error[i] = dsigmoid(Out) * Err; break;
		case purelin:
			OutputLayer->Error[i] = dpurelinear(Out) * Err; break;
		case tansig:
			OutputLayer->Error[i] = dtanh(Out) * Err; break;
		default:  OutputLayer->Error[i] = dpurelinear(Out) * Err;
		}

		Error += 0.5 * Sqr(Err);
	}
}

void BPN::BackpropagateLayer(int Upper, int Lower)
{
	int  i,j;
	REAL Out, Err;
   
	for (i=1; i<=Layers[Lower].units; i++) {
		Out = Layers[Lower].Output[i];
		Err = 0;
		for (j=1; j<=Layers[Upper].units; j++) {
			Err += Layers[Upper].Weight[j][i] * Layers[Upper].Error[j];
		}
		switch(Layers[Lower].functype){
		case logsig:   
			Layers[Lower].Error[i] = dsigmoid(Out) * Err; break;
		case purelin:
			Layers[Lower].Error[i] = dpurelinear(Out) * Err; break;
		case tansig:
			Layers[Lower].Error[i] = dtanh(Out) * Err; break;
		default:  Layers[Lower].Error[i] = dpurelinear(Out) * Err;
		}
	}
}


// Back-propagate errors from the output layer down to the input layer,
// then accumulate the resulting weight deltas (weights themselves are
// only changed by AdjustWeights).
void BPN::BackpropagateNet()
{
	for (int l=NUM_LAYERS-1; l>0; l--)
		BackpropagateLayer(l, l-1);
	GeneratedWeights();
}

// Zero all accumulated weight deltas.
void BPN::ResetdWeights(){
	for (int l=1; l<NUM_LAYERS; l++)
		for (int i=1; i<=Layers[l].units; i++)
			for (int j=0; j<=Layers[l-1].units; j++)
				Layers[l].dWeight[i][j] = 0;
}

void BPN::GeneratedWeights(){
	int  l,i,j;
	for (l=1; l<NUM_LAYERS; l++) {
		for (i=1; i<=Layers[l].units; i++) {
			for (j=0; j<=Layers[l-1].units; j++) {
				Layers[l].dWeight[i][j] += Layers[l].Eta * Layers[l].Error[i] * Layers[l-1].Output[j];
			}
		}
	}
}

void BPN::AdjustWeights(){
	int  l,i,j;
	for (l=1; l<NUM_LAYERS; l++) {
		for (i=1; i<=Layers[l].units; i++) {
			for (j=0; j<=Layers[l-1].units; j++) {
				Layers[l].Weight[i][j] += Layers[l].dWeight[i][j] 
						+ Layers[l].Alpha * Layers[l].last_dWeight[i][j];
				Layers[l].last_dWeight[i][j] = Layers[l].dWeight[i][j];
				Layers[l].dWeight[i][j] = 0;
			}
		}
	}
}

// Forward pass for one sample: load inputs, propagate, read outputs.
void BPN::SimulateNet(REAL* Input, REAL* Output){
	SetInput(Input);
	PropagateNet();
	GetOutput(Output);
}

void BPN::SimulateNet(int num_samples, REAL* Input, REAL* Output){
	int i;
	for(i=0; i<num_samples; i++){
		SimulateNet(Input + INPUT_SIZE * i, Output + OUTPUT_SIZE * i);
	}
}

// One training presentation: forward pass, output-error computation,
// backward pass. Accumulates deltas but does not modify the weights.
void BPN::ForwardBack(REAL* Input, REAL* Desire){
	SetInput(Input);
	PropagateNet();
	ComputeOutputError(Desire);
	BackpropagateNet();
}

// On-line ("adapt") training: epoch*num_samples randomly chosen
// single-sample presentations, each followed immediately by a weight
// update.
void BPN::Adapt(int num_samples, REAL* Input, REAL* Desire)
{
	int presentations = epoch * num_samples;
	for(int p=0; p<presentations; p++){
		int t = RandomInt(num_samples-1);
		ForwardBack(Input + INPUT_SIZE * t, Desire + OUTPUT_SIZE * t);
		AdjustWeights();
	}
}

// Mini-batch training: accumulate deltas over batch_period random
// presentations, then apply one weight update; repeated so that about
// epoch*num_samples presentations are made in total.
void BPN::BatchTrain(int num_samples, REAL* Input, REAL* Desire){
	int batches = epoch * num_samples / batch_period;
	for(int b=0; b<batches; b++){
		for(int k=0; k<batch_period; k++){
			int t = RandomInt(num_samples-1);
			ForwardBack(Input + INPUT_SIZE * t, Desire + OUTPUT_SIZE * t);
		}
		AdjustWeights();
	}
}

// Evaluate one sample without training: forward pass plus error
// computation; the resulting error is available via GetError().
void BPN::TestNet(REAL* Input, REAL* Desire){
	SetInput(Input);
	PropagateNet();
	ComputeOutputError(Desire);
}

// Average error over num_samples row-major samples.
// Returns 0 for num_samples <= 0 (previously divided by zero).
REAL BPN::TestNet(int num_samples, REAL* Input, REAL* Desire){
	if(num_samples <= 0) return 0;	// BUGFIX: guard division below
	REAL total = 0;	// renamed from Error so it no longer shadows the member
	for(int i=0; i<num_samples; i++){
		TestNet(Input + INPUT_SIZE * i, Desire + OUTPUT_SIZE * i);
		total += GetError();
	}
	return total / num_samples;
}

// Set how many passes over the sample set TrainNet makes.
void BPN::SetEpoch(int epoch){
	this->epoch = epoch;
}

// Set how many presentations are accumulated per weight update in
// batch training (see BatchTrain).
void BPN::SetBatchPeriod(int period){
	this->batch_period = period;
}

// Dispatch to the configured training strategy; anything other than
// `batch` (including unknown values) falls back to on-line adaptation.
void BPN::TrainNet(int num_samples, REAL* Input, REAL* Desire){
	if(traintype == batch)
		BatchTrain(num_samples, Input, Desire);
	else
		Adapt(num_samples, Input, Desire);
}

// Linearly map y from [Input_MIN, Input_MAX] to [Input_LO, Input_HI].
REAL BPN::Normalize_Input(REAL y){
	REAL scaled = (y - Input_MIN) / (Input_MAX - Input_MIN) * (Input_HI - Input_LO);
	return scaled + Input_LO;
}

// Inverse of Normalize_Input: map y from [Input_LO, Input_HI] back to
// [Input_MIN, Input_MAX].
REAL BPN::DeNormalize_Input(REAL y){
	REAL scaled = (y - Input_LO) / (Input_HI - Input_LO) * (Input_MAX - Input_MIN);
	return scaled + Input_MIN;
}

// Linearly map y from [Target_MIN, Target_MAX] to [Target_LO, Target_HI].
REAL BPN::Normalize_Target(REAL y){
	REAL scaled = (y - Target_MIN) / (Target_MAX - Target_MIN) * (Target_HI - Target_LO);
	return scaled + Target_LO;
}

// Inverse of Normalize_Target: map y from [Target_LO, Target_HI] back
// to [Target_MIN, Target_MAX].
REAL BPN::DeNormalize_Target(REAL y){
	REAL scaled = (y - Target_LO) / (Target_HI - Target_LO) * (Target_MAX - Target_MIN);
	return scaled + Target_MIN;
}

// Normalize inputs and targets in place for num_samples row-major
// samples.
void BPN::Normalize(int num_samples, REAL* Input, REAL* Desire){
	int nin = num_samples * INPUT_SIZE;
	for(int i=0; i<nin; i++)
		Input[i] = Normalize_Input(Input[i]);

	int nout = num_samples * OUTPUT_SIZE;
	for(int i=0; i<nout; i++)
		Desire[i] = Normalize_Target(Desire[i]);
}

// Map network outputs back to the caller's target range, in place.
void BPN::DeNormalize(int num_samples, REAL* Output){
	int n = num_samples * OUTPUT_SIZE;
	for(int i=0; i<n; i++)
		Output[i] = DeNormalize_Target(Output[i]);
}

// Write the topology, weights and activation names to a text file in
// the format RestoreNet() reads. Returns false if the file can't open.
bool BPN::SaveNet(char* filename){
	FILE* fp=fopen(filename,"w");
	if (fp == NULL) return false;

	// Header: layer count, then units per layer.
	fprintf(fp,"%d\n",NUM_LAYERS);
	for(int k=0; k<NUM_LAYERS; k++)
		fprintf(fp,"%d ",Layers[k].units);
	fprintf(fp,"\n");

	// Weights: one line per source unit (bias column 0 included).
	for (int l=1; l<NUM_LAYERS; l++) {
		for (int i=0; i<=Layers[l-1].units; i++) {
			for (int j=1; j<=Layers[l].units; j++)
				fprintf(fp,"%f ",Layers[l].Weight[j][i]);
			fprintf(fp,"\n");
		}
	}

	// Activation function name per non-input layer.
	for(int l=1; l<NUM_LAYERS; l++){
		switch(Layers[l].functype){
		case tansig:    fprintf(fp, "tansig\n");  break;
		case purelin:   fprintf(fp, "purelin\n"); break;
		case logsig:
		default:        fprintf(fp, "logsig\n");
		}
	}
	fclose(fp);
	return true;
}

// Parse a network previously written by SaveNet() from an in-memory
// string: layer count, units per layer, weight matrix, activation
// names. The smp_func helpers get_int/get_float/get_word advance the
// cursor past each parsed token. Returns false on any parse failure.
bool BPN::RestoreNetFromString(char* string){
	int l,i,j;
	bool Success = false;
	
	if (string == NULL) return false;

	int num_layers;

	if (!sscanf(string,"%d",&num_layers)){
		return false;
	}
	if(num_layers < 2) return false;

	get_int(&string);
		
	int* Units = new int[num_layers];

	for(i=0; i<num_layers; i++){
		if(!sscanf(string,"%d",&Units[i])){
			delete[] Units;	// BUGFIX: array delete (was plain `delete`)
			return false;
		}
		else
			get_int(&string);
	}

	FuncType* functype = new FuncType[num_layers-1];

	// Temporary weight store so the net is only resized after the whole
	// weight section parsed successfully.
	float*** weight=new float**[num_layers];
	for (i=1; i<num_layers; i++) {
		weight[i]  = new float*[Units[i]+1];
		for (j=1; j<=Units[i]; j++)
			weight[i][j] = new float[Units[i-1]+1];
	}

	for (l=1; l<num_layers; l++) {
		for (i=0; i<=Units[l-1]; i++) {
			for (j=1; j<=Units[l]; j++) {
				if (!sscanf(string,"%f",&weight[l][j][i]))
					goto destruct;
				else
					get_float(&string);
			}
		}
	}

	char t[100];
	for(l=1; l<num_layers; l++){
		get_word(&string);
		sscanf(string,"%s",&t[0]);
		if (strcmp(t,"logsig") ==0){
			functype[l-1] = logsig;
			string += 6;
		}
		else if(strcmp(t,"tansig") ==0){
			functype[l-1] = tansig;
			string += 6;
		}
		else if(strcmp(t,"purelin") ==0){
			functype[l-1] = purelin;
			string += 7;
		}
	}

	Success = true;

	Resize(num_layers, Units);

	for (l=1; l<num_layers; l++) {
		for (i=1; i<=Layers[l].units; i++) {
			for (j=0; j<=Layers[l-1].units; j++) {
				Layers[l].Weight[i][j] = weight[l][i][j];
			}    
		}
		Layers[l].functype = functype[l-1];
	}
	
destruct:
	// Cleanup runs on both success and weight-parse failure.
	for (i=1; i<num_layers; i++) {
		for (j=1; j<=Units[i]; j++){
			delete[] weight[i][j];	// BUGFIX: array delete
		}
		delete[] weight[i];	// BUGFIX: array delete
	}
	// NOTE(review): these members are overwritten even when parsing
	// failed (Success == false), leaving the object inconsistent —
	// confirm callers' expectations before changing this behavior.
	NUM_LAYERS = num_layers;
	INPUT_SIZE = Units[0];
	OUTPUT_SIZE = Units[num_layers-1];
	
	delete[] weight;	// BUGFIX: array delete
	delete[] Units;	// BUGFIX: array delete
	delete[] functype;	// BUGFIX: array delete
	return Success;
}

// Load a net from a text file by slurping it into a fixed buffer and
// delegating to RestoreNetFromString(). `mode` is currently unused.
bool BPN::RestoreNet(char* filename, int mode){
	FILE *fp = fopen(filename, "r");
	if (fp == NULL) return false;

	char string[2000];
	// BUGFIX: the old `while(!feof(fp))` character loop could overrun
	// the 2000-byte buffer on larger files; fread bounds the read and
	// avoids the classic feof() extra-iteration pitfall as well.
	size_t len = fread(string, 1, sizeof(string) - 1, fp);
	string[len] = 0;

	fclose(fp);
	return RestoreNetFromString(string);
}

// Return the error accumulated by the most recent output-error
// computation (ComputeOutputError / SetOutputError).
REAL BPN::GetError(){
	return Error;
}

// Export the back-propagated error at the input layer (bias slot 0 is
// skipped). Meaningful only after a backward pass.
void BPN::GetInputError(REAL* Errors){
	for(int k=1; k<=InputLayer->units; k++)
		Errors[k-1] = InputLayer->Error[k];
}

// Set the expected raw input range [min, max]; rejects inverted ranges.
bool BPN::SetInputRange(REAL min, REAL max){
	if(max < min) return false;
	Input_MIN = min;
	Input_MAX = max;
	return true;
}

// Set the expected raw target range [min, max]; rejects inverted ranges.
bool BPN::SetTargetRange(REAL min, REAL max){
	if(max < min) return false;
	Target_MIN = min;
	Target_MAX = max;
	return true;
}

// Set the internal ranges that normalization maps onto; rejects either
// range being inverted.
bool BPN::SetInsideRange(REAL input_min, REAL input_max, REAL target_min, REAL target_max){
	if(input_max < input_min) return false;
	if(target_max < target_min) return false;
	Input_LO = input_min;
	Input_HI = input_max;
	Target_LO = target_min;
	Target_HI = target_max;
	return true;
}

// Set the activation function of one layer; returns false for an
// out-of-range layer index.
bool BPN::SetFunctype(FuncType functype, int layer){
	bool valid = (layer >= 0 && layer < NUM_LAYERS);
	if (valid)
		Layers[layer].SetFuncType(functype);
	return valid;
}

// Select the training strategy used by TrainNet (adapt or batch).
void BPN::SetTraintype(TrainType traintype){
	this->traintype = traintype;
}

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
狠狠网亚洲精品| 久久婷婷综合激情| 国产精品99久久久久久宅男| 午夜天堂影视香蕉久久| 国产精品国产三级国产| 国产午夜精品一区二区三区嫩草| 69堂精品视频| 日韩欧美中文字幕公布| 精品国产网站在线观看| 久久综合久久综合亚洲| 337p日本欧洲亚洲大胆精品| 久久五月婷婷丁香社区| 国产精品天美传媒沈樵| 国产精品不卡视频| 夜夜揉揉日日人人青青一国产精品 | 中文字幕一区二区三区不卡在线| 欧美一二区视频| 精品成人a区在线观看| 精品国产一区二区三区忘忧草 | 国产精品久久久久四虎| 国产精品美女久久久久久久| 日韩久久一区二区| 亚洲国产精品久久久久婷婷884 | 91偷拍与自偷拍精品| 色婷婷亚洲精品| 91精品国产91久久综合桃花 | 成人av在线电影| 色狠狠一区二区| 欧美日韩一卡二卡三卡| 欧美成人bangbros| 国产精品高清亚洲| 日韩精品欧美精品| 国产福利视频一区二区三区| 色婷婷综合久色| 精品99999| 亚洲精品亚洲人成人网在线播放| 水野朝阳av一区二区三区| 国精产品一区一区三区mba桃花| 波多野洁衣一区| 欧美一二三四区在线| 欧美激情综合五月色丁香小说| 亚洲一区二区三区在线播放| 日韩avvvv在线播放| aa级大片欧美| 日韩欧美一区在线| 亚洲影院在线观看| 国产成人在线影院| 51精品国自产在线| 亚洲精品福利视频网站| 久久99精品久久久久久国产越南| 99精品桃花视频在线观看| 日韩欧美的一区二区| 一区二区不卡在线视频 午夜欧美不卡在| 蜜臀av一区二区| 欧美亚洲高清一区| 亚洲欧美在线观看| 国产91清纯白嫩初高中在线观看| 欧美精品视频www在线观看| 亚洲视频你懂的| 国产剧情av麻豆香蕉精品| 欧美一区二区视频免费观看| 一区二区三区中文字幕电影| 粉嫩高潮美女一区二区三区 | 国产精品一二三区| 欧美成人激情免费网| 日韩成人免费看| 欧美日韩一区二区三区四区| 一区在线观看视频| 风流少妇一区二区| 久久久久成人黄色影片| 国内精品久久久久影院一蜜桃| 欧美视频日韩视频在线观看| 亚洲色图一区二区| 99久久99久久免费精品蜜臀| 国产婷婷色一区二区三区四区| 成人毛片老司机大片| 日韩欧美国产成人一区二区| 天堂成人国产精品一区| 欧美日韩精品综合在线| 亚洲成人精品影院| 欧美日韩免费电影| 午夜精品久久久久久久久久久| 欧美亚日韩国产aⅴ精品中极品| 亚洲欧美在线观看| 99视频超级精品| 亚洲欧美日韩国产中文在线| 91视频在线观看| 亚洲国产成人tv| 91精品欧美一区二区三区综合在| 日本伊人色综合网| 久久一留热品黄| 成人aaaa免费全部观看| 日韩伦理av电影| 欧美日韩一区二区三区在线看 | 日韩av中文在线观看| 欧美一区二区三区四区视频| 韩日欧美一区二区三区| 国产日韩欧美一区二区三区乱码 | 欧美精品v国产精品v日韩精品| 日本中文在线一区| 久久嫩草精品久久久精品| 粉嫩av一区二区三区在线播放| 国产精品伦理在线| 欧美日韩欧美一区二区| 另类的小说在线视频另类成人小视频在线| 日韩视频免费观看高清完整版| 国产一区二区影院| 亚洲久本草在线中文字幕| 欧美日韩一区二区三区不卡| 国产一区在线不卡| 亚洲人被黑人高潮完整版| 在线不卡中文字幕| 在线播放亚洲一区| 国产精品亚洲人在线观看| 亚洲丝袜自拍清纯另类| 欧美一级电影网站| 色94色欧美sute亚洲线路一久| 奇米色一区二区| 18欧美亚洲精品| 制服丝袜国产精品| 99久久综合狠狠综合久久| 午夜av一区二区三区| 国产精品美女www爽爽爽| 在线成人高清不卡| 成人高清免费观看| 麻豆精品久久久| 一区二区三区电影在线播| 久久精品一级爱片| 欧美精品一卡二卡| 色哟哟精品一区| 国产成人精品三级| 老司机精品视频导航| 亚洲国产精品一区二区www| 久久综合中文字幕| 7777精品伊人久久久大香线蕉最新版| 豆国产96在线|亚洲| 蜜桃视频在线观看一区二区| 亚洲国产乱码最新视频| 国产精品久久久久久久久久免费看| 777xxx欧美| 欧美日韩国产首页在线观看| 91色porny| 成人国产亚洲欧美成人综合网| 精东粉嫩av免费一区二区三区| 婷婷开心久久网| 香港成人在线视频| 一区二区三区国产豹纹内裤在线| 中文字幕乱码亚洲精品一区| 亚洲精品在线一区二区| 精品国产91九色蝌蚪| 欧美一级午夜免费电影| 欧美日韩精品一区二区三区蜜桃 | 91精品国产色综合久久不卡电影| 91香蕉视频mp4| 色哟哟一区二区三区| 99精品黄色片免费大全| 99精品欧美一区二区蜜桃免费| 成人av在线电影| 北条麻妃国产九九精品视频| 北岛玲一区二区三区四区| 
日韩免费高清视频| 日韩手机在线导航| 日韩午夜av一区| 国产亚洲一区字幕| 国产精品入口麻豆原神| 中文在线资源观看网站视频免费不卡| 久久久久久久网| 欧美激情在线免费观看| 国产精品乱码一区二区三区软件 | 日韩西西人体444www| 欧美mv日韩mv国产网站app| 亚洲精品一区二区三区香蕉| 国产夜色精品一区二区av| 国产日韩欧美精品在线| 亚洲欧洲无码一区二区三区| 夜夜亚洲天天久久| 免费成人深夜小野草| 岛国精品一区二区| 91久久免费观看| 欧美一级专区免费大片| 久久精品视频一区二区三区| 亚洲欧美综合另类在线卡通| 亚洲国产一二三| 国产一区二区精品久久99| 成人在线综合网站| 欧美日韩卡一卡二| 久久久欧美精品sm网站| 亚洲免费观看高清完整| 日韩av电影免费观看高清完整版| 国产精品911| 在线精品视频免费观看| 日韩亚洲欧美中文三级| 一区视频在线播放| 六月丁香婷婷久久| 97se狠狠狠综合亚洲狠狠| 日韩三级.com| 有码一区二区三区| 国产精品资源网| 欧美久久高跟鞋激| 亚洲色图都市小说|