
DependencyDecoder.java

MSTParser is a discriminative dependency parser built on the maximum spanning tree framework: the score of a dependency tree is taken to be the sum of the scores of all of its dependency relations (edges).
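Before the full source, here is a minimal self-contained sketch of that edge-factored idea (an illustration only, not part of MSTParser; the class name EdgeFactoredScoreDemo, the score matrix, and the parent array are made up): the score of a candidate tree is simply the sum of the scores of its head-to-modifier edges, and that sum is the quantity both decoders below maximize.

// Illustration only: edge-factored tree scoring, the idea behind MSTParser.
// scoreMatrix[h][m] is an assumed score for the edge head h -> modifier m;
// par[m] is the chosen head of word m (par[0] = -1 for the artificial root).
public final class EdgeFactoredScoreDemo {
    static double treeScore(double[][] scoreMatrix, int[] par) {
        double total = 0.0;
        for (int m = 1; m < par.length; m++)     // word 0 is the artificial root
            total += scoreMatrix[par[m]][m];     // tree score = sum of edge scores
        return total;
    }

    public static void main(String[] args) {
        // toy 3-word sentence (plus root at index 0) with made-up edge scores
        double[][] s = {
            {0.0, 2.0, 0.5, 0.1},
            {0.0, 0.0, 1.5, 0.2},
            {0.0, 0.3, 0.0, 1.0},
            {0.0, 0.1, 0.4, 0.0}
        };
        int[] par = {-1, 0, 1, 2};               // chain root -> 1 -> 2 -> 3
        System.out.println(treeScore(s, par));   // prints 4.5
    }
}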
JAVA
package mstparser;

import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.StringTokenizer;

import gnu.trove.*;

public class DependencyDecoder {

    DependencyPipe pipe;

    public DependencyDecoder(DependencyPipe pipe) {
        this.pipe = pipe;
    }

    protected int[][] getTypes(double[][][][] nt_probs, int len) {
        int[][] static_types = new int[len][len];
        for (int i = 0; i < len; i++) {
            for (int j = 0; j < len; j++) {
                if (i == j) { static_types[i][j] = 0; continue; }
                int wh = -1;
                double best = Double.NEGATIVE_INFINITY;
                for (int t = 0; t < pipe.types.length; t++) {
                    double score = 0.0;
                    if (i < j)
                        score = nt_probs[i][t][0][1] + nt_probs[j][t][0][0];
                    else
                        score = nt_probs[i][t][1][1] + nt_probs[j][t][1][0];
                    if (score > best) { wh = t; best = score; }
                }
                static_types[i][j] = wh;
            }
        }
        return static_types;
    }

    // static type for each edge: run time O(n^3 + Tn^2), T is number of types
    public Object[][] decodeProjective(DependencyInstance inst,
                                       FeatureVector[][][] fvs,
                                       double[][][] probs,
                                       FeatureVector[][][][] nt_fvs,
                                       double[][][][] nt_probs, int K) {
        String[] toks = inst.sentence;
        String[] pos = inst.pos;

        int[][] static_types = null;
        if (pipe.labeled) {
            static_types = getTypes(nt_probs, toks.length);
        }

        KBestParseForest pf = new KBestParseForest(0, toks.length - 1, inst, K);

        for (int s = 0; s < toks.length; s++) {
            pf.add(s, -1, 0, 0.0, new FeatureVector(-1, -1.0, null));
            pf.add(s, -1, 1, 0.0, new FeatureVector(-1, -1.0, null));
        }

        for (int j = 1; j < toks.length; j++) {
            for (int s = 0; s < toks.length && s + j < toks.length; s++) {
                int t = s + j;

                FeatureVector prodFV_st = fvs[s][t][0];
                FeatureVector prodFV_ts = fvs[s][t][1];
                double prodProb_st = probs[s][t][0];
                double prodProb_ts = probs[s][t][1];

                int type1 = pipe.labeled ? static_types[s][t] : 0;
                int type2 = pipe.labeled ? static_types[t][s] : 0;

                FeatureVector nt_fv_s_01 = nt_fvs[s][type1][0][1];
                FeatureVector nt_fv_s_10 = nt_fvs[s][type2][1][0];
                FeatureVector nt_fv_t_00 = nt_fvs[t][type1][0][0];
                FeatureVector nt_fv_t_11 = nt_fvs[t][type2][1][1];
                double nt_prob_s_01 = nt_probs[s][type1][0][1];
                double nt_prob_s_10 = nt_probs[s][type2][1][0];
                double nt_prob_t_00 = nt_probs[t][type1][0][0];
                double nt_prob_t_11 = nt_probs[t][type2][1][1];

                double prodProb = 0.0;

                for (int r = s; r <= t; r++) {
                    /** first is direction, second is complete */
                    /** _s means s is the parent */
                    if (r != t) {
                        ParseForestItem[] b1 = pf.getItems(s, r, 0, 0);
                        ParseForestItem[] c1 = pf.getItems(r + 1, t, 1, 0);
                        if (b1 != null && c1 != null) {
                            int[][] pairs = pf.getKBestPairs(b1, c1);
                            for (int k = 0; k < pairs.length; k++) {
                                if (pairs[k][0] == -1 || pairs[k][1] == -1)
                                    break;
                                int comp1 = pairs[k][0]; int comp2 = pairs[k][1];
                                double bc = b1[comp1].prob + c1[comp2].prob;

                                double prob_fin = bc + prodProb_st;
                                FeatureVector fv_fin = prodFV_st;
                                if (pipe.labeled) {
                                    fv_fin = FeatureVector.cat(nt_fv_s_01, FeatureVector.cat(nt_fv_t_00, fv_fin));
                                    prob_fin += nt_prob_s_01 + nt_prob_t_00;
                                }
                                pf.add(s, r, t, type1, 0, 1, prob_fin, fv_fin, b1[comp1], c1[comp2]);

                                prob_fin = bc + prodProb_ts;
                                fv_fin = prodFV_ts;
                                if (pipe.labeled) {
                                    fv_fin = FeatureVector.cat(nt_fv_t_11, FeatureVector.cat(nt_fv_s_10, fv_fin));
                                    prob_fin += nt_prob_t_11 + nt_prob_s_10;
                                }
                                pf.add(s, r, t, type2, 1, 1, prob_fin, fv_fin, b1[comp1], c1[comp2]);
                            }
                        }
                    }
                }

                for (int r = s; r <= t; r++) {
                    if (r != s) {
                        ParseForestItem[] b1 = pf.getItems(s, r, 0, 1);
                        ParseForestItem[] c1 = pf.getItems(r, t, 0, 0);
                        if (b1 != null && c1 != null) {
                            int[][] pairs = pf.getKBestPairs(b1, c1);
                            for (int k = 0; k < pairs.length; k++) {
                                if (pairs[k][0] == -1 || pairs[k][1] == -1)
                                    break;
                                int comp1 = pairs[k][0]; int comp2 = pairs[k][1];
                                double bc = b1[comp1].prob + c1[comp2].prob;
                                if (!pf.add(s, r, t, -1, 0, 0, bc,
                                            new FeatureVector(-1, -1.0, null),
                                            b1[comp1], c1[comp2]))
                                    break;
                            }
                        }
                    }

                    if (r != t) {
                        ParseForestItem[] b1 = pf.getItems(s, r, 1, 0);
                        ParseForestItem[] c1 = pf.getItems(r, t, 1, 1);
                        if (b1 != null && c1 != null) {
                            int[][] pairs = pf.getKBestPairs(b1, c1);
                            for (int k = 0; k < pairs.length; k++) {
                                if (pairs[k][0] == -1 || pairs[k][1] == -1)
                                    break;
                                int comp1 = pairs[k][0]; int comp2 = pairs[k][1];
                                double bc = b1[comp1].prob + c1[comp2].prob;
                                if (!pf.add(s, r, t, -1, 1, 0, bc,
                                            new FeatureVector(-1, -1.0, null), b1[comp1], c1[comp2]))
                                    break;
                            }
                        }
                    }
                }
            }
        }

        return pf.getBestParses();
    }

    public Object[][] decodeNonProjective(DependencyInstance inst,
                                          FeatureVector[][][] fvs,
                                          double[][][] probs,
                                          FeatureVector[][][][] nt_fvs,
                                          double[][][][] nt_probs, int K) {
        String[] pos = inst.pos;
        int numWords = inst.sentence.length;
        int[][] oldI = new int[numWords][numWords];
        int[][] oldO = new int[numWords][numWords];
        double[][] scoreMatrix = new double[numWords][numWords];
        double[][] orig_scoreMatrix = new double[numWords][numWords];
        boolean[] curr_nodes = new boolean[numWords];
        TIntIntHashMap[] reps = new TIntIntHashMap[numWords];

        int[][] static_types = null;
        if (pipe.labeled) {
            static_types = getTypes(nt_probs, pos.length);
        }

        for (int i = 0; i < numWords; i++) {
            curr_nodes[i] = true;
            reps[i] = new TIntIntHashMap();
            reps[i].put(i, 0);
            for (int j = 0; j < numWords; j++) {
                // score of edge (i,j) i --> j
                scoreMatrix[i][j] = probs[i < j ? i : j][i < j ? j : i][i < j ? 0 : 1]
                    + (pipe.labeled ? nt_probs[i][static_types[i][j]][i < j ? 0 : 1][1]
                       + nt_probs[j][static_types[i][j]][i < j ? 0 : 1][0]
                       : 0.0);
                orig_scoreMatrix[i][j] = probs[i < j ? i : j][i < j ? j : i][i < j ? 0 : 1]
                    + (pipe.labeled ? nt_probs[i][static_types[i][j]][i < j ? 0 : 1][1]
                       + nt_probs[j][static_types[i][j]][i < j ? 0 : 1][0]
                       : 0.0);
                oldI[i][j] = i;
                oldO[i][j] = j;
                if (i == j || j == 0) continue; // no self loops of i --> 0
            }
        }

        TIntIntHashMap final_edges = chuLiuEdmonds(scoreMatrix, curr_nodes, oldI, oldO, false, new TIntIntHashMap(), reps);
        int[] par = new int[numWords];
        int[] ns = final_edges.keys();
        for (int i = 0; i < ns.length; i++) {
            int ch = ns[i]; int pr = final_edges.get(ns[i]);
            par[ch] = pr;
        }

        int[] n_par = getKChanges(par, orig_scoreMatrix, Math.min(K, par.length));
        int new_k = 1;
        for (int i = 0; i < n_par.length; i++)
            if (n_par[i] > -1) new_k++;

        // Create Feature Vectors
        int[][] fin_par = new int[new_k][numWords];
        FeatureVector[][] fin_fv = new FeatureVector[new_k][numWords];
        fin_par[0] = par;
        int c = 1;
        for (int i = 0; i < n_par.length; i++) {
            if (n_par[i] > -1) {
                int[] t_par = new int[par.length];
                for (int j = 0; j < t_par.length; j++)
                    t_par[j] = par[j];
                t_par[i] = n_par[i];
                fin_par[c] = t_par;
                c++;
            }
        }

        for (int k = 0; k < fin_par.length; k++) {
            for (int i = 0; i < fin_par[k].length; i++) {
                int ch = i; int pr = fin_par[k][i];
                if (pr != -1) {
                    fin_fv[k][ch] = fvs[ch < pr ? ch : pr][ch < pr ? pr : ch][ch < pr ? 1 : 0];
                    if (pipe.labeled) {
                        fin_fv[k][ch] = FeatureVector.cat(fin_fv[k][ch], nt_fvs[ch][static_types[pr][ch]][ch < pr ? 1 : 0][0]);
                        fin_fv[k][ch] = FeatureVector.cat(fin_fv[k][ch], nt_fvs[pr][static_types[pr][ch]][ch < pr ? 1 : 0][1]);
                    }
                }
                else
                    fin_fv[k][ch] = new FeatureVector(-1, -1.0, null);
            }
        }

        FeatureVector[] fin = new FeatureVector[new_k];
        String[] result = new String[new_k];
        for (int k = 0; k < fin.length; k++) {
            fin[k] = new FeatureVector(-1, -1.0, null);
            for (int i = 1; i < fin_fv[k].length; i++)
                fin[k] = FeatureVector.cat(fin_fv[k][i], fin[k]);
            result[k] = "";
            for (int i = 1; i < par.length; i++)
                result[k] += fin_par[k][i] + "|" + i + (pipe.labeled ? ":" + static_types[fin_par[k][i]][i] : ":0") + " ";
        }

        // create d
        Object[][] d = new Object[new_k][2];
        for (int k = 0; k < new_k; k++) {
            d[k][0] = fin[k];
            d[k][1] = result[k].trim();
        }
        return d;
    }

    private int[] getKChanges(int[] par, double[][] scoreMatrix, int K) {
        int[] result = new int[par.length];
        int[] n_par = new int[par.length];
        double[] n_score = new double[par.length];
        for (int i = 0; i < par.length; i++) {
            result[i] = -1;
            n_par[i] = -1;
            n_score[i] = Double.NEGATIVE_INFINITY;
        }

        boolean[][] isChild = calcChilds(par);

        for (int i = 1; i < n_par.length; i++) {
            double max = Double.NEGATIVE_INFINITY;
            int wh = -1;
            for (int j = 0; j < n_par.length; j++) {
                if (i == j || par[i] == j || isChild[i][j]) continue;
                if (scoreMatrix[j][i] > max) { max = scoreMatrix[j][i]; wh = j; }
            }
            n_par[i] = wh;
            n_score[i] = max;
        }

        for (int k = 0; k < K; k++) {
            double max = Double.NEGATIVE_INFINITY;
            int wh = -1;
            int whI = -1;
            for (int i = 0; i < n_par.length; i++) {
                if (n_par[i] == -1) continue;
                double score = scoreMatrix[n_par[i]][i];
                if (score > max) {
                    max = score; whI = i; wh = n_par[i];
                }
            }
            if (max == Double.NEGATIVE_INFINITY)
                break;
            result[whI] = wh;
            n_par[whI] = -1;
        }

        return result;
    }

    private boolean[][] calcChilds(int[] par) {
        boolean[][] isChild = new boolean[par.length][par.length];
        for (int i = 1; i < par.length; i++) {
            int l = par[i];
            while (l != -1) {
                isChild[l][i] = true;
                l = par[l];
            }
        }
        return isChild;
    }

    private static TIntIntHashMap chuLiuEdmonds(double[][] scoreMatrix, boolean[] curr_nodes,
                                                int[][] oldI, int[][] oldO, boolean print,
                                                TIntIntHashMap final_edges, TIntIntHashMap[] reps) {
        // need to construct for each node list of nodes they represent (here only!)

        int[] par = new int[curr_nodes.length];
        int numWords = curr_nodes.length;

        // create best graph
        par[0] = -1;
        for (int i = 1; i < par.length; i++) {
            // only interested in current nodes
            if (!curr_nodes[i]) continue;
            double maxScore = scoreMatrix[0][i];
            par[i] = 0;
            for (int j = 0; j < par.length; j++) {
                if (j == i) continue;
                if (!curr_nodes[j]) continue;
                double newScore = scoreMatrix[j][i];
                if (newScore > maxScore) {
                    maxScore = newScore;
                    par[i] = j;
                }
            }
        }

        if (print) {
            System.out.println("After init");
            for (int i = 0; i < par.length; i++) {
                if (curr_nodes[i])
                    System.out.print(par[i] + "|" + i + " ");
            }
            System.out.println();
        }

        // Find a cycle
        ArrayList cycles = new ArrayList();
        boolean[] added = new boolean[numWords];
        for (int i = 0; i < numWords && cycles.size() == 0; i++) {
            // if I have already considered this or
            // this is not a valid node (i.e. has been contracted)
            if (added[i] || !curr_nodes[i]) continue;
            added[i] = true;
            TIntIntHashMap cycle = new TIntIntHashMap();
            cycle.put(i, 0);
            int l = i;
            while (true) {
                if (par[l] == -1) {
                    added[l] = true;
                    break;
                }
                if (cycle.contains(par[l])) {
                    cycle = new TIntIntHashMap();
                    int lorg = par[l];
                    cycle.put(lorg, par[lorg]);
                    added[lorg] = true;
                    int l1 = par[lorg];
                    while (l1 != lorg) {
                        cycle.put(l1, par[l1]);
                        added[l1] = true;
                        l1 = par[l1];
                    }
                    cycles.add(cycle);
                    break;
                }
                cycle.put(l, 0);
                l = par[l];
                if (added[l] && !cycle.contains(l))
                    break;
                added[l] = true;
            }
        }

        // get all edges and return them
        if (cycles.size() == 0) {
            //System.out.println("TREE:");
            for (int i = 0; i < par.length; i++) {
                if (!curr_nodes[i]) continue;
                if (par[i] != -1) {
                    int pr = oldI[par[i]][i];
                    int ch = oldO[par[i]][i];
                    final_edges.put(ch, pr);
                    //System.out.print(pr+"|"+ch + " ");
                }
                else
                    final_edges.put(0, -1);
            }
            //System.out.println();
            return final_edges;
        }

        int max_cyc = 0;
        int wh_cyc = 0;
        for (int i = 0; i < cycles.size(); i++) {
            TIntIntHashMap cycle = (TIntIntHashMap) cycles.get(i);
            if (cycle.size() > max_cyc) { max_cyc = cycle.size(); wh_cyc = i; }
        }

        TIntIntHashMap cycle = (TIntIntHashMap) cycles.get(wh_cyc);
        int[] cyc_nodes = cycle.keys();
        int rep = cyc_nodes[0];

        if (print) {
            System.out.println("Found Cycle");
            for (int i = 0; i < cyc_nodes.length; i++)
                System.out.print(cyc_nodes[i] + " ");
            System.out.println();
        }

        double cyc_weight = 0.0;
        for (int j = 0; j < cyc_nodes.length; j++) {
            cyc_weight += scoreMatrix[par[cyc_nodes[j]]][cyc_nodes[j]];
        }

        for (int i = 0; i < numWords; i++) {
            if (!curr_nodes[i] || cycle.contains(i)) continue;

            double max1 = Double.NEGATIVE_INFINITY;
            int wh1 = -1;
            double max2 = Double.NEGATIVE_INFINITY;
            int wh2 = -1;

            for (int j = 0; j < cyc_nodes.length; j++) {
                int j1 = cyc_nodes[j];
                if (scoreMatrix[j1][i] > max1) {
                    max1 = scoreMatrix[j1][i];
                    wh1 = j1; //oldI[j1][i];
                }
                // cycle weight + new edge - removal of old
                double scr = cyc_weight + scoreMatrix[i][j1] - scoreMatrix[par[j1]][j1];
                if (scr > max2) {
                    max2 = scr;
                    wh2 = j1; //oldO[i][j1];
                }
            }

            scoreMatrix[rep][i] = max1;
            oldI[rep][i] = oldI[wh1][i]; //wh1;
            oldO[rep][i] = oldO[wh1][i]; //oldO[wh1][i];
            scoreMatrix[i][rep] = max2;
            oldO[i][rep] = oldO[i][wh2]; //wh2;
            oldI[i][rep] = oldI[i][wh2]; //oldI[i][wh2];
        }

        TIntIntHashMap[] rep_cons = new TIntIntHashMap[cyc_nodes.length];
        for (int i = 0; i < cyc_nodes.length; i++) {
            rep_cons[i] = new TIntIntHashMap();
            int[] keys = reps[cyc_nodes[i]].keys();
            Arrays.sort(keys);
            if (print) System.out.print(cyc_nodes[i] + ": ");
            for (int j = 0; j < keys.length; j++) {
                rep_cons[i].put(keys[j], 0);
                if (print) System.out.print(keys[j] + " ");
            }
            if (print) System.out.println();
        }

        // don't consider not representative nodes
        // these nodes have been folded
        for (int i = 1; i < cyc_nodes.length; i++) {
            curr_nodes[cyc_nodes[i]] = false;
            int[] keys = reps[cyc_nodes[i]].keys();
            for (int j = 0; j < keys.length; j++)
                reps[rep].put(keys[j], 0);
        }

        chuLiuEdmonds(scoreMatrix, curr_nodes, oldI, oldO, print, final_edges, reps);

        // check each node in cycle, if one of its representatives
        // is a key in the final_edges, it is the one.
        int wh = -1;
        boolean found = false;
        for (int i = 0; i < rep_cons.length && !found; i++) {
            int[] keys = rep_cons[i].keys();
            for (int j = 0; j < keys.length && !found; j++) {
                if (final_edges.contains(keys[j])) {
                    wh = cyc_nodes[i];
                    found = true;
                }
            }
        }

        int l = par[wh];
        while (l != wh) {
            int ch = oldO[par[l]][l];
            int pr = oldI[par[l]][l];
            final_edges.put(ch, pr);
            l = par[l];
        }

        if (print) {
            int[] keys = final_edges.keys();
            Arrays.sort(keys);
            for (int i = 0; i < keys.length; i++)
                System.out.print(final_edges.get(keys[i]) + "|" + keys[i] + " ");
            System.out.println();
        }

        return final_edges;
    }
}
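For reference, decodeNonProjective returns, for each of the K parses, a pair d[k] = { FeatureVector, String }, where the string is a space-separated list of head|child:typeIndex tokens as built in the result[k] loop above. Below is a small sketch of reading such a string back into a parent array; the helper class ParseStringReader and the sample string are hypothetical, only the token format comes from the code above.

// Sketch: read the "head|child:typeIndex" string produced by decodeNonProjective.
// One token per word, words numbered from 1; word 0 is the artificial root.
import java.util.StringTokenizer;

public final class ParseStringReader {
    // returns par[child] = head; par[0] stays -1 for the root
    static int[] readHeads(String parse, int numWords) {
        int[] par = new int[numWords];
        par[0] = -1;
        StringTokenizer st = new StringTokenizer(parse, " ");
        while (st.hasMoreTokens()) {
            String tok = st.nextToken();               // e.g. "0|1:3"
            int bar = tok.indexOf('|');
            int colon = tok.indexOf(':');
            int head = Integer.parseInt(tok.substring(0, bar));
            int child = Integer.parseInt(tok.substring(bar + 1, colon));
            par[child] = head;                         // label index after ':' ignored here
        }
        return par;
    }

    public static void main(String[] args) {
        int[] par = readHeads("0|1:3 1|2:0 1|3:2", 4); // made-up 3-word parse
        for (int i = 1; i < par.length; i++)
            System.out.println("word " + i + " <- head " + par[i]);
    }
}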
