

parser.java

Java lexical analyzer/parser, usable for translating general C, C++, VB, and PL/SQL statements.
JAVA
package fri.patterns.interpreter.parsergenerator;

import java.util.*;
import java.io.*;

import fri.patterns.interpreter.parsergenerator.syntax.Rule;

/**
    The universal bottom-up parser algorithm. Runs with a Lexer (containing the input),
    ParserTables (containing the syntax), and a Semantic (optional).
    <pre>
    private static final String [][] syntax =
    {
        { "Start", "\"Hello\"", "\"World\"" },
        { Token.IGNORED, "`whitespaces`" },
    };
    SyntaxSeparation separation = new SyntaxSeparation(new Syntax(syntax));
    LexerBuilder builder = new LexerBuilder(separation.getLexerSyntax(), separation.getIgnoredSymbols());
    Lexer lexer = builder.getLexer();
    lexer.setInput("\tHello \r\n\tWorld\n");
    ParserTables parserTables = new SLRParserTables(separation.getParserSyntax());
    Parser parser = new Parser(parserTables);
    parser.parse(lexer, new PrintSemantic());
    </pre>
    TODO: implement error recovery: method recover()

    @author (c) 2000, Fritz Ritzberger
*/

public class Parser implements Serializable
{
    private Lexer lexer;
    private ParserTables tables;
    private transient Semantic semantic;
    protected Stack stateStack = new Stack();
    protected Stack valueStack = new Stack();
    protected Stack rangeStack = new Stack();
    private transient Object result;
    private transient List inputTokens;
    private transient List rangeList;
    private transient Token.Range range = new Token.Range(null, null);
    private transient PrintStream out;
    private boolean passExpectedToLexer = true;
    // private boolean showConflicts;
    private boolean DEBUG;

    /**
        Create a generic bottom-up Parser with passed ParserTables (representing the current syntax to apply).
        @param tables ParserTables representing the syntax.
    */
    public Parser(ParserTables tables)  {
        this.tables = tables;
    }

    /** Returns the parsing result built from Semantic call return values. Retrievable after parsing. */
    public Object getResult()   {
        return result;
    }

//  /** Sets if SHIFT/REDUCE or REDUCE/REDUCE conflicts will be shown on System.err. */
//  public void setShowConflicts(boolean showConflicts)  {
//      this.showConflicts = showConflicts;
//  }

    /**
        Sets the lexer to be used for parsing. The Lexer contains (or will contain) the input to parse.
        The Parser calls <i>setTerminals()</i> on this call.
    */
    public void setLexer(Lexer lexer)   {
        boolean initLexer = (this.lexer != lexer);  // look if passed lexer needs terminals
        this.lexer = lexer;
        clear();    // clear if reused
        if (initLexer)
            lexer.setTerminals(getParserTables().getTerminals());   // pass terminals to lexer
    }

    /** Returns the lexer that was set to this parser, to call <i>setInput()</i> to the lexer. */
    public Lexer getLexer() {
        return lexer;
    }

    /** Sets the input to contained lexer, or throws IllegalStateException if no lexer was set. */
    public void setInput(Object input)
        throws IOException
    {
        if (lexer == null)
            throw new IllegalStateException("Can not set input when no lexer was defined!");
        clear();    // clear if reused
        lexer.setInput(input);
    }

    /** Sets the semantic to be applied to parsing results. */
    public void setSemantic(Semantic semantic)  {
        this.semantic = semantic;
    }

    /** Returns the semantic that was set to this parser. */
    public Semantic getSemantic()   {
        return semantic;
    }

    /** Returns current ParserTables. */
    public ParserTables getParserTables()   {
        return tables;
    }

    /** Default is true. When true, the Parser will pass a Map of expected symbols to Lexer at every token request. */
    public void setPassExpectedToLexer(boolean passExpectedToLexer) {
        this.passExpectedToLexer = passExpectedToLexer;
    }


    // bottom-up state machine methods

    private Integer top()   {
        return (Integer) stateStack.peek();
    }

    private void push(Integer state, Object result, Token.Range range) {
        stateStack.push(state);
        semanticPush(result, range);
    }

    private void pop(int pops)  {
        inputTokens = new ArrayList();
        rangeList = new ArrayList();

        for (int i = 0; i < pops; i++)  {
            stateStack.pop();
            semanticPop(i, pops);
        }
    }

    private void semanticPush(Object result, Token.Range range)    {
        if (semantic != null)   {   // when a semantic is present
            valueStack.push(result);    // we need to know parse result
            rangeStack.push(range);     // and its start-end positions within input text
        }
    }

    private void semanticPop(int popIndex, int countOfPops) {
        if (semantic != null)   {
            // the value pop
            inputTokens.add(0, valueStack.pop());

            // the range pop
            Token.Range currentRange = (Token.Range) rangeStack.pop();
            rangeList.add(0, currentRange);

            if (popIndex == 0)  // first pop of right side holds last token value
                this.range = new Token.Range(null, currentRange.end);   // helper to remember end address

            if (popIndex == countOfPops - 1)    // if it is the last pop, make a valid range for next push()
                this.range = new Token.Range(currentRange.start, this.range.end);
        }
    }

    /**
        Reduce a rule when input satisfied it. Pop the stack n times, n is the number of right symbols of the rule.
        Semantic gets called with all input tokens corresponding to the rule, if not null.
        A new state gets pushed, determined by the new state (after pops) and the nonterminal of the rule (left side).
    */
    protected void reduce(Integer ruleIndex)    {
        if (DEBUG)
            dump("reduce "+ruleIndex);

        Rule rule = getParserTables().getSyntax().getRule(ruleIndex.intValue());
        pop(rule.rightSize());  // pop count of elements on right side

        semanticReduce(rule);

        String nonterminal = rule.getNonterminal();
        push(getParserTables().getGotoState(top(), nonterminal), result, range);

        dumpStack();
    }

    private void semanticReduce(Rule rule)  {
        if (semantic != null)   {
            result = semantic.doSemantic(rule, inputTokens, rangeList);
        }
    }

    /**
        Push a new state upon state stack, determined by the GOTO table with current state
        and the received token symbol. Then read a new token from Lexer, trying to evaluate a rule.
    */
    protected Token shift(Token token)
        throws IOException
    {
        if (DEBUG)
            dump("shift from token symbol >"+token.symbol+"<");

        push(getParserTables().getGotoState(top(), token.symbol), token.text, token.range);
        dumpStack();

        Token newToken = getNextToken();

        if (DEBUG)
            dump("next token "+newToken.symbol+" >"+newToken.text+"<");

        return newToken;
    }

    /** Delivers the next token from lexer to parser. Override to convert the Token value. */
    protected Token getNextToken()
        throws IOException
    {
        Map expected = passExpectedToLexer && top().intValue() >= 0 ? getParserTables().getExpected(top()) : null;
        Token token = lexer.getNextToken(expected);
        return token;
    }


    // public parsing methods

    /**
        Parse the tokens returned from passed lexer. This call is for checking correctness without semantics.
        @param lexer the Lexer, loaded with input to scan.
        @return true when input was syntactically correct.
    */
    public boolean parse(Lexer lexer)
        throws IOException
    {
        setLexer(lexer);
        return parse();
    }

    /**
        Parse the tokens returned from passed lexer. This call is for processing input with semantics.
        At least <i>setLexer()</i> must have been called before.
        @param semantic the semantic to apply to parser results.
        @return true when input was syntactically correct.
    */
    public boolean parse(Semantic semantic)
        throws IOException
    {
        if (lexer == null)
            throw new IllegalStateException("No lexer was defined to scan input!");
        setSemantic(semantic);
        return parse();
    }

    /**
        Parse the tokens returned from passed input.
        At least <i>setLexer()</i> must have been called before.
        @param input the input to parse, as File, InputStream, String, ....
        @return true when input was syntactically correct.
    */
    public boolean parse(Object input)
        throws IOException
    {
        setInput(input);
        return parse();
    }

    /**
        Parse the tokens returned from passed lexer. This call is for integrating a semantic.
        @param lexer Lexer containing the input to parse
        @param semantic the semantic to apply to parser results.
        @return true when input was syntactically correct.
    */
    public boolean parse(Lexer lexer, Semantic semantic)
        throws IOException
    {
        setLexer(lexer);
        setSemantic(semantic);
        return parse();
    }

    /**
        Parse the tokens returned from passed lexer. This call is for integrating a semantic.
        @param input the input to parse, as File, InputStream, String, ....
        @param semantic the semantic to apply to parser results.
        @return true when input was syntactically correct.
    */
    public boolean parse(Object input, Semantic semantic)
        throws IOException
    {
        setInput(input);
        setSemantic(semantic);
        return parse();
    }


    /**
        Start parsing after setting Lexer and optionally Semantic. At least <i>setLexer()</i> must have been called before.
        <p>
        Init the parser, read first token, push state 0 and set action to SHIFT.
        Loop while action is not ERROR or ACCEPT, and token symbol is not ERROR, and top of stack is not ERROR.
        Within loop, get next action from PARSE-ACTION table using current state and token symbol.
        When action greater than zero, call reduce(), else when action is SHIFT, call shift().
        @return true when input was syntactically correct.
    */
    public boolean parse()
        throws IOException
    {
        stateStack.push(new Integer(0));    // push first state on stack
        Integer action = ParserTables.SHIFT;    // some allowed initial value
        Token token = getNextToken();   // start reading input

        if (DEBUG)
            dump("initial token symbol >"+token.symbol+"<, text >"+token.text+"<");

        while (token.symbol != null &&  // lexer error
                action.equals(ParserTables.ACCEPT) == false &&  // input accepted
                action.equals(ParserTables.ERROR) == false &&   // parse-action table error
                top().equals(ParserTables.ERROR) == false)  // goto table error
        {
            action = getParserTables().getParseAction(top(), token.symbol);

            if (action.intValue() > 0)
                reduce(action);
            else
            if (action.equals(ParserTables.SHIFT))
                token = shift(token);

            action = recover(action, token);    // recover if error
        }

        return detectError(token, top(), action);
    }

    /**
        Recover from error. Not implemented.
        @param action current action from PARSE-ACTION table.
        @param token recently received Token.
        @return action to proceed with. Token.symbol may not be null and current state may not be ERROR after this call.
    */
    protected Integer recover(Integer action, Token token)  {
        return action;
    }


    /**
        Called after parse loop to determine if everything was OK.
        @return true when action is ACCEPT, token.symbol is EPSILON, and state is not ERROR.
    */
    protected boolean detectError(Token token, Integer state, Integer action)   {
        boolean ret = true;

        if (token.symbol == null || action.equals(ParserTables.ERROR))  {
            if (token.symbol == null)
                ensureOut().println("ERROR: Unknown symbol: >"+token.text+"<, state "+state);
            else
                ensureOut().println("ERROR: Wrong symbol: "+(Token.isEpsilon(token) ? "EOF" : token.symbol+", text: >"+token.text+"<")+", state "+state);

            lexer.dump(out);

            Map h = getParserTables().getExpected(state);
            if (h != null)  {
                ensureOut().print("Expected was (one of): ");

                for (Iterator it = h.keySet().iterator(); it.hasNext(); )   {
                    String s = (String) it.next();
                    ensureOut().print((Token.isEpsilon(s) ? "EOF" : s)+(it.hasNext() ? ", " : ""));
                }
                ensureOut().println();
            }
            ret = false;
        }
        else
        if (state.equals(ParserTables.ERROR))   {   // ERROR lies on stack, from SHIFT
            pop(1);
            ensureOut().println("ERROR: found no possible follow state for "+top()+", text >"+token.text+"<");
            lexer.dump(out);
            ret = false;
        }
        else
        if (Token.isEpsilon(token) == false)    {
            ensureOut().println("ERROR: Input is not finished.");
            lexer.dump(out);
            ret = false;
        }
        else
        if (action.equals(ParserTables.ACCEPT) == false)    {
            ensureOut().println("ERROR: Could not achieve ACCEPT. Symbol: "+token.symbol);
            lexer.dump(out);
            ret = false;
        }

        if (ret == false)
            result = null;

        return ret;
    }

    private void clear()    {
        stateStack.removeAllElements();
        valueStack.removeAllElements();
        rangeStack.removeAllElements();
        range = new Token.Range(null, null);
        inputTokens = null;
        result = null;
        if (lexer != null)
            lexer.clear();
    }

    private void dumpStack()    {
        if (DEBUG)  {
            ensureOut().print("stack: ");
            for (int i = 0; i < stateStack.size(); i++)
                ensureOut().print(stateStack.elementAt(i)+" ");
            ensureOut().println();
        }
    }

    private void dump(String s) {
        ensureOut().println(s);
    }

    private PrintStream ensureOut() {
        if (out == null)
            out = System.err;
        return out;
    }

    /** Debug output will go to passed stream. */
    public void setPrintStream(PrintStream out) {
        this.out = (out != null) ? out : System.err;
    }

    /** Set the debug mode. */
    public void setDebug(boolean debug) {
        DEBUG = debug;
    }

}
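For reference, below is a minimal usage sketch of this Parser, adapted from the example in the class Javadoc above. The helper classes it relies on (SyntaxSeparation, LexerBuilder, SLRParserTables, PrintSemantic) are not defined in this file, so the wildcard imports, their sub-package locations, and the class name HelloWorldParserExample are assumptions about the surrounding library, not part of parser.java.

// Minimal sketch: parse "Hello World" with a two-rule grammar, as in the class Javadoc.
// Assumed sub-package locations are marked below; adjust them to the actual library layout.
package fri.patterns.interpreter.parsergenerator;

import fri.patterns.interpreter.parsergenerator.syntax.*;        // Syntax, SyntaxSeparation (assumed location)
import fri.patterns.interpreter.parsergenerator.lexer.*;         // LexerBuilder (assumed location)
import fri.patterns.interpreter.parsergenerator.parsertables.*;  // SLRParserTables (assumed location)
import fri.patterns.interpreter.parsergenerator.semantics.*;     // PrintSemantic (assumed location)

public class HelloWorldParserExample
{
    public static void main(String[] args) throws Exception
    {
        // Grammar: "Start" derives the fixed tokens "Hello" and "World";
        // whitespace is declared as an ignored token (taken from the Javadoc example).
        String [][] syntax = {
            { "Start", "\"Hello\"", "\"World\"" },
            { Token.IGNORED, "`whitespaces`" },
        };

        // Split the grammar into its lexer part and its parser part.
        SyntaxSeparation separation = new SyntaxSeparation(new Syntax(syntax));

        // Build a lexer for the terminal and ignored symbols, and load the input text.
        LexerBuilder builder = new LexerBuilder(separation.getLexerSyntax(), separation.getIgnoredSymbols());
        Lexer lexer = builder.getLexer();
        lexer.setInput("\tHello \r\n\tWorld\n");

        // Build SLR parser tables from the parser grammar and run the parser with a semantic.
        ParserTables parserTables = new SLRParserTables(separation.getParserSyntax());
        Parser parser = new Parser(parserTables);
        boolean ok = parser.parse(lexer, new PrintSemantic());  // PrintSemantic reports each reduced rule

        System.err.println("Parse "+(ok ? "succeeded" : "failed")+", result: "+parser.getResult());
    }
}

The convenience overloads parse(Object input), parse(Semantic) and parse(Object, Semantic) shown in the listing can replace the explicit setLexer()/setInput() calls once a lexer has been set.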
