TinyDocParser.java
// $ANTLR : "TinyDoc.g" -> "TinyDocParser.java"$
package isis.anp.nesc;
import antlr.TokenBuffer;
import antlr.TokenStreamException;
import antlr.TokenStreamIOException;
import antlr.ANTLRException;
import antlr.LLkParser;
import antlr.Token;
import antlr.TokenStream;
import antlr.RecognitionException;
import antlr.NoViableAltException;
import antlr.MismatchedTokenException;
import antlr.SemanticException;
import antlr.ParserSharedInputState;
import antlr.collections.impl.BitSet;
import antlr.collections.AST;
import java.util.Hashtable;
import antlr.ASTFactory;
import antlr.ASTPair;
import antlr.collections.impl.ASTArray;
import isis.anp.nesc.tinydoc.Author;
import isis.anp.nesc.tinydoc.Description;
import isis.anp.nesc.tinydoc.Paragraph;
import isis.anp.nesc.tinydoc.Param;
import isis.anp.nesc.tinydoc.Returns;
import isis.anp.nesc.tinydoc.Section;
import isis.anp.nesc.tinydoc.See;
import isis.anp.nesc.tinydoc.TinyDoc;
import antlr.CommonHiddenStreamToken;
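/*
 * Parser generated by ANTLR from TinyDoc.g (see the header comment above).
 * It consumes a tokenized TinyDoc comment -- a Javadoc-style block with
 * @param, @return, @see and @author tags -- and builds both an AST and a
 * TinyDoc object model made of Section subclasses (Description, Param,
 * Returns, See, Author), each carrying Paragraph text.
 */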
public class TinyDocParser extends antlr.LLkParser implements TinyDocParserTokenTypes
{
protected TinyDocParser(TokenBuffer tokenBuf, int k) {
super(tokenBuf,k);
tokenNames = _tokenNames;
buildTokenTypeASTClassMap();
astFactory = new ASTFactory(getTokenTypeToASTClassMap());
}
public TinyDocParser(TokenBuffer tokenBuf) {
this(tokenBuf,1);
}
protected TinyDocParser(TokenStream lexer, int k) {
super(lexer,k);
tokenNames = _tokenNames;
buildTokenTypeASTClassMap();
astFactory = new ASTFactory(getTokenTypeToASTClassMap());
}
public TinyDocParser(TokenStream lexer) {
this(lexer,1);
}
public TinyDocParser(ParserSharedInputState state) {
super(state,1);
tokenNames = _tokenNames;
buildTokenTypeASTClassMap();
astFactory = new ASTFactory(getTokenTypeToASTClassMap());
}
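/*
 * Typical use (a minimal sketch; TinyDocLexer is assumed to be the
 * companion lexer generated from the same grammar -- its actual name is
 * not shown in this file):
 *
 *   TinyDocLexer lexer = new TinyDocLexer(new java.io.StringReader(text));
 *   TinyDocParser parser = new TinyDocParser(lexer);
 *   TinyDoc doc = parser.translationUnit();
 *
 * translationUnit is the start rule: it matches START, an optional
 * untagged first paragraph, any number of @-tagged paragraphs, and END,
 * adding each resulting Section to the returned TinyDoc.
 */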
public final TinyDoc translationUnit() throws RecognitionException, TokenStreamException {
TinyDoc tdObj = new TinyDoc();
returnAST = null;
ASTPair currentAST = new ASTPair();
AST translationUnit_AST = null;
Section sectionObj = null;
try { // for error handling
AST tmp1_AST = null;
tmp1_AST = astFactory.create(LT(1));
astFactory.addASTChild(currentAST, tmp1_AST);
match(START);
{
if ((_tokenSet_0.member(LA(1)))) {
sectionObj=firstParagraph();
astFactory.addASTChild(currentAST, returnAST);
tdObj.add(sectionObj);
}
else if ((_tokenSet_1.member(LA(1)))) {
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
{
_loop4:
do {
if ((_tokenSet_2.member(LA(1)))) {
sectionObj=labeledParagraph();
astFactory.addASTChild(currentAST, returnAST);
tdObj.add(sectionObj);
}
else {
break _loop4;
}
} while (true);
}
AST tmp2_AST = null;
tmp2_AST = astFactory.create(LT(1));
astFactory.addASTChild(currentAST, tmp2_AST);
match(END);
translationUnit_AST = (AST)currentAST.root;
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_3);
}
returnAST = translationUnit_AST;
return tdObj;
}
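/*
 * firstParagraph: the untagged text at the top of the comment. The first
 * sentence (IDs up to an optional terminating DOT) becomes the
 * Description's summary via setFirstSentence; any further ID/DOT tokens
 * are parsed as a paragraph and attached with setParagraph. The AST is
 * wrapped under the imaginary FIRSTPARAGRAPH node.
 */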
public final Description firstParagraph() throws RecognitionException, TokenStreamException {
Description dObj = new Description();
returnAST = null;
ASTPair currentAST = new ASTPair();
AST firstParagraph_AST = null;
Paragraph pObj = null;
try { // for error handling
pObj=firstSentence();
astFactory.addASTChild(currentAST, returnAST);
dObj.setFirstSentence(pObj);
{
switch ( LA(1)) {
case ID:
case DOT:
{
pObj=paragraph();
astFactory.addASTChild(currentAST, returnAST);
dObj.setParagraph(pObj);
break;
}
case END:
case PARAM:
case RETURN:
case SEE:
case AUTHOR:
{
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
firstParagraph_AST = (AST)currentAST.root;
firstParagraph_AST = (AST)astFactory.make( (new ASTArray(2)).add(astFactory.create(FIRSTPARAGRAPH)).add(firstParagraph_AST));
currentAST.root = firstParagraph_AST;
currentAST.child = firstParagraph_AST!=null &&firstParagraph_AST.getFirstChild()!=null ?
firstParagraph_AST.getFirstChild() : firstParagraph_AST;
currentAST.advanceChildToEnd();
firstParagraph_AST = (AST)currentAST.root;
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_1);
}
returnAST = firstParagraph_AST;
return dObj;
}
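/*
 * labeledParagraph: a single @-tagged section. @param (followed by the
 * parameter name) yields a Param, @return a Returns, @see a See and
 * @author an Author; the text after the tag is parsed as a paragraph and
 * attached to that Section.
 */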
public final Section labeledParagraph() throws RecognitionException, TokenStreamException {
Section sObj = null;
returnAST = null;
ASTPair currentAST = new ASTPair();
AST labeledParagraph_AST = null;
Token id = null;
AST id_AST = null;
Paragraph pObj = null;
try { // for error handling
{
switch ( LA(1)) {
case PARAM:
{
AST tmp3_AST = null;
tmp3_AST = astFactory.create(LT(1));
astFactory.makeASTRoot(currentAST, tmp3_AST);
match(PARAM);
{
_loop17:
do {
if ((LA(1)==WS)) {
AST tmp4_AST = null;
tmp4_AST = astFactory.create(LT(1));
astFactory.addASTChild(currentAST, tmp4_AST);
match(WS);
}
else {
break _loop17;
}
} while (true);
}
id = LT(1);
id_AST = astFactory.create(id);
astFactory.addASTChild(currentAST, id_AST);
match(ID);
sObj = new Param();
((Param)sObj).setName(id.getText());
break;
}
case RETURN:
{
AST tmp5_AST = null;
tmp5_AST = astFactory.create(LT(1));
astFactory.makeASTRoot(currentAST, tmp5_AST);
match(RETURN);
sObj = new Returns();
break;
}
case SEE:
{
AST tmp6_AST = null;
tmp6_AST = astFactory.create(LT(1));
astFactory.makeASTRoot(currentAST, tmp6_AST);
match(SEE);
sObj = new See();
break;
}
case AUTHOR:
{
AST tmp7_AST = null;
tmp7_AST = astFactory.create(LT(1));
astFactory.makeASTRoot(currentAST, tmp7_AST);
match(AUTHOR);
sObj = new Author();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
pObj=paragraph();
astFactory.addASTChild(currentAST, returnAST);
sObj.setParagraph(pObj);
labeledParagraph_AST = (AST)currentAST.root;
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_1);
}
returnAST = labeledParagraph_AST;
return sObj;
}
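/*
 * firstSentence: zero or more ID tokens followed by an optional DOT.
 * Each matched token is also handed to the Paragraph via addHiddenAfter,
 * presumably so that whitespace kept on the hidden token channel can be
 * recovered when the text is reassembled. The AST is rooted under the
 * imaginary FIRSTSENTENCE node.
 */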
public final Paragraph firstSentence() throws RecognitionException, TokenStreamException {
Paragraph pObj = new Paragraph();
returnAST = null;
ASTPair currentAST = new ASTPair();
AST firstSentence_AST = null;
Token id = null;
AST id_AST = null;
Token d = null;
AST d_AST = null;
try { // for error handling
{
_loop9:
do {
if ((LA(1)==ID)) {
id = LT(1);
id_AST = astFactory.create(id);
astFactory.addASTChild(currentAST, id_AST);
match(ID);
pObj.addHiddenAfter((CommonHiddenStreamToken)id);
}
else {
break _loop9;
}
} while (true);
}
{
if ((LA(1)==DOT)) {
d = LT(1);
d_AST = astFactory.create(d);
astFactory.addASTChild(currentAST, d_AST);
match(DOT);
pObj.addHiddenAfter((CommonHiddenStreamToken)d);
}
else if ((_tokenSet_0.member(LA(1)))) {
}
else {
throw new NoViableAltException(LT(1), getFilename());
}
}
firstSentence_AST = (AST)currentAST.root;
firstSentence_AST = (AST)astFactory.make( (new ASTArray(2)).add(astFactory.create(FIRSTSENTENCE)).add(firstSentence_AST));
currentAST.root = firstSentence_AST;
currentAST.child = firstSentence_AST!=null &&firstSentence_AST.getFirstChild()!=null ?
firstSentence_AST.getFirstChild() : firstSentence_AST;
currentAST.advanceChildToEnd();
firstSentence_AST = (AST)currentAST.root;
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_0);
}
returnAST = firstSentence_AST;
return pObj;
}
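/*
 * paragraph: one or more ID or DOT tokens in any order, each likewise
 * recorded on the Paragraph via addHiddenAfter.
 */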
public final Paragraph paragraph() throws RecognitionException, TokenStreamException {
Paragraph pObj = new Paragraph();
returnAST = null;
ASTPair currentAST = new ASTPair();
AST paragraph_AST = null;
Token d = null;
AST d_AST = null;
Token id = null;
AST id_AST = null;
try { // for error handling
{
int _cnt13=0;
_loop13:
do {
switch ( LA(1)) {
case DOT:
{
d = LT(1);
d_AST = astFactory.create(d);
astFactory.addASTChild(currentAST, d_AST);
match(DOT);
pObj.addHiddenAfter((CommonHiddenStreamToken)d);
break;
}
case ID:
{
id = LT(1);
id_AST = astFactory.create(id);
astFactory.addASTChild(currentAST, id_AST);
match(ID);
pObj.addHiddenAfter((CommonHiddenStreamToken)id);
break;
}
default:
{
if ( _cnt13>=1 ) { break _loop13; } else {throw new NoViableAltException(LT(1), getFilename());}
}
}
_cnt13++;
} while (true);
}
paragraph_AST = (AST)currentAST.root;
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_1);
}
returnAST = paragraph_AST;
return pObj;
}
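/*
 * dummy: not referenced by any other rule; it only mentions the imaginary
 * FIRSTSENTENCE and FIRSTPARAGRAPH types, apparently so that ANTLR keeps
 * them in the generated token vocabulary.
 */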
public final void dummy() throws RecognitionException, TokenStreamException {
returnAST = null;
ASTPair currentAST = new ASTPair();
AST dummy_AST = null;
try { // for error handling
switch ( LA(1)) {
case FIRSTSENTENCE:
{
AST tmp8_AST = null;
tmp8_AST = astFactory.create(LT(1));
astFactory.addASTChild(currentAST, tmp8_AST);
match(FIRSTSENTENCE);
dummy_AST = (AST)currentAST.root;
break;
}
case FIRSTPARAGRAPH:
{
AST tmp9_AST = null;
tmp9_AST = astFactory.create(LT(1));
astFactory.addASTChild(currentAST, tmp9_AST);
match(FIRSTPARAGRAPH);
dummy_AST = (AST)currentAST.root;
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_3);
}
returnAST = dummy_AST;
}
public static final String[] _tokenNames = {
"<0>",
"EOF",
"<2>",
"NULL_TREE_LOOKAHEAD",
"START",
"END",
"ID",
"DOT",
"\"@param\"",
"WS",
"\"@return\"",
"\"@see\"",
"\"@author\"",
"FIRSTSENTENCE",
"FIRSTPARAGRAPH",
"NL"
};
protected void buildTokenTypeASTClassMap() {
tokenTypeToASTClassMap=null;
};
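/*
 * Lookahead/error-recovery sets: each long encodes token-type membership
 * as one bit per type. For example, 7648L appears to cover END, ID, DOT,
 * "@param", "@return", "@see" and "@author" (types 5-8 and 10-12), and
 * 2L is just EOF.
 */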
private static final long[] mk_tokenSet_0() {
long[] data = { 7648L, 0L};
return data;
}
public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());
private static final long[] mk_tokenSet_1() {
long[] data = { 7456L, 0L};
return data;
}
public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());
private static final long[] mk_tokenSet_2() {
long[] data = { 7424L, 0L};
return data;
}
public static final BitSet _tokenSet_2 = new BitSet(mk_tokenSet_2());
private static final long[] mk_tokenSet_3() {
long[] data = { 2L, 0L};
return data;
}
public static final BitSet _tokenSet_3 = new BitSet(mk_tokenSet_3());
}