/* parser.c */
pg->actionerr = NULL;
(*ap->func) (pg, ap->args); /* Call action. */
if (ptab->PL [rule] & PL_DELTREE) /* Delete the subtree? */
{
#if DEBUG
if(debug)cfprintf("Parse: Prune Tree at rule:%d\n",rule);
#endif
*(ptab->NS) = 0; /* Reset np to 0. */
PruneAstTree(pg, np, 0, 0); /* Get rid of all nodes including root */
pg->ROOT = np; /* will be next node allocated */
}
}
}/* END attach() */
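/*
 * Layout of a PL[] entry as used in attach() above and in the reduce paths
 * below: the low 4 bits hold the amount the state stack is unwound on a
 * reduction (the value 15 encodes -1, i.e. the current state is pushed
 * instead of popped), the PL_ACTION flag marks rules with a semantic action
 * whose index is taken from bits 7..15 ((PL[rule] >> 7) & 511), and
 * PL_DELTREE marks rules whose subtree is pruned from the AST after the
 * action runs.  The exact bit positions of the flag macros are defined
 * elsewhere.
 */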
/* NESTED SUBROUTINE */
static int procrules()
{
/* Attach all currently known rules to the ast */
ptab->rule = rule;
for (r = ptab->RStop; r < ptab->Rs; )
{
rule = *r++;
if(rule < 0)
{/* Shift occurred */
*(++(ptab->NS)) = (AstP)0; /* Set node pointer to zero. */
*(++(ptab->LS)) = 0;
lexindx -= 2; /* lex symbol is one token closer */
}
else
{
attach (); /* Attach node to AST. */
if(pg->actionerr) {
#if DEBUG
if(debug)cfprintf("PARSE action error exit\n");
#endif
return 1;
}
}
}
ptab->Rs = ptab->RStop; /* Reset reduction stk ptr. */
#if HOWBIG
if((ptab->NS - ptab->NStop) > maxNS_parse)
maxNS_parse = ptab->NS - ptab->NStop;
if((ptab->LS - ptab->LStop) > maxLS_parse)
maxLS_parse = ptab->LS - ptab->LStop;
#endif
return 0;
} /* END: procrules */
/* do_parse */
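/*
 * do_parse is the table-driven parse engine.  It runs in a coroutine style:
 * it returns 0 when it has consumed its token and needs the caller to supply
 * a new one (the resume state is saved back into ptab), 1 when the goal rule
 * (rule 0) has been reduced and pg->root points at the finished AST, and -1
 * on a syntax error or a failed semantic action.  Shift decisions come from
 * the M_bits bit-matrix and the MT_beg/MT_tran terminal-transition tables,
 * non-terminal (goto) transitions from MN_beg/MN_tran, and reductions from
 * D_red plus the R_start/R_symbol/R_prod multiple-reduction tables.
 * Reductions are queued on the Rs stack and replayed against the AST by
 * procrules()/attach().
 */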
pg = ptab->pg;
SS = ptab->SS;
state = ptab->state;
token = ptab->token;
link = ptab->link;
xx = ptab->xx;
debug = pg->debug & 1;
lexindx = 4;
#if DEBUG
if(debug)cfprintf("ParseSTART: state=%d token=%d\n", state, token);
#endif
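/*
 * BITSET(bits, base, sym) tests whether symbol `sym` has an entry in the
 * packed per-state transition bit-matrix; its definition is not in this
 * listing.  A minimal sketch of what it presumably looks like, assuming
 * 32-bit words in M_bits:
 *
 *   #define BITSET(bits, base, sym) \
 *       ((bits)[(base) + ((sym) >> 5)] & (1L << ((sym) & 31)))
 *
 * where base is state * bitwords, i.e. the first word of that state's row.
 */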
Scan:
if(token < 0)
{
if(procrules())
return -1;
/* get new token */
ptab->state = state;
ptab->SS = SS;
ptab->xx = xx;
return(0);
}
/* Test for Shift, and Shift-Reduce */
base = state * ptab->bitwords;
if(BITSET(ptab->M_bits, base, token))
{
x = ptab->MT_tran[ ptab->MT_beg[state] + ptab->token];
*++(SS) = state; /* Put state on parse stack.*/
#if HOWBIG
if((SS - ptab->SStop) > maxSS_parse)
maxSS_parse = SS - ptab->SStop;
#endif
if (SS >= ptab->SSmax) /* If parse stack too large.*/
{
cfprintf("Parse: stack overflow \n");
OXPORT_crash("");
}
*ptab->Rs++ = -1; /* Mark reduction stack as shifted */
token = -1; /* Indicate token consumed */
if(x > 0)
{
state = x;
#if DEBUG
if(debug)cfprintf("Parse: shift to state %d\n", state);
#endif
goto Scan; /* Shift only */
}
/* --- REDUCE -----------------------*/
Neg: rule = -x; /* Make positive. */
Reduce:
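/* The low nibble of PL[rule] tells how far to unwind the state stack for
 * this reduction; the encoded value 15 means -1, so the current state is
 * pushed rather than popped (apparently the empty-rule case).  The exposed
 * stack top then selects the non-terminal transition for Head[rule]. */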
rulesize = ptab->PL [rule] & 0x000f;
if(rulesize == 15)
rulesize = -1;
SS -= rulesize;
if(rulesize == -1)
{
*SS = state; /* Stack current state. */
}
stacktop = *SS;
*ptab->Rs++ = rule;
#if HOWBIG
if((ptab->Rs - ptab->RStop) > maxRS_parse)
maxRS_parse = ptab->Rs - ptab->RStop;
#endif
#if DEBUG
if(debug)cfprintf("Parse: stack rule %d, new state is %d head=%d\n",
rule, stacktop, ptab->Head[rule]);
#endif
/* Check for goal */
if(rule == 0)
{
if(procrules())
return -1;
pg->root = pg->ROOT;
#if DEBUG
if(debug)cfprintf("PARSE DONE rootnode=%x\n", pg->root);
#endif
return 1;
}
/* Check non terminal transitions for current state */
base = stacktop * ptab->bitwords;
if(BITSET(ptab->M_bits, base, ptab->Head[rule]))
{
x = ptab->MN_tran[ptab->MN_beg[stacktop]+(ptab->Head[rule]-ptab->n_terms)];
if(x > 0)
{
state = x;
#if DEBUG
if(debug)cfprintf("Parse: NT trans to state %d\n", state);
#endif
goto Scan;
}
#if DEBUG
if(debug)cfprintf("Parse: NT reduce to rule %d, state=%d\n",-x, stacktop);
#endif
goto Neg;
}
#if DEBUG
if(debug)cfprintf("Parse: NO nt tran for state %d\n",stacktop);
#endif
goto Scan;
}/* END of shift/reduce test */
/* Check for pure reductions */
#if DEBUG
if(debug)cfprintf("Parse: check reductions for state %d\n", state);
#endif
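/* D_red[state] encodes what to do when no shift applies: a value >= 0 is a
 * default reduction, -32767 flags an error state, and any other negative
 * value says to scan this state's R_symbol/R_prod entries for a reduction
 * keyed on the lookahead token, falling back to rule -D_red[state] if none
 * matches. */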
rule = ptab->D_red [state];
if(rule >= 0) {
#if DEBUG
if(debug)cfprintf("Parse: use default reduction to rule %d\n", rule);
#endif
goto Reduce; /* Default reduction */
}
if (rule == -32767)
{
ptab->state = state;
ptab->token = token;
ptab->SS = SS;
ptab->xx = xx;
#if DEBUG
if(debug)cfprintf("PARSE error exit\n");
#endif
return(-1);
}
/* Check multiple reductions */
#if DEBUG
if(debug)cfprintf("Parse: check multiple reductions in state %d\n", state);
#endif
for (i = ptab->R_start [state]; i < ptab->R_start [state+1]; i++)
{
if (ptab->R_symbol [i] == token) /* Found? */
{
rule = ptab->R_prod [i];
#if DEBUG
if(debug)cfprintf("Parse: multiple reduction to rule %d token=%d\n", rule, token);
#endif
goto Reduce;
}
}
rule = -rule; /* Multiple default. */
#if DEBUG
if(debug)cfprintf("Parse: use multiple default rule %d\n", rule);
#endif
goto Reduce;
}/* END OF do_parse() */
/* THIS LEXER USES PARSER STYLE TABLES [Improve me NDC] */
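/*
 * The lexer below (do_lex) runs the same engine as do_parse, but its
 * terminals are input characters.  Every shifted character is appended to
 * the current text chunk by add_token(); when a rule numbered <= ltab->TM
 * is reduced, LGT[rule] gives the lexical token to hand to the parser (a
 * non-positive value means the match is ignored and the accumulated text is
 * discarded).  Matched spellings are interned with NewParserSymbol() and
 * recorded, together with the token code, in the small pg->L_stack ring.
 */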
static void
new_chunk(PG *pg, char tok)
{
long *tempbase;
int chunksize;
if(pg->symbase == pg->chunkbase+4)
{/* The whole current chunk is worthless (monster symbol spans a chunk) */
tempbase = *(long**)pg->chunkbase; /* pointer to prev chunk */
freeC(pg->category, pg->chunkbase);
pg->chunkbase = (char *)tempbase;
}
/* Compute size of new chunk */
if(pg->symspot >= TEXTCHUNK-5)
chunksize = pg->symspot+TEXTCHUNK+5;
else chunksize = TEXTCHUNK;
tempbase = mallocC(pg->category, chunksize);
*tempbase = (long)pg->chunkbase; /* pointer to prev chunk */
pg->chunkbase = (char*)tempbase;
pg->chunkend = pg->chunkbase+chunksize-1;
memcpy(pg->chunkbase+4, pg->symbase, pg->symspot);
pg->symbase = pg->chunkbase+4;
pg->symend = pg->symbase+pg->symspot;
*pg->symend++ = tok;
*pg->symend = 0;
((char*)&pg->symhash)[pg->symspot++ & 3] ^= tok; /* running hash */
}
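/*
 * Text chunks are chained through their first word: the previous chunk's
 * address is stored at the start of each new chunk and the usable text area
 * begins 4 bytes in (symbase = chunkbase + 4).  Note that this header size,
 * and the cast through long, assume a 32-bit long/pointer; a 64-bit build
 * would presumably need sizeof(char *) here.  pg->symhash is a running
 * 4-byte hash, XOR-folding each accepted character into one of its bytes.
 */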
static inline void
add_token(PG *pg, char tok)
{
if(pg->symend < pg->chunkend) {
*pg->symend++ = tok;
*pg->symend = 0;
((char*)&pg->symhash)[pg->symspot++ & 3] ^= tok; /* running hash */
}
else new_chunk(pg, tok);
}
static int
do_lex (LTABLE *ltab)
{
short i, x;
short rule;
unsigned base;
short state;
int token;
short stacktop;
short *SS;
PG *pg;
int rulesize;
int debug;
pg = ltab->pg;
SS = ltab->SS;
state = ltab->state;
token = ltab->token;
debug = pg->debug & 2;
#if DEBUG
if(debug)cfprintf("LexSTART: char %c (0x%x)\n", token, token);
#endif
Scan:
if(token < 0)
{/* get new token */
ltab->state = state;
ltab->token = token;
ltab->SS = SS;
#if DEBUG
if(debug)cfprintf("Lex: return for next char\n");
#endif
return(0);
}
/* Test for Shift, and Shift-Reduce */
base = state * ltab->bitwords;
if(BITSET(ltab->M_bits, base, token))
{
x = ltab->MT_tran[ ltab->MT_beg[state] + ltab->token];
*++(SS) = state; /* Put state on parse stack.*/
#if 0
#if HOWBIG
if((SS - ltab->SStop) > maxSS_lex)
maxSS_lex = SS - ltab->SStop;
#endif
if (SS >= ltab->SSmax) /* If parse stack too large.*/
{
cfprintf("Lex: stack overflow \n");
OXPORT_crash("");
}
#endif
/* Put char in textchunk */
add_token(pg, token);
#if DEBUG
if(debug)cfprintf("Lex consume char\n");
#endif
token = -1; /* Indicate token consumed */
if(x > 0)
{
state = x;
#if DEBUG
if(debug)cfprintf("Lex: shift to state %d\n", state);
#endif
goto Scan; /* Shift only */
}
/* --- REDUCE -----------------------*/
Neg: rule = -x; /* Make positive. */
Reduce:
rulesize = ltab->PL [rule] & 0x000f;
if(rulesize == 15)
rulesize = -1;
SS -= rulesize;
if(rulesize == -1)
{
*SS = state; /* Stack current state. */
}
stacktop = *SS;
#if DEBUG
if(debug)cfprintf("Lex: reduce to rule %d, new state is %d head=%d\n",
rule, stacktop, ltab->Head[rule]);
#endif
if(rule <= ltab->TM)
{
ltab->lextoken = ltab->LGT[rule];
state = 0;
SS = ltab->SStop;
if(ltab->lextoken > 0)
{
#if DEBUG
if(debug)cfprintf("Lex: add symbol %s\n", pg->symbase);
#endif
pg->L_stack[pg->LSP] = NewParserSymbol(pg, pg->symbase);
ltab->state = state;
ltab->SS = SS;
ltab->token = token;
ltab->rule = rule;
if(ltab->PL [rule] & PL_ACTION)
{
PACTIONS ap = &pg->ACTIONS[(ltab->PL[rule]>>7)&511];
(*ap->func) (pg, ap->args);
}
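/* pg->L_stack is a 4-entry ring (LSP is masked with 3): each accepted token
 * occupies two consecutive slots, the interned symbol followed by the token
 * code, presumably so grammar actions can look back at recent lexemes. */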
if(ltab->lextoken > 0) {
#if DEBUG
if(debug)cfprintf("Lex: exit with token=%d\n", ltab->lextoken);
#endif
pg->LSP = (pg->LSP+1) & 3;
pg->L_stack[pg->LSP] = ltab->lextoken;
pg->LSP = (pg->LSP+1) & 3;
return ltab->lextoken;
}
else
{/* Token was cancelled by action */
goto Scan;
}
}
else
{/* IGNORE rule */
#if DEBUG
if(debug)cfprintf("Lex: ignore rule %d\n", rule);
#endif
pg->symend = pg->symbase;
pg->symhash = 0;
pg->symspot = 0;
goto Scan;
}
}
/* Check non terminal transitions for current state */
base = stacktop * ltab->bitwords;
if(BITSET(ltab->M_bits, base, ltab->Head[rule]))
{
x = ltab->MN_tran[ltab->MN_beg[stacktop]+(ltab->Head[rule]-ltab->n_terms)];
if(x > 0)
{
state = x;
#if DEBUG
if(debug)cfprintf("Lex: NT trans to state %d\n", state);
#endif
goto Scan;
}
#if DEBUG
if(debug)cfprintf("Lex: NT reduce to rule %d, state=%d\n",-x, stacktop);
#endif
goto Neg;
}
#if DEBUG
if(debug)cfprintf("Lex: NO nt tran for state %d\n",stacktop);
#endif
goto Scan;
}/* END of shift/reduce test */
/* Check for pure reductions */
#if DEBUG
if(debug)cfprintf("Lex: check reductions for state %d\n", state);
#endif
rule = ltab->D_red [state];
if(rule >= 0) goto Reduce; /* Default reduction */
if (rule == -32767)
{
pg->symspot = 0;
pg->symhash = 0;
pg->symend = pg->symbase;
ltab->state = 0;
ltab->SS = ltab->SStop;
#if DEBUG
if(debug)cfprintf("Lex: return ERROR\n");
#endif
return(-1);
}
#if DEBUG
if(debug)cfprintf("Lex: Check multiple reductions in state %d\n", state);
#endif
/* Check multiple reductions */
for (i = ltab->R_start [state]; i < ltab->R_start [state+1]; i++)
{
if (ltab->R_symbol [i] == token) /* Found? */
{
rule = ltab->R_prod [i];
#if DEBUG
if(debug)cfprintf("Lex: multiple reduction to rule %d token=%d\n", rule, token);
#endif
goto Reduce;
}
}
rule = -rule; /* Multiple default. */
#if DEBUG
if(debug)cfprintf("Lex: use multiple default rule %d\n", rule);
#endif
goto Reduce;
}/* END OF do_lex() */
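/*
 * Sketch of how the two engines are meant to cooperate, under the calling
 * convention visible above (do_lex: > 0 is a token, 0 means it needs more
 * input, -1 is a lexical error; do_parse: 1 is accept, 0 means it needs the
 * next token, -1 is an error).  The real driver lives elsewhere in this
 * source; next_char(), the pg->ptab hookup, and do_parse()'s argument are
 * assumptions made for illustration only, and EOF handling is omitted.
 */
#if 0
static int run_parse_loop(PG *pg, LTABLE *ltab)
{
    int tok, rc;
    for (;;)
    {
        if (ltab->token < 0)               /* previous char was consumed */
            ltab->token = next_char(pg);   /* hypothetical input callback */
        tok = do_lex(ltab);
        if (tok < 0)
            return -1;                     /* lexical error */
        if (tok == 0)
            continue;                      /* lexer wants more characters */
        pg->ptab->token = tok;             /* hand the token to the parser; */
        rc = do_parse(pg->ptab);           /* ptab hookup here is assumed   */
        if (rc != 0)
            return rc;                     /* 1 = accepted, -1 = syntax error */
    }
}
#endif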
static short
read_object(void *obj, PG *pg)
{
if(pg->obj_inbufcnt <= 0) {
pg->obj_inbufsize =