gem5/ext/ply/example/ansic/clex.py
Nathan Binkert 44ebb8d3e2 Update to ply 2.3
ext/ply/ply/lex.py:
ext/ply/ply/yacc.py:
ext/ply/CHANGES:
ext/ply/README:
ext/ply/TODO:
ext/ply/doc/ply.html:
ext/ply/example/ansic/clex.py:
ext/ply/example/ansic/cparse.py:
ext/ply/example/calc/calc.py:
ext/ply/example/hedit/hedit.py:
ext/ply/example/optcalc/calc.py:
ext/ply/test/README:
ext/ply/test/calclex.py:
ext/ply/test/lex_doc1.exp:
ext/ply/test/lex_doc1.py:
ext/ply/test/lex_dup1.exp:
ext/ply/test/lex_dup1.py:
ext/ply/test/lex_dup2.exp:
ext/ply/test/lex_dup2.py:
ext/ply/test/lex_dup3.exp:
ext/ply/test/lex_dup3.py:
ext/ply/test/lex_empty.py:
ext/ply/test/lex_error1.py:
ext/ply/test/lex_error2.py:
ext/ply/test/lex_error3.exp:
ext/ply/test/lex_error3.py:
ext/ply/test/lex_error4.exp:
ext/ply/test/lex_error4.py:
ext/ply/test/lex_hedit.exp:
ext/ply/test/lex_hedit.py:
ext/ply/test/lex_ignore.exp:
ext/ply/test/lex_ignore.py:
ext/ply/test/lex_re1.exp:
ext/ply/test/lex_re1.py:
ext/ply/test/lex_rule1.py:
ext/ply/test/lex_token1.py:
ext/ply/test/lex_token2.py:
ext/ply/test/lex_token3.py:
ext/ply/test/lex_token4.py:
ext/ply/test/lex_token5.exp:
ext/ply/test/lex_token5.py:
ext/ply/test/yacc_badargs.exp:
ext/ply/test/yacc_badargs.py:
ext/ply/test/yacc_badprec.exp:
ext/ply/test/yacc_badprec.py:
ext/ply/test/yacc_badprec2.exp:
ext/ply/test/yacc_badprec2.py:
ext/ply/test/yacc_badrule.exp:
ext/ply/test/yacc_badrule.py:
ext/ply/test/yacc_badtok.exp:
ext/ply/test/yacc_badtok.py:
ext/ply/test/yacc_dup.exp:
ext/ply/test/yacc_dup.py:
ext/ply/test/yacc_error1.exp:
ext/ply/test/yacc_error1.py:
ext/ply/test/yacc_error2.exp:
ext/ply/test/yacc_error2.py:
ext/ply/test/yacc_error3.exp:
ext/ply/test/yacc_error3.py:
ext/ply/test/yacc_inf.exp:
ext/ply/test/yacc_inf.py:
ext/ply/test/yacc_missing1.exp:
ext/ply/test/yacc_missing1.py:
ext/ply/test/yacc_nodoc.exp:
ext/ply/test/yacc_nodoc.py:
ext/ply/test/yacc_noerror.exp:
ext/ply/test/yacc_noerror.py:
ext/ply/test/yacc_nop.exp:
ext/ply/test/yacc_nop.py:
ext/ply/test/yacc_notfunc.exp:
ext/ply/test/yacc_notfunc.py:
ext/ply/test/yacc_notok.exp:
ext/ply/test/yacc_notok.py:
ext/ply/test/yacc_rr.exp:
ext/ply/test/yacc_rr.py:
ext/ply/test/yacc_simple.exp:
ext/ply/test/yacc_simple.py:
ext/ply/test/yacc_sr.exp:
ext/ply/test/yacc_sr.py:
ext/ply/test/yacc_term1.exp:
ext/ply/test/yacc_term1.py:
ext/ply/test/yacc_unused.exp:
ext/ply/test/yacc_unused.py:
ext/ply/test/yacc_uprec.exp:
ext/ply/test/yacc_uprec.py:
    Import patch ply.diff
src/arch/isa_parser.py:
    everything is now within the ply package
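    The reorganization means lex.py and yacc.py are imported as submodules of
    the ply package rather than as top-level modules. A minimal sketch of what
    call sites look like after the change (illustrative only; the exact edits
    to isa_parser.py are not reproduced here):

        # before the repackaging: lex.py/yacc.py found directly on sys.path
        #   import lex
        #   import yacc
        # after the repackaging: the modules live inside the ply package
        import ply.lex as lex
        import ply.yacc as yacc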

--HG--
rename : ext/ply/lex.py => ext/ply/ply/lex.py
rename : ext/ply/yacc.py => ext/ply/ply/yacc.py
extra : convert_revision : fca8deabd5c095bdeabd52a1f236ae1404ef106e
2007-05-24 21:54:51 -07:00


# ----------------------------------------------------------------------
# clex.py
#
# A lexer for ANSI C.
# ----------------------------------------------------------------------
import sys
sys.path.insert(0,"../..")
import ply.lex as lex
# Reserved words
reserved = (
    'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE',
    'ELSE', 'ENUM', 'EXTERN', 'FLOAT', 'FOR', 'GOTO', 'IF', 'INT', 'LONG', 'REGISTER',
    'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', 'SWITCH', 'TYPEDEF',
    'UNION', 'UNSIGNED', 'VOID', 'VOLATILE', 'WHILE',
    )
tokens = reserved + (
    # Literals (identifier, integer constant, float constant, string constant, char const)
    'ID', 'TYPEID', 'ICONST', 'FCONST', 'SCONST', 'CCONST',
    # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
    'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
    'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
    'LOR', 'LAND', 'LNOT',
    'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
    # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
    'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
    'LSHIFTEQUAL', 'RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',
    # Increment/decrement (++,--)
    'PLUSPLUS', 'MINUSMINUS',
    # Structure dereference (->)
    'ARROW',
    # Conditional operator (?)
    'CONDOP',
    # Delimiters ( ) [ ] { } , . ; :
    'LPAREN', 'RPAREN',
    'LBRACKET', 'RBRACKET',
    'LBRACE', 'RBRACE',
    'COMMA', 'PERIOD', 'SEMI', 'COLON',
    # Ellipsis (...)
    'ELLIPSIS',
    )
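# PLY builds its master regular expression from the t_<TOKEN> definitions
# below; every name listed in `tokens` needs a corresponding rule.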
# Completely ignored characters
t_ignore = ' \t\x0c'
# Newlines
def t_NEWLINE(t):
    r'\n+'
    t.lexer.lineno += t.value.count("\n")
# Operators
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_MOD = r'%'
t_OR = r'\|'
t_AND = r'&'
t_NOT = r'~'
t_XOR = r'\^'
t_LSHIFT = r'<<'
t_RSHIFT = r'>>'
t_LOR = r'\|\|'
t_LAND = r'&&'
t_LNOT = r'!'
t_LT = r'<'
t_GT = r'>'
t_LE = r'<='
t_GE = r'>='
t_EQ = r'=='
t_NE = r'!='
# Assignment operators
t_EQUALS = r'='
t_TIMESEQUAL = r'\*='
t_DIVEQUAL = r'/='
t_MODEQUAL = r'%='
t_PLUSEQUAL = r'\+='
t_MINUSEQUAL = r'-='
t_LSHIFTEQUAL = r'<<='
t_RSHIFTEQUAL = r'>>='
t_ANDEQUAL = r'&='
t_OREQUAL = r'\|='
t_XOREQUAL = r'^='
# Increment/decrement
t_PLUSPLUS = r'\+\+'
t_MINUSMINUS = r'--'
# ->
t_ARROW = r'->'
# ?
t_CONDOP = r'\?'
# Delimiters
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACKET = r'\['
t_RBRACKET = r'\]'
t_LBRACE = r'\{'
t_RBRACE = r'\}'
t_COMMA = r','
t_PERIOD = r'\.'
t_SEMI = r';'
t_COLON = r':'
t_ELLIPSIS = r'\.\.\.'
# Identifiers and reserved words
reserved_map = { }
for r in reserved:
    reserved_map[r.lower()] = r
def t_ID(t):
    r'[A-Za-z_][\w_]*'
    t.type = reserved_map.get(t.value,"ID")
    return t
# Integer literal
t_ICONST = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
# Floating literal
t_FCONST = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
# String literal
t_SCONST = r'\"([^\\\n]|(\\.))*?\"'
# Character constant 'c' or L'c'
t_CCONST = r'(L)?\'([^\\\n]|(\\.))*?\''
# Comments
def t_comment(t):
    r' /\*(.|\n)*?\*/'
    t.lexer.lineno += t.value.count('\n')
# Preprocessor directive (ignored)
def t_preprocessor(t):
    r'\#(.)*?\n'
    t.lexer.lineno += 1
def t_error(t):
    print "Illegal character %s" % repr(t.value[0])
    t.lexer.skip(1)
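# Build the lexer; optimize=1 caches the generated tables in lextab.py and
# skips most of PLY's rule validation on subsequent runs.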
lexer = lex.lex(optimize=1)
if __name__ == "__main__":
    lex.runmain(lexer)
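For reference, the lexer object returned by lex.lex() can also be driven
directly through PLY's standard input()/token() interface instead of
lex.runmain(). A minimal, illustrative sketch (the sample C fragment is
arbitrary):

    lexer.input("int x = 10 << 2; /* shift left */\n")
    while 1:
        tok = lexer.token()   # returns None once the input is exhausted
        if not tok:
            break
        print tok.type, tok.value, tok.lineno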