# Tokens (from "token.h")
#
# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, 'cd' to the top directory of
# the python source tree after building the interpreter and run:
#
#     python Lib/token.py
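
# For orientation, a hedged sketch of the generated region: main() below
# rewrites everything between the "#--start constants--" and
# "#--end constants--" markers with one assignment per token code.  The
# entries shown here are illustrative, not a complete or authoritative list:
#
#   #--start constants--
#   ENDMARKER = 0
#   NAME = 1
#   NUMBER = 2
#   STRING = 3
#   #--end constants--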

# Build the reverse mapping from numeric token value to symbolic name.
tok_name = {}
for _name, _value in globals().items():
    if type(_value) is type(0):
        tok_name[_value] = _name
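
# A quick illustration of the reverse mapping (assumes the generated NAME
# constant above; the value shown is illustrative):
#
#   >>> tok_name[NAME]
#   'NAME'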

def main():
    import re
    import string
    import sys
    args = sys.argv[1:]
    inFileName = args and args[0] or "Include/token.h"
    outFileName = "Lib/token.py"
    # read the token declarations from the C header:
    try:
        fp = open(inFileName)
    except IOError, err:
        sys.stdout.write("I/O error: %s\n" % str(err))
        sys.exit(1)
    lines = string.splitfields(fp.read(), "\n")
    fp.close()
89 "#define[ \t][ \t]*([A-Z][A-Z_]*)[ \t][ \t]*([0-9][0-9]*)",
93 match
= prog
.match(line
)
95 name
, val
= match
.group(1, 2)
96 val
= string
.atoi(val
)
97 tokens
[val
] = name
# reverse so we can sort them...
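
    # For reference, the header lines being matched look roughly like the
    # following (illustrative values, not copied from any particular release):
    #
    #   #define NAME            1
    #   #define NUMBER          2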
    keys = tokens.keys()
    keys.sort()
    # load the output skeleton from the target:
    try:
        fp = open(outFileName)
    except IOError, err:
        sys.stderr.write("I/O error: %s\n" % str(err))
        sys.exit(2)
    format = string.splitfields(fp.read(), "\n")
    fp.close()
    # locate the section of the skeleton delimited by the format markers:
    try:
        start = format.index("#--start constants--") + 1
        end = format.index("#--end constants--")
    except ValueError:
        sys.stderr.write("target does not contain format markers")
        sys.exit(3)
    # regenerate the constant assignments in value order and splice them in:
    lines = []
    for val in keys:
        lines.append("%s = %d" % (tokens[val], val))
    format[start:end] = lines
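
    # Splice behaviour with made-up data (illustrative only):
    #   fmt = ["#--start constants--", "OLD = 9", "#--end constants--"]
    #   fmt[1:2] = ["NEW = 0", "NEWER = 1"]
    #   fmt == ["#--start constants--", "NEW = 0", "NEWER = 1", "#--end constants--"]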
    # write the patched skeleton back out:
    try:
        fp = open(outFileName, 'w')
    except IOError, err:
        sys.stderr.write("I/O error: %s\n" % str(err))
        sys.exit(4)
    fp.write(string.joinfields(format, "\n"))
    fp.close()

if __name__ == "__main__":
    main()