Null commit with -f option to force an uprev and put HEADs firmly on the trunk.
[python/dscho.git] / Lib / sre_compile.py
blob539e878dce15f0d08d0ccb6894325b74093a1b96
2 # Secret Labs' Regular Expression Engine
4 # convert template to internal format
6 # Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
8 # See the sre.py file for information on usage and redistribution.
11 import _sre
13 from sre_constants import *
15 assert _sre.MAGIC == MAGIC, "SRE module mismatch"
17 MAXCODE = 65535
def _compile(code, pattern, flags):
    # internal: compile a (sub)pattern.
    #
    # code is the output buffer (a flat list that opcodes and operands
    # are appended to); pattern is a sequence of (op, av) tuples as
    # produced by sre_parse; flags is a bitmask of SRE_FLAG_* values.
    # raises error for unsupported constructs, ValueError for an
    # unknown operator.
    emit = code.append
    for op, av in pattern:
        if op in (LITERAL, NOT_LITERAL):
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
                emit(_sre.getlower(av, flags))
            else:
                emit(OPCODES[op])
                emit(av)
        elif op is IN:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
                # lowercase set members before matching
                def fixup(literal, flags=flags):
                    return _sre.getlower(literal, flags)
            else:
                emit(OPCODES[op])
                fixup = lambda x: x
            # reserve a slot for the skip count, patched afterwards
            skip = len(code); emit(0)
            _compile_charset(av, flags, code, fixup)
            code[skip] = len(code) - skip
        elif op is ANY:
            if flags & SRE_FLAG_DOTALL:
                emit(OPCODES[ANY_ALL])
            else:
                emit(OPCODES[ANY])
        elif op in (REPEAT, MIN_REPEAT, MAX_REPEAT):
            if flags & SRE_FLAG_TEMPLATE:
                # template mode has no repeat support; the old fallthrough
                # code that followed this raise was unreachable and has
                # been removed
                raise error("internal: unsupported template operator")
            elif _simple(av) and op == MAX_REPEAT:
                # single-character target: use the simpler REPEAT_ONE
                emit(OPCODES[REPEAT_ONE])
                skip = len(code); emit(0)
                emit(av[0])    # min count
                emit(av[1])    # max count
                _compile(code, av[2], flags)
                emit(OPCODES[SUCCESS])
                code[skip] = len(code) - skip
            else:
                emit(OPCODES[REPEAT])
                skip = len(code); emit(0)
                emit(av[0])    # min count
                emit(av[1])    # max count
                _compile(code, av[2], flags)
                code[skip] = len(code) - skip
                if op == MAX_REPEAT:
                    emit(OPCODES[MAX_UNTIL])
                else:
                    emit(OPCODES[MIN_UNTIL])
        elif op is SUBPATTERN:
            # av is (group number or None, subpattern)
            if av[0]:
                emit(OPCODES[MARK])
                emit((av[0]-1)*2)
            # _compile_info(code, av[1], flags)
            _compile(code, av[1], flags)
            if av[0]:
                emit(OPCODES[MARK])
                emit((av[0]-1)*2+1)
        elif op in (SUCCESS, FAILURE):
            emit(OPCODES[op])
        elif op in (ASSERT, ASSERT_NOT):
            emit(OPCODES[op])
            skip = len(code); emit(0)
            if av[0] >= 0:
                emit(0) # look ahead
            else:
                lo, hi = av[1].getwidth()
                if lo != hi:
                    raise error("look-behind requires fixed-width pattern")
                emit(lo) # look behind
            _compile(code, av[1], flags)
            emit(OPCODES[SUCCESS])
            code[skip] = len(code) - skip
        elif op is CALL:
            emit(OPCODES[op])
            skip = len(code); emit(0)
            _compile(code, av, flags)
            emit(OPCODES[SUCCESS])
            code[skip] = len(code) - skip
        elif op is AT:
            # positional assertion; map to the multiline/locale/unicode
            # variant as dictated by the flags
            emit(OPCODES[op])
            if flags & SRE_FLAG_MULTILINE:
                av = AT_MULTILINE.get(av, av)
            if flags & SRE_FLAG_LOCALE:
                av = AT_LOCALE.get(av, av)
            elif flags & SRE_FLAG_UNICODE:
                av = AT_UNICODE.get(av, av)
            emit(ATCODES[av])
        elif op is BRANCH:
            emit(OPCODES[op])
            tails = []
            for alternative in av[1]:
                skip = len(code); emit(0)
                # _compile_info(code, alternative, flags)
                _compile(code, alternative, flags)
                emit(OPCODES[JUMP])
                tails.append(len(code)); emit(0)
                code[skip] = len(code) - skip
            emit(0) # end of branch
            # patch each alternative's jump to point past the branch
            for pos in tails:
                code[pos] = len(code) - pos
        elif op is CATEGORY:
            emit(OPCODES[op])
            if flags & SRE_FLAG_LOCALE:
                av = CH_LOCALE[av]
            elif flags & SRE_FLAG_UNICODE:
                av = CH_UNICODE[av]
            emit(CHCODES[av])
        elif op is GROUPREF:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
            else:
                emit(OPCODES[op])
            emit(av-1)
        else:
            raise ValueError("unsupported operand type", op)
def _compile_charset(charset, flags, code, fixup=None):
    # internal: compile a charset subprogram.
    #
    # the (optimized) set items are appended to code, terminated by a
    # FAILURE opcode.  fixup, if given, maps character codes (used for
    # case folding); it defaults to the identity.  note that fixup is
    # also applied by _optimize_charset, which is harmless because case
    # folding is idempotent, and necessary when the set could not be
    # optimized and is returned unchanged.
    emit = code.append
    if not fixup:
        fixup = lambda x: x
    for op, av in _optimize_charset(charset, fixup):
        emit(OPCODES[op])
        if op is NEGATE:
            pass
        elif op is LITERAL:
            emit(fixup(av))
        elif op is RANGE:
            emit(fixup(av[0]))
            emit(fixup(av[1]))
        elif op is CHARSET:
            code.extend(av)
        elif op is BIGCHARSET:
            code.extend(av)
        elif op is CATEGORY:
            if flags & SRE_FLAG_LOCALE:
                emit(CHCODES[CH_LOCALE[av]])
            elif flags & SRE_FLAG_UNICODE:
                emit(CHCODES[CH_UNICODE[av]])
            else:
                emit(CHCODES[av])
        else:
            raise error("internal: unsupported set operator")
    emit(OPCODES[FAILURE])
def _optimize_charset(charset, fixup):
    # internal: optimize a character set.
    #
    # returns a (possibly shorter) list of set items; falls back to the
    # original charset when no smaller encoding could be found, and to
    # _optimize_unicode when a member does not fit in the 256-slot map.
    out = []
    charmap = [0]*256
    try:
        for op, av in charset:
            if op is NEGATE:
                out.append((op, av))
            elif op is LITERAL:
                charmap[fixup(av)] = 1
            elif op is RANGE:
                for ch in range(fixup(av[0]), fixup(av[1])+1):
                    charmap[ch] = 1
            elif op is CATEGORY:
                # XXX: could append to charmap tail
                return charset # cannot compress
    except IndexError:
        # a code point fell outside the 0..255 map: unicode charset
        return _optimize_unicode(charset, fixup)
    # scan the map for maximal runs of set bits
    runs = []
    pos = 0
    while pos < 256:
        if charmap[pos]:
            first = pos
            while pos < 256 and charmap[pos]:
                pos = pos + 1
            runs.append((first, pos - first))
        else:
            pos = pos + 1
    if len(runs) <= 2:
        # one or two runs: literal/range items may beat the original
        for first, length in runs:
            if length == 1:
                out.append((LITERAL, first))
            else:
                out.append((RANGE, (first, first+length-1)))
        if len(out) < len(charset):
            return out
    else:
        # many runs: a 256-bit bitmap wins
        out.append((CHARSET, _mk_bitmap(charmap)))
        return out
    return charset
def _mk_bitmap(bits):
    # internal: pack a sequence of 0/1 flags into a list of 16-bit
    # words, least significant bit first within each word.  trailing
    # flags that do not fill a whole word are dropped (callers always
    # pass a multiple of 16 flags).
    words = []
    word = 0
    shift = 0
    for flag in bits:
        if flag:
            word = word | (1 << shift)
        shift = shift + 1
        if (1 << shift) > MAXCODE:
            # word is full: flush and start the next one
            words.append(word)
            word = 0
            shift = 0
    return words
233 # To represent a big charset, first a bitmap of all characters in the
234 # set is constructed. Then, this bitmap is sliced into chunks of 256
235 # characters, duplicate chunks are eliminated, and each chunk is
236 # given a number. In the compiled expression, the charset is
237 # represented by a 16-bit word sequence, consisting of one word for
238 # the number of different chunks, a sequence of 256 bytes (128 words)
239 # of chunk numbers indexed by their original chunk position, and a
240 # sequence of chunks (16 words each).
242 # Compression is normally good: in a typical charset, large ranges of
243 # Unicode will be either completely excluded (e.g. if only cyrillic
244 # letters are to be matched), or completely included (e.g. if large
245 # subranges of Kanji match). These ranges will be represented by
246 # chunks of all one-bits or all zero-bits.
248 # Matching can be also done efficiently: the more significant byte of
249 # the Unicode character is an index into the chunk number, and the
250 # less significant byte is a bit index in the chunk (just like the
251 # CHARSET matching).
def _optimize_unicode(charset, fixup):
    # internal: build a compressed BIGCHARSET item for a set that
    # contains characters above 255 (see the block comment above for
    # the on-the-wire layout).  returns the original charset when it
    # contains a category item, which cannot be folded into a bitmap.
    charmap = [0]*65536
    negate = 0
    for op, av in charset:
        if op is NEGATE:
            negate = 1
        elif op is LITERAL:
            charmap[fixup(av)] = 1
        elif op is RANGE:
            for ch in range(fixup(av[0]), fixup(av[1])+1):
                charmap[ch] = 1
        elif op is CATEGORY:
            # XXX: could expand category
            return charset # cannot compress
    if negate:
        # fold the negation directly into the bitmap
        for ch in range(65536):
            charmap[ch] = not charmap[ch]
    # slice the 64k map into 256-character chunks, sharing duplicates
    comps = {}
    mapping = [0]*256
    block = 0
    data = []
    for chunkno in range(256):
        chunk = tuple(charmap[chunkno*256:(chunkno+1)*256])
        blockno = comps.setdefault(chunk, block)
        mapping[chunkno] = blockno
        if blockno == block:
            # first occurrence of this chunk: emit its bitmap
            block = block + 1
            data = data + _mk_bitmap(chunk)
    # header: chunk count, then the 256 mapping bytes two per word
    header = [block]
    assert MAXCODE == 65535
    for pos in range(128):
        header.append(mapping[2*pos] + 256*mapping[2*pos+1])
    data[0:0] = header
    return [(BIGCHARSET, data)]
def _simple(av):
    # internal: check if av (the (min, max, subpattern) argument of a
    # repeat operator) is "simple", i.e. the repeated subpattern matches
    # exactly one character and is not a capturing group.
    # raises error for a repeat of a possibly-empty pattern.
    lo, hi = av[2].getwidth()
    if lo == 0 and hi == MAXREPEAT:
        raise error("nothing to repeat")
    return lo == hi == 1 and av[2][0][0] != SUBPATTERN
def _branch_literals(branches):
    # internal helper: if every alternative in branches starts with a
    # literal, return the list of those (LITERAL, av) items; otherwise
    # (an empty alternative, or one starting with something else)
    # return None.  used to derive a charset prefix for the info block.
    literals = []
    for p in branches:
        if not p:
            return None
        op, av = p[0]
        if op is LITERAL:
            literals.append((op, av))
        else:
            return None
    return literals

def _compile_info(code, pattern, flags):
    # internal: compile an info block. in the current version,
    # this contains min/max pattern width, and an optional literal
    # prefix or a character map
    lo, hi = pattern.getwidth()
    if lo == 0:
        return # not worth it
    # look for a literal prefix
    prefix = []
    prefix_skip = 0
    charset = [] # not used
    if not (flags & SRE_FLAG_IGNORECASE):
        # look for literal prefix
        for op, av in pattern.data:
            if op is LITERAL:
                # prefix_skip tracks the length of the leading run of
                # top-level literals (group literals extend the prefix
                # but not the skip count)
                if len(prefix) == prefix_skip:
                    prefix_skip = prefix_skip + 1
                prefix.append(av)
            elif op is SUBPATTERN and len(av[1]) == 1:
                op, av = av[1][0]
                if op is LITERAL:
                    prefix.append(av)
                else:
                    break
            else:
                break
        # if no prefix, look for charset prefix
        if not prefix and pattern.data:
            op, av = pattern.data[0]
            if op is SUBPATTERN and av[1]:
                op, av = av[1][0]
                if op is LITERAL:
                    charset.append((op, av))
                elif op is BRANCH:
                    c = _branch_literals(av[1])
                    if c is not None:
                        charset = c
            elif op is BRANCH:
                c = _branch_literals(av[1])
                if c is not None:
                    charset = c
            elif op is IN:
                charset = av
    # add an info block
    emit = code.append
    emit(OPCODES[INFO])
    skip = len(code); emit(0)
    # literal flag
    mask = 0
    if prefix:
        mask = SRE_INFO_PREFIX
        # a prefix covering the whole pattern means the pattern is a
        # plain literal string
        if len(prefix) == prefix_skip == len(pattern.data):
            mask = mask + SRE_INFO_LITERAL
    elif charset:
        mask = mask + SRE_INFO_CHARSET
    emit(mask)
    # pattern length (clamped to what fits in a code word)
    if lo < MAXCODE:
        emit(lo)
    else:
        emit(MAXCODE)
        prefix = prefix[:MAXCODE]
    if hi < MAXCODE:
        emit(hi)
    else:
        emit(0)
    # add literal prefix
    if prefix:
        emit(len(prefix)) # length
        emit(prefix_skip) # skip
        code.extend(prefix)
        # generate overlap table (Knuth-Morris-Pratt style failure
        # links over the prefix)
        table = [-1] + ([0]*len(prefix))
        for i in range(len(prefix)):
            table[i+1] = table[i]+1
            while table[i+1] > 0 and prefix[i] != prefix[table[i+1]-1]:
                table[i+1] = table[table[i+1]-1]+1
        code.extend(table[1:]) # don't store first entry
    elif charset:
        _compile_charset(charset, 0, code)
    code[skip] = len(code) - skip
# The types accepted as pattern strings.  Interpreters that provide a
# separate unicode string type (pre-3.x) contribute it as well; on the
# others the NameError is silently ignored.
STRING_TYPES = [str]

try:
    STRING_TYPES.append(type(unicode("")))
except NameError:
    pass
def _code(p, flags):
    # internal: generate the complete code vector for a parsed pattern.
    # the pattern's own compile-time flags are merged with the
    # caller-supplied ones before compilation.
    combined = p.pattern.flags | flags
    out = []
    _compile_info(out, p, combined)   # optional INFO header
    _compile(out, p.data, combined)   # pattern body
    out.append(OPCODES[SUCCESS])      # terminator
    return out
419 def compile(p, flags=0):
420 # internal: convert pattern list to internal format
422 if type(p) in STRING_TYPES:
423 import sre_parse
424 pattern = p
425 p = sre_parse.parse(p, flags)
426 else:
427 pattern = None
429 code = _code(p, flags)
431 # print code
433 # XXX: <fl> get rid of this limitation!
434 assert p.pattern.groups <= 100,\
435 "sorry, but this version only supports 100 named groups"
437 # map in either direction
438 groupindex = p.pattern.groupdict
439 indexgroup = [None] * p.pattern.groups
440 for k, i in groupindex.items():
441 indexgroup[i] = k
443 return _sre.compile(
444 pattern, flags, code,
445 p.pattern.groups-1,
446 groupindex, indexgroup