2 # Secret Labs' Regular Expression Engine
4 # convert template to internal format
6 # Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved.
8 # See the sre.py file for information on usage and redistribution.
11 """Internal support module for sre"""
15 from sre_constants
import *
# NOTE(review): extraction-mangled fragment — statements are split across
# physical lines and the leading integers are fused original line numbers.
# Logically: verify the compiled _sre extension module matches this Python
# support module's expected magic number, then branch on the code word size.
17 assert _sre
.MAGIC
== MAGIC
, "SRE module mismatch"
# The body of this CODESIZE test is elided from this view (presumably it
# selects a MAXCODE limit for 16-bit code words) — confirm against the
# complete file.
19 if _sre
.CODESIZE
== 2:
# NOTE(review): mangled/elided fragment of the main compiler loop.  It walks
# the parsed (op, av) pairs of a (sub)pattern and appends opcodes to `code`
# via an `emit` helper (defined on an elided line).  Many branches and the
# bodies between fused line numbers are missing from this view.
24 def _compile(code
, pattern
, flags
):
25 # internal: compile a (sub)pattern
27 for op
, av
in pattern
:
# LITERAL / NOT_LITERAL: case-insensitive matching swaps in the *_IGNORE
# opcode and lowercases the character via the C helper.
28 if op
in (LITERAL
, NOT_LITERAL
):
29 if flags
& SRE_FLAG_IGNORECASE
:
30 emit(OPCODES
[OP_IGNORE
[op
]])
31 emit(_sre
.getlower(av
, flags
))
# (elided branch header here — presumably the IN / charset case.)
36 if flags
& SRE_FLAG_IGNORECASE
:
37 emit(OPCODES
[OP_IGNORE
[op
]])
# fixup lowercases charset members at compile time; binding flags as a
# default argument freezes the current value for the callback.
38 def fixup(literal
, flags
=flags
):
39 return _sre
.getlower(literal
, flags
)
# Reserve a skip slot, compile the charset, then backpatch the slot with
# the relative length — this reserve/backpatch pattern recurs below.
43 skip
= len(code
); emit(0)
44 _compile_charset(av
, flags
, code
, fixup
)
45 code
[skip
] = len(code
) - skip
47 if flags
& SRE_FLAG_DOTALL
:
48 emit(OPCODES
[ANY_ALL
])
# Repeat operators: templates cannot express repeats.
51 elif op
in (REPEAT
, MIN_REPEAT
, MAX_REPEAT
):
52 if flags
& SRE_FLAG_TEMPLATE
:
53 raise error
, "internal: unsupported template operator"
55 skip
= len(code
); emit(0)
58 _compile(code
, av
[2], flags
)
59 emit(OPCODES
[SUCCESS
])
60 code
[skip
] = len(code
) - skip
# Single-width, non-template repeats get the faster *_ONE opcodes.
61 elif _simple(av
) and op
!= REPEAT
:
63 emit(OPCODES
[REPEAT_ONE
])
65 emit(OPCODES
[MIN_REPEAT_ONE
])
66 skip
= len(code
); emit(0)
69 _compile(code
, av
[2], flags
)
70 emit(OPCODES
[SUCCESS
])
71 code
[skip
] = len(code
) - skip
# General repeat: compile the body, then MAX_UNTIL (greedy) or
# MIN_UNTIL (lazy) terminates the loop.
74 skip
= len(code
); emit(0)
77 _compile(code
, av
[2], flags
)
78 code
[skip
] = len(code
) - skip
80 emit(OPCODES
[MAX_UNTIL
])
82 emit(OPCODES
[MIN_UNTIL
])
# Subpattern (group): recursively compile its contents.
83 elif op
is SUBPATTERN
:
87 # _compile_info(code, av[1], flags)
88 _compile(code
, av
[1], flags
)
92 elif op
in (SUCCESS
, FAILURE
):
# Lookahead/lookbehind assertions; lookbehind requires a fixed width
# so the matcher can back up a known distance.
94 elif op
in (ASSERT
, ASSERT_NOT
):
96 skip
= len(code
); emit(0)
100 lo
, hi
= av
[1].getwidth()
102 raise error
, "look-behind requires fixed-width pattern"
103 emit(lo
) # look behind
104 _compile(code
, av
[1], flags
)
105 emit(OPCODES
[SUCCESS
])
106 code
[skip
] = len(code
) - skip
# (elided branch — av compiled as a subprogram with a skip slot.)
109 skip
= len(code
); emit(0)
110 _compile(code
, av
, flags
)
111 emit(OPCODES
[SUCCESS
])
112 code
[skip
] = len(code
) - skip
# AT (anchor) handling: remap the anchor code by flag; the .get(av, av)
# idiom leaves av unchanged when no remapping applies.
115 if flags
& SRE_FLAG_MULTILINE
:
116 av
= AT_MULTILINE
.get(av
, av
)
117 if flags
& SRE_FLAG_LOCALE
:
118 av
= AT_LOCALE
.get(av
, av
)
119 elif flags
& SRE_FLAG_UNICODE
:
120 av
= AT_UNICODE
.get(av
, av
)
# Branch (alternation): each alternative is a skip-prefixed subprogram;
# tail slots collect jump targets patched after all branches are emitted.
126 skip
= len(code
); emit(0)
127 # _compile_info(code, av, flags)
128 _compile(code
, av
, flags
)
130 tail
.append(len(code
)); emit(0)
131 code
[skip
] = len(code
) - skip
132 emit(0) # end of branch
# NOTE(review): `code[tail] = ...` with tail a list would raise in real
# execution; elided lines presumably loop over the collected positions.
134 code
[tail
] = len(code
) - tail
# Category/grouping opcodes remapped by locale/unicode flags (elided).
137 if flags
& SRE_FLAG_LOCALE
:
139 elif flags
& SRE_FLAG_UNICODE
:
143 if flags
& SRE_FLAG_IGNORECASE
:
144 emit(OPCODES
[OP_IGNORE
[op
]])
# Unknown operator: hard failure.
149 raise ValueError, ("unsupported operand type", op
)
# NOTE(review): mangled/elided fragment.  Emits the opcode sequence for one
# character set, after letting _optimize_charset compact it; terminated by a
# FAILURE sentinel opcode.
151 def _compile_charset(charset
, flags
, code
, fixup
=None):
152 # compile charset subprogram
156 for op
, av
in _optimize_charset(charset
, fixup
):
# (earlier op branches elided from this view.)
167 elif op
is BIGCHARSET
:
# Category members are remapped to locale- or unicode-aware variants
# depending on the compile flags.
170 if flags
& SRE_FLAG_LOCALE
:
171 emit(CHCODES
[CH_LOCALE
[av
]])
172 elif flags
& SRE_FLAG_UNICODE
:
173 emit(CHCODES
[CH_UNICODE
[av
]])
177 raise error
, "internal: unsupported set operator"
# FAILURE marks the end of the set subprogram.
178 emit(OPCODES
[FAILURE
])
# NOTE(review): mangled/elided fragment.  Tries to compress a character set
# into a 256-entry bitmap; falls back to the original set (or the unicode
# optimizer) when members fall outside the 8-bit range.
180 def _optimize_charset(charset
, fixup
):
181 # internal: optimize character set
185 for op
, av
in charset
:
# Single literal: mark one slot (fixup lowercases when case-folding).
189 charmap
[fixup(av
)] = 1
# Range: mark every slot in the inclusive range.
191 for i
in range(fixup(av
[0]), fixup(av
[1])+1):
194 # XXX: could append to charmap tail
195 return charset
# cannot compress
197 # character set contains unicode characters
198 return _optimize_unicode(charset
, fixup
)
199 # compress character map
# Runs in the bitmap are re-emitted as single LITERALs or RANGEs
# (elided run-length scan above); p/n presumably are run start/length.
217 out
.append((LITERAL
, p
))
219 out
.append((RANGE
, (p
, p
+n
-1)))
# Use the literal/range form only when it is smaller than the input;
# otherwise fall back to a raw CHARSET bitmap.
220 if len(out
) < len(charset
):
224 data
= _mk_bitmap(charmap
)
225 out
.append((CHARSET
, data
))
# NOTE(review): only the signature and a CODESIZE branch survive in this
# view.  Packs a sequence of 0/1 flags into code-word-sized bitmap chunks;
# the packing bodies are elided.
229 def _mk_bitmap(bits
):
231 if _sre
.CODESIZE
== 2:
245 # To represent a big charset, first a bitmap of all characters in the
246 # set is constructed. Then, this bitmap is sliced into chunks of 256
247 # characters, duplicate chunks are eliminated, and each chunk is
248 # given a number. In the compiled expression, the charset is
249 # represented by a 16-bit word sequence, consisting of one word for
250 # the number of different chunks, a sequence of 256 bytes (128 words)
251 # of chunk numbers indexed by their original chunk position, and a
252 # sequence of chunks (16 words each).
254 # Compression is normally good: in a typical charset, large ranges of
255 # Unicode will be either completely excluded (e.g. if only cyrillic
256 # letters are to be matched), or completely included (e.g. if large
257 # subranges of Kanji match). These ranges will be represented by
258 # chunks of all one-bits or all zero-bits.
260 # Matching can be also done efficiently: the more significant byte of
261 # the Unicode character is an index into the chunk number, and the
262 # less significant byte is a bit index in the chunk (just like the
265 # In UCS-4 mode, the BIGCHARSET opcode still supports only subsets
266 # of the basic multilingual plane; an efficient representation
267 # for all of UTF-16 has not yet been developed. This means,
268 # in particular, that negated charsets cannot be represented as
# NOTE(review): mangled/elided fragment.  Builds the BIGCHARSET
# representation described in the comment block above: a 65536-entry
# bitmap sliced into 256-char chunks, deduplicated via a chunk->index map.
271 def _optimize_unicode(charset
, fixup
):
279 for op
, av
in charset
:
283 charmap
[fixup(av
)] = 1
285 for i
in range(fixup(av
[0]), fixup(av
[1])+1):
288 # XXX: could expand category
289 return charset
# cannot compress
# Negated-set handling only works for the 16-bit unicode build.
294 if sys
.maxunicode
!= 65535:
295 # XXX: negation does not work with big charsets
# Invert the full bitmap in place for a negated set.
297 for i
in range(65536):
298 charmap
[i
] = not charmap
[i
]
# Deduplicate 256-char chunks: setdefault returns the existing index
# for an already-seen chunk, so identical chunks share one bitmap.
304 chunk
= tuple(charmap
[i
*256:(i
+1)*256])
305 new
= comps
.setdefault(chunk
, block
)
309 data
= data
+ _mk_bitmap(chunk
)
# Pack the chunk-index mapping to match the engine's code word size.
311 if _sre
.CODESIZE
== 2:
315 # Convert block indices to byte array of 256 bytes
316 mapping
= array
.array('b', mapping
).tostring()
317 # Convert byte array to word array
318 mapping
= array
.array(code
, mapping
)
319 assert mapping
.itemsize
== _sre
.CODESIZE
320 header
= header
+ mapping
.tolist()
322 return [(BIGCHARSET
, data
)]
# NOTE(review): the `def` line of this helper (presumably `_simple(av)`,
# called from the repeat branch above) is elided from this view; only the
# body survives.  Returns true when a repeat body is exactly one character
# wide and not a group, so the fast *_ONE opcodes can be used.
325 # check if av is a "simple" operator
326 lo
, hi
= av
[2].getwidth()
# A zero-minimum unbounded repeat of nothing is an error.
327 if lo
== 0 and hi
== MAXREPEAT
:
328 raise error
, "nothing to repeat"
329 return lo
== hi
== 1 and av
[2][0][0] != SUBPATTERN
# NOTE(review): mangled/elided fragment.  Emits the optional INFO block:
# min/max pattern width plus either a literal prefix (with a KMP-style
# overlap table) or a leading charset, used by the matcher to skip quickly.
331 def _compile_info(code
, pattern
, flags
):
332 # internal: compile an info block. in the current version,
333 # this contains min/max pattern width, and an optional literal
334 # prefix or a character map
335 lo
, hi
= pattern
.getwidth()
337 return # not worth it
338 # look for a literal prefix
341 charset
= [] # not used
# Case-insensitive patterns get no literal-prefix optimization.
342 if not (flags
& SRE_FLAG_IGNORECASE
):
343 # look for literal prefix
344 for op
, av
in pattern
.data
:
# prefix_skip tracks how much of the prefix can be skipped after a
# successful prefix match (elided lines accumulate `prefix`).
346 if len(prefix
) == prefix_skip
:
347 prefix_skip
= prefix_skip
+ 1
349 elif op
is SUBPATTERN
and len(av
[1]) == 1:
357 # if no prefix, look for charset prefix
358 if not prefix
and pattern
.data
:
359 op
, av
= pattern
.data
[0]
360 if op
is SUBPATTERN
and av
[1]:
363 charset
.append((op
, av
))
391 ## print "*** PREFIX", prefix, prefix_skip
393 ## print "*** CHARSET", charset
# Reserve the info-block skip slot, patched at the end.
397 skip
= len(code
); emit(0)
# Build the mask describing which optional sections follow.
401 mask
= SRE_INFO_PREFIX
# If the prefix IS the whole pattern, the matcher can match literally.
402 if len(prefix
) == prefix_skip
== len(pattern
.data
):
403 mask
= mask
+ SRE_INFO_LITERAL
405 mask
= mask
+ SRE_INFO_CHARSET
# Truncate to what fits in one code word.
412 prefix
= prefix
[:MAXCODE
]
419 emit(len(prefix
)) # length
420 emit(prefix_skip
) # skip
422 # generate overlap table
# Classic KMP failure-function construction over the prefix.
423 table
= [-1] + ([0]*len(prefix
))
424 for i
in range(len(prefix
)):
425 table
[i
+1] = table
[i
]+1
426 while table
[i
+1] > 0 and prefix
[i
] != prefix
[table
[i
+1]-1]:
427 table
[i
+1] = table
[table
[i
+1]-1]+1
428 code
.extend(table
[1:]) # don't store first entry
430 _compile_charset(charset
, flags
, code
)
431 code
[skip
] = len(code
) - skip
# NOTE(review): both assignments survive but the surrounding control flow
# (presumably a try/except NameError probing for `unicode` support) is
# elided — confirm against the complete file.  Narrow build: str only;
# unicode-capable build: str and unicode.
436 STRING_TYPES
= (type(""),)
438 STRING_TYPES
= (type(""), type(unicode("")))
# NOTE(review): the enclosing `def` line(s) are elided; these fragments
# appear to belong to a string-type check helper and to `_code`, which
# assembles the final code list: info block, pattern body, then SUCCESS.
441 for tp
in STRING_TYPES
:
442 if isinstance(obj
, tp
):
# Merge flags supplied at compile time with those parsed from the pattern.
448 flags
= p
.pattern
.flags | flags
452 _compile_info(code
, p
, flags
)
454 # compile the pattern
455 _compile(code
, p
.data
, flags
)
457 code
.append(OPCODES
[SUCCESS
])
# NOTE(review): mangled/elided fragment of the public entry point: parses
# the pattern string, generates code, builds the group index maps, and
# (on elided lines) constructs the final pattern object.
461 def compile(p
, flags
=0):
462 # internal: convert pattern list to internal format
467 p
= sre_parse
.parse(p
, flags
)
471 code
= _code(p
, flags
)
475 # XXX: <fl> get rid of this limitation!
476 assert p
.pattern
.groups
<= 100,\
477 "sorry, but this version only supports 100 named groups"
479 # map in either direction
# groupindex: name -> group number; indexgroup: group number -> name.
480 groupindex
= p
.pattern
.groupdict
481 indexgroup
= [None] * p
.pattern
.groups
482 for k
, i
in groupindex
.items():
# (elided: these look like arguments to the pattern-object constructor.)
486 pattern
, flags
, code
,
488 groupindex
, indexgroup