#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Lexer for PPAPI IDL.

The lexer uses the PLY library to build a tokenizer which understands both
WebIDL and Pepper tokens.

The WebIDL spec, including its token regular expressions, can be found at:
   http://www.w3.org/TR/2012/CR-WebIDL-20120419/
PLY can be found at:
   http://www.dabeaz.com/ply/
"""
from idl_lexer import IDLLexer


#
# IDL PPAPI Lexer
#
class IDLPPAPILexer(IDLLexer):
  # Token definitions
  #
  # These need to be methods for lexer construction, despite not using self.
  # pylint: disable=R0201
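  #
  # PLY builds the tokenizer by reflecting over methods named t_<TOKEN>: each
  # method's docstring is the regular expression for that token, and the body
  # can post-process the matched token before returning it.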
  # Special multi-character operators
  def t_LSHIFT(self, t):
    r'<<'
    return t

  def t_RSHIFT(self, t):
    r'>>'
    return t
  # Return a "preprocessor" inline block
  def t_INLINE(self, t):
    r'\#inline (.|\n)*?\#endinl.*'
    self.AddLines(t.value.count('\n'))
    return t
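
  # A sketch of the inline syntax t_INLINE matches (hypothetical snippet;
  # real examples live in test_parser/inline_ppapi.idl):
  #   #inline c
  #   /* raw source passed through verbatim to the generated code */
  #   #endinl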

  def __init__(self):
    IDLLexer.__init__(self)
    self._AddTokens(['INLINE', 'LSHIFT', 'RSHIFT'])
    self._AddKeywords(['label', 'struct'])

    # Add number types
    self._AddKeywords(['char', 'int8_t', 'int16_t', 'int32_t', 'int64_t'])
    self._AddKeywords(['uint8_t', 'uint16_t', 'uint32_t', 'uint64_t'])
    self._AddKeywords(['double_t', 'float_t'])

    # Add handle types
    self._AddKeywords(['handle_t', 'PP_FileHandle'])

    # Add pointer types (void*, char*, const char*, const void*)
    self._AddKeywords(['mem_t', 'str_t', 'cstr_t', 'interface_t'])

    # Remove WebIDL types that PPAPI IDL does not use
    self._DelKeywords(['boolean', 'byte', 'ByteString', 'Date', 'DOMString',
                       'double', 'float', 'long', 'object', 'octet', 'Promise',
                       'RegExp', 'short', 'unsigned'])
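
    # Example of a declaration these keyword changes make lexable
    # (hypothetical IDL, modeled on Pepper's PP_Point):
    #   struct PP_Point {
    #     int32_t x;
    #     int32_t y;
    #   };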


# If run by itself, attempt to build the lexer
if __name__ == '__main__':
  lexer = IDLPPAPILexer()
  lexer.Tokenize(open('test_parser/inline_ppapi.idl').read())
  for tok in lexer.GetTokens():
    print '\n' + str(tok)