Parser is done. Add iterator interfaces for Scanner and Parser.
[pyyaml/python3.git] / tests / test_tokens.py
blob 2ccc30516cfc666b55eb28a93571b6dabdc2498f
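The commit adds iterator interfaces to Scanner and Parser; the tests in this file exercise the Scanner side by writing "for token in scanner: ..." directly. Below is a minimal sketch of what such an interface can look like, not taken from the PyYAML sources: the helper fetch_token() is a hypothetical name, and only the external behaviour (tokens are yielded one by one, ending with StreamEndToken) is implied by the tests.

    class ScannerIteratorSketch(object):
        # Python 2 iterator protocol: the object is its own iterator.
        def __iter__(self):
            return self
        def next(self):
            token = self.fetch_token()   # hypothetical internal token producer
            if token is None:            # nothing left after StreamEndToken
                raise StopIteration
            return token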
import test_appliance

from yaml.reader import *
from yaml.tokens import *
from yaml.scanner import *

class TestTokens(test_appliance.TestAppliance):

    # Tokens mnemonic:
    # directive: %
    # document_start: ---
    # document_end: ...
    # alias: *
    # anchor: &
    # tag: !
    # scalar _
    # block_sequence_start: [[
    # block_mapping_start: {{
    # block_end: ]}
    # flow_sequence_start: [
    # flow_sequence_end: ]
    # flow_mapping_start: {
    # flow_mapping_end: }
    # entry: ,
    # key: ?
    # value: :

    replaces = {
        DirectiveToken: '%',
        DocumentStartToken: '---',
        DocumentEndToken: '...',
        AliasToken: '*',
        AnchorToken: '&',
        TagToken: '!',
        ScalarToken: '_',
        BlockSequenceStartToken: '[[',
        BlockMappingStartToken: '{{',
        BlockEndToken: ']}',
        FlowSequenceStartToken: '[',
        FlowSequenceEndToken: ']',
        FlowMappingStartToken: '{',
        FlowMappingEndToken: '}',
        BlockEntryToken: ',',
        FlowEntryToken: ',',
        KeyToken: '?',
        ValueToken: ':',
    }
    def _testTokens(self, test_name, data_filename, tokens_filename):
        tokens1 = None
        # Expected mnemonics, one whitespace-separated entry per token.
        tokens2 = file(tokens_filename, 'rb').read().split()
        try:
            scanner = Scanner(Reader(file(data_filename, 'rb')))
            tokens1 = []
            for token in scanner:
                # The trailing StreamEndToken has no mnemonic, so drop it.
                if not isinstance(token, StreamEndToken):
                    tokens1.append(token)
            tokens1 = [self.replaces[t.__class__] for t in tokens1]
            self.failUnlessEqual(tokens1, tokens2)
        except:
            print
            print "DATA:"
            print file(data_filename, 'rb').read()
            print "TOKENS1:", tokens1
            print "TOKENS2:", tokens2
            raise
TestTokens.add_tests('testTokens', '.data', '.tokens')
class TestScanner(test_appliance.TestAppliance):

    def _testScanner(self, test_name, data_filename, canonical_filename):
        # Both the canonical and the plain form should scan without errors.
        for filename in [canonical_filename, data_filename]:
            tokens = None
            try:
                scanner = Scanner(Reader(file(filename, 'rb')))
                tokens = []
                for token in scanner:
                    if not isinstance(token, StreamEndToken):
                        tokens.append(token.__class__.__name__)
            except:
                print
                print "DATA:"
                print file(data_filename, 'rb').read()
                print "TOKENS:", tokens
                raise

TestScanner.add_tests('testScanner', '.data', '.canonical')
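As a quick usage sketch outside the test suite, the same iterator interface can be driven by hand. This assumes, as the tests above do, that Reader accepts a file-like object; StringIO is used here purely for illustration and is not part of this file.

    from StringIO import StringIO
    from yaml.reader import Reader
    from yaml.scanner import Scanner
    from yaml.tokens import StreamEndToken

    scanner = Scanner(Reader(StringIO("- foo\n- bar\n")))
    for token in scanner:
        if isinstance(token, StreamEndToken):
            break
        print token.__class__.__name__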