# tests/test_tokens.py (from pyyaml/python3.git)
import test_appliance

from yaml import *
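
# The wildcard import provides scan() and the token classes
# (DirectiveToken, ScalarToken, StreamStartToken, ...) used below.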

class TestTokens(test_appliance.TestAppliance):

    # Tokens mnemonic:
    # directive:            %
    # document_start:       ---
    # document_end:         ...
    # alias:                *
    # anchor:               &
    # tag:                  !
    # scalar:               _
    # block_sequence_start: [[
    # block_mapping_start:  {{
    # block_end:            ]}
    # flow_sequence_start:  [
    # flow_sequence_end:    ]
    # flow_mapping_start:   {
    # flow_mapping_end:     }
    # entry:                ,
    # key:                  ?
    # value:                :

    replaces = {
        DirectiveToken: '%',
        DocumentStartToken: '---',
        DocumentEndToken: '...',
        AliasToken: '*',
        AnchorToken: '&',
        TagToken: '!',
        ScalarToken: '_',
        BlockSequenceStartToken: '[[',
        BlockMappingStartToken: '{{',
        BlockEndToken: ']}',
        FlowSequenceStartToken: '[',
        FlowSequenceEndToken: ']',
        FlowMappingStartToken: '{',
        FlowMappingEndToken: '}',
        BlockEntryToken: ',',
        FlowEntryToken: ',',
        KeyToken: '?',
        ValueToken: ':',
    }
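
    # Worked example (illustration only): the two-item sequence
    #
    #     - foo
    #     - bar
    #
    # scans to BlockSequenceStartToken, BlockEntryToken, ScalarToken,
    # BlockEntryToken, ScalarToken, BlockEndToken (after dropping the
    # stream start/end tokens), which the table above transcribes as
    #
    #     [[ , _ , _ ]}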

    def _testTokens(self, test_name, data_filename, tokens_filename):
        tokens1 = None
        # The expected transcription: one mnemonic per token, read in text
        # mode so the entries compare as str against the table values.
        tokens2 = open(tokens_filename, 'r').read().split()
        try:
            tokens1 = []
            for token in scan(open(data_filename, 'rb')):
                # Stream delimiters carry no mnemonic, so skip them.
                if not isinstance(token, (StreamStartToken, StreamEndToken)):
                    tokens1.append(token)
            tokens1 = [self.replaces[t.__class__] for t in tokens1]
            self.assertEqual(tokens1, tokens2)
        except:
            print()
            print("DATA:")
            print(open(data_filename, 'rb').read())
            print("TOKENS1:", tokens1)
            print("TOKENS2:", tokens2)
            raise
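
# add_tests is provided by test_appliance (not shown here); it presumably
# generates one test method per fixture, pairing each .data file with its
# .tokens transcription.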
TestTokens.add_tests('testTokens', '.data', '.tokens')

class TestScanner(test_appliance.TestAppliance):
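
    # A smoke test: scan both the canonical and the original form of each
    # fixture and check only that the scanner gets through them without
    # raising; no token-level assertions are made here.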
    def _testScanner(self, test_name, data_filename, canonical_filename):
        for filename in [canonical_filename, data_filename]:
            tokens = None
            try:
                tokens = []
                for token in scan(open(filename, 'rb')):
                    if not isinstance(token, (StreamStartToken, StreamEndToken)):
                        tokens.append(token.__class__.__name__)
            except:
                print()
                print("DATA:")
                print(open(data_filename, 'rb').read())
                print("TOKENS:", tokens)
                raise

TestScanner.add_tests('testScanner', '.data', '.canonical')
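
# A minimal sketch for running these suites directly, assuming
# test_appliance.TestAppliance is ultimately a unittest.TestCase subclass
# (the assertEqual call above suggests it is):
if __name__ == '__main__':
    import unittest
    unittest.main()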