import test_appliance

from yaml import *


class TestTokens(test_appliance.TestAppliance):

    # Tokens mnemonic:
    # directive:            %
    # document_start:       ---
    # document_end:         ...
    # alias:                *
    # anchor:               &
    # tag:                  !
    # scalar:               _
    # block_sequence_start: [[
    # block_mapping_start:  {{
    # block_end:            ]}
    # flow_sequence_start:  [
    # flow_sequence_end:    ]
    # flow_mapping_start:   {
    # flow_mapping_end:     }
    # entry:                ,
    # key:                  ?
    # value:                :

    replaces = {
        DirectiveToken: '%',
        DocumentStartToken: '---',
        DocumentEndToken: '...',
        AliasToken: '*',
        AnchorToken: '&',
        TagToken: '!',
        ScalarToken: '_',
        BlockSequenceStartToken: '[[',
        BlockMappingStartToken: '{{',
        BlockEndToken: ']}',
        FlowSequenceStartToken: '[',
        FlowSequenceEndToken: ']',
        FlowMappingStartToken: '{',
        FlowMappingEndToken: '}',
        BlockEntryToken: ',',
        FlowEntryToken: ',',
        KeyToken: '?',
        ValueToken: ':',
    }
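
    # Example (a sketch, not from the original file): scanning "- foo"
    # produces BlockSequenceStartToken, BlockEntryToken, ScalarToken and
    # BlockEndToken once the stream start/end tokens are dropped, so the
    # table above encodes it as the mnemonic stream "[[ , _ ]}".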

    def _testTokens(self, test_name, data_filename, tokens_filename):
        tokens1 = None
        # The expected mnemonic stream is stored whitespace-separated.
        tokens2 = file(tokens_filename, 'rb').read().split()
        try:
            tokens1 = []
            for token in scan(file(data_filename, 'rb')):
                if not isinstance(token, (StreamStartToken, StreamEndToken)):
                    tokens1.append(token)
            # Encode the scanned tokens with the mnemonic table above.
            tokens1 = [self.replaces[t.__class__] for t in tokens1]
            self.failUnlessEqual(tokens1, tokens2)
        except:
            print
            print "DATA:"
            print file(data_filename, 'rb').read()
            print "TOKENS1:", tokens1
            print "TOKENS2:", tokens2
            raise

TestTokens.add_tests('testTokens', '.data', '.tokens')
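
# Standalone sketch (not part of the original suite; the helper name
# _mnemonics is hypothetical): it applies the same encoding as _testTokens
# to an in-memory document, e.g. _mnemonics("- foo") should return
# '[[ , _ ]}'.
def _mnemonics(data):
    result = []
    for token in scan(data):
        if not isinstance(token, (StreamStartToken, StreamEndToken)):
            result.append(TestTokens.replaces[token.__class__])
    return ' '.join(result)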

class TestScanner(test_appliance.TestAppliance):

    def _testScanner(self, test_name, data_filename, canonical_filename):
        # Scan both the canonical form and the raw data form of each test.
        for filename in [canonical_filename, data_filename]:
            tokens = None
            try:
                tokens = []
                for token in scan(file(filename, 'rb')):
                    if not isinstance(token, (StreamStartToken, StreamEndToken)):
                        tokens.append(token.__class__.__name__)
            except:
                print
                print "DATA:"
                print file(data_filename, 'rb').read()
                print "TOKENS:", tokens
                raise

TestScanner.add_tests('testScanner', '.data', '.canonical')
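
# Usage sketch (an assumption, not from the original file): these suites are
# normally collected by a top-level test runner, but if
# test_appliance.TestAppliance extends unittest.TestCase, the module can also
# be run directly.
if __name__ == '__main__':
    import unittest
    unittest.main()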