4 from yaml
.reader
import *
5 from yaml
.tokens
import *
6 from yaml
.scanner
import *
8 class TestTokens(test_appliance
.TestAppliance
):
# NOTE(review): this chunk is a garbled extraction — statements are split
# across lines and interior source lines are missing (the fused leading
# numbers such as "18", "31", "50" are line numbers from the original file).
# Code is kept byte-identical; only comments are added.
#
# Purpose: data-driven test comparing the Scanner's token stream for a
# ".data" YAML file against the whitespace-separated one-character
# mnemonics stored in the matching ".tokens" file.
#
# Legend of token mnemonics (visibly partial — earlier entries missing):
18 # block_sequence_start: [[
19 # block_mapping_start: {{
21 # flow_sequence_start: [
22 # flow_sequence_end: ]
23 # flow_mapping_start: {
# `replaces`: class attribute mapping token classes to their mnemonic
# strings used in the expected-output files. Only part of the dict is
# visible here; entries before DocumentStartToken and between the visible
# runs are missing from this chunk.
31 DocumentStartToken
: '---',
32 DocumentEndToken
: '...',
37 BlockSequenceStartToken
: '[[',
38 BlockMappingStartToken
: '{{',
40 FlowSequenceStartToken
: '[',
41 FlowSequenceEndToken
: ']',
42 FlowMappingStartToken
: '{',
43 FlowMappingEndToken
: '}',
# Test driver invoked (via test_appliance.TestAppliance.add_tests) once per
# fixture pair. data_filename is the YAML input; tokens_filename holds the
# expected mnemonics. Interior lines (tokens1 initialization, the scanning
# loop, and the surrounding try/except) are missing from this chunk.
50 def _testTokens(self
, test_name
, data_filename
, tokens_filename
):
# Expected token mnemonics, split on whitespace (Python 2 file() builtin).
52 tokens2
= file(tokens_filename
, 'rb').read().split()
# Scan the raw input through Reader -> Scanner.
54 scanner
= Scanner(Reader(file(data_filename
, 'rb')))
# Stream start/end tokens have no mnemonic and are excluded from the
# comparison (condition presumably guards an append inside the missing loop).
57 if not isinstance(token
, (StreamStartToken
, StreamEndToken
)):
# Translate the collected token classes into mnemonic strings via `replaces`.
59 tokens1
= [self
.replaces
[t
.__class
__] for t
in tokens1
]
60 self
.failUnlessEqual(tokens1
, tokens2
)
# Failure diagnostics: dump the raw input and both token streams
# (Python 2 print statements; presumably inside the missing except branch).
64 print file(data_filename
, 'rb').read()
65 print "TOKENS1:", tokens1
66 print "TOKENS2:", tokens2
# Register one test method per (.data, .tokens) fixture pair found by the
# test appliance; each generated test delegates to _testTokens above.
TestTokens.add_tests('testTokens', '.data', '.tokens')
71 class TestScanner(test_appliance
.TestAppliance
):
# NOTE(review): garbled extraction — statements split across lines and
# interior source lines missing (fused leading numbers are original line
# numbers). Code kept byte-identical; only comments added.
#
# Purpose: smoke-test that the Scanner can tokenize both the canonical and
# the plain form of each fixture, collecting token class names.
# Test driver invoked once per (.data, .canonical) fixture pair. The
# `tokens` list initialization, the scanning loop, and the surrounding
# try/except are missing from this chunk.
73 def _testScanner(self
, test_name
, data_filename
, canonical_filename
):
# Exercise the scanner on both representations of the document.
74 for filename
in [canonical_filename
, data_filename
]:
77 scanner
= Scanner(Reader(file(filename
, 'rb')))
# Stream start/end tokens are excluded; all other token class names are
# recorded (condition/append presumably sit inside the missing loop).
80 if not isinstance(token
, (StreamStartToken
, StreamEndToken
)):
81 tokens
.append(token
.__class
__.__name
__)
# Failure diagnostics: dump the raw input and the collected token names
# (Python 2 print statements; presumably inside the missing except branch).
85 print file(data_filename
, 'rb').read()
86 print "TOKENS:", tokens
# Register one test method per (.data, .canonical) fixture pair; each
# generated test delegates to _testScanner above.
TestScanner.add_tests('testScanner', '.data', '.canonical')