4 from yaml
.reader
import *
5 from yaml
.tokens
import *
6 from yaml
.scanner
import *
8 class TestTokens(test_appliance
.TestAppliance
):
18 # block_sequence_start: [[
19 # block_mapping_start: {{
21 # flow_sequence_start: [
22 # flow_sequence_end: ]
23 # flow_mapping_start: {
31 DocumentStartToken
: '---',
32 DocumentEndToken
: '...',
37 BlockSequenceStartToken
: '[[',
38 BlockMappingStartToken
: '{{',
40 FlowSequenceStartToken
: '[',
41 FlowSequenceEndToken
: ']',
42 FlowMappingStartToken
: '{',
43 FlowMappingEndToken
: '}',
49 def _testTokens(self
, test_name
, data_filename
, tokens_filename
):
51 tokens2
= file(tokens_filename
, 'rb').read().split()
53 scanner
= Scanner(Reader(file(data_filename
, 'rb')))
55 while not isinstance(scanner
.peek_token(), StreamEndToken
):
56 tokens1
.append(scanner
.get_token())
57 tokens1
= [self
.replaces
[t
.__class
__] for t
in tokens1
]
58 self
.failUnlessEqual(tokens1
, tokens2
)
62 print file(data_filename
, 'rb').read()
63 print "TOKENS1:", tokens1
64 print "TOKENS2:", tokens2
# Register one testTokens method per matching ('.data', '.tokens') fixture pair.
TestTokens.add_tests('testTokens', '.data', '.tokens')
69 class TestScanner(test_appliance
.TestAppliance
):
71 def _testScanner(self
, test_name
, data_filename
, canonical_filename
):
72 for filename
in [canonical_filename
, data_filename
]:
75 scanner
= Scanner(Reader(file(filename
, 'rb')))
77 while not isinstance(scanner
.peek_token(), StreamEndToken
):
78 tokens
.append(scanner
.get_token().__class
__.__name
__)
82 print file(data_filename
, 'rb').read()
83 print "TOKENS:", tokens
# Register one testScanner method per matching ('.data', '.canonical') fixture pair.
TestScanner.add_tests('testScanner', '.data', '.canonical')