from yaml.reader import *
from yaml.tokens import *
from yaml.scanner import *
8 class TestTokens(test_appliance
.TestAppliance
):
18 # block_sequence_start: [[
19 # block_mapping_start: {{
21 # flow_sequence_start: [
22 # flow_sequence_end: ]
23 # flow_mapping_start: {
30 YAMLDirectiveToken
: '%',
31 TagDirectiveToken
: '%',
32 ReservedDirectiveToken
: '%',
33 DocumentStartToken
: '---',
34 DocumentEndToken
: '...',
39 BlockSequenceStartToken
: '[[',
40 BlockMappingStartToken
: '{{',
42 FlowSequenceStartToken
: '[',
43 FlowSequenceEndToken
: ']',
44 FlowMappingStartToken
: '{',
45 FlowMappingEndToken
: '}',
51 def _testTokens(self
, test_name
, data_filename
, tokens_filename
):
53 tokens2
= file(tokens_filename
, 'rb').read().split()
55 scanner
= Scanner(Reader(file(data_filename
, 'rb')))
57 while not isinstance(scanner
.peek_token(), EndToken
):
58 tokens1
.append(scanner
.get_token())
59 tokens1
= [self
.replaces
[t
.__class
__] for t
in tokens1
]
60 self
.failUnlessEqual(tokens1
, tokens2
)
64 print file(data_filename
, 'rb').read()
65 print "TOKENS1:", tokens1
66 print "TOKENS2:", tokens2
# Register one test method per matching `.data`/`.tokens` fixture pair.
TestTokens.add_tests('testTokens', '.data', '.tokens')
71 class TestScanner(test_appliance
.TestAppliance
):
73 def _testScanner(self
, test_name
, data_filename
, canonical_filename
):
74 for filename
in [canonical_filename
, data_filename
]:
77 scanner
= Scanner(Reader(file(filename
, 'rb')))
79 while not isinstance(scanner
.peek_token(), EndToken
):
80 tokens
.append(scanner
.get_token().__class
__.__name
__)
84 print file(data_filename
, 'rb').read()
85 print "TOKENS:", tokens
# Register one test method per matching `.data`/`.canonical` fixture pair.
TestScanner.add_tests('testScanner', '.data', '.canonical')