local json = require("json")
local lunit = require("lunit")
local testutil = require("testutil")

local encode = json.encode
-- DECODE NOT 'local' due to requirement for testutil to access it
decode = json.decode.getDecoder(false)

module("lunit-strings", lunit.testcase, package.seeall)
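-- Recursively asserts that 'expect' and 't' contain the same keys and values;
-- non-table values fall back to a plain assert_equal.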
local function assert_table_equal(expect, t)
	if type(expect) ~= 'table' then
		return assert_equal(expect, t)
	end
	for k, v in pairs(expect) do
		if type(k) ~= 'string' and type(k) ~= 'number' and type(k) ~= 'boolean' then
			error("INVALID expected table key")
		end
		if t[k] == nil then
			fail(tostring(k) .. " not found but expected")
		end
		assert_table_equal(v, t[k])
	end
	for k, v in pairs(t) do
		if nil == expect[k] then
			fail(tostring(k) .. " found but not expected")
		end
	end
end
-- lunit calls setup() before each test
function setup()
	-- Ensure that the decoder is reset
	_G["decode"] = json.decode.getDecoder(false)
end
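-- strict_quotes (a decode 'strings' option) controls whether single-quoted
-- strings are rejected; standard JSON only permits double quotes.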
function test_strict_quotes()
	local opts = {
		strings = { strict_quotes = true }
	}
	-- Single-quoted input must be rejected while strict_quotes is enabled
	assert_error(function()
		local decoder = json.decode.getDecoder(opts)
		decoder("'hello'")
	end)
	opts.strings.strict_quotes = false
	assert_equal("hello", json.decode.getDecoder(opts)("'hello'"))
	assert_equal("he'\"llo'", json.decode.getDecoder(opts)("'he\\'\"llo\\''"))
end
local utf16_matches = {
	-- 1-byte UTF-8 output
	{ '"\\u0000"', string.char(0x00) },
	{ '"\\u007F"', string.char(0x7F) },
	-- 2-byte UTF-8 output
	{ '"\\u0080"', string.char(0xC2, 0x80) },
	{ '"\\u00A2"', string.char(0xC2, 0xA2) },
	{ '"\\u07FF"', string.char(0xDF, 0xBF) },
	-- 3-byte UTF-8 output
	{ '"\\u0800"', string.char(0xE0, 0xA0, 0x80) },
	{ '"\\u20AC"', string.char(0xE2, 0x82, 0xAC) },
	{ '"\\uFEFF"', string.char(0xEF, 0xBB, 0xBF) },
	{ '"\\uFFFF"', string.char(0xEF, 0xBF, 0xBF) }
}
function test_utf16_decode()
	for i, v in ipairs(utf16_matches) do
		-- Test that the default \u decoder outputs UTF8
		local num = tostring(i) .. ' '
		assert_equal(num .. v[2], num .. json.decode(v[1]))
	end
end
local BOM = string.char(0xEF, 0xBB, 0xBF)
-- BOM skipping tests - here due to relation to UTF8/16
local BOM_skip_tests = {
	{ BOM .. '"x"', "x" },
	{ BOM .. '["\\uFFFF",true]', { string.char(0xEF, 0xBF, 0xBF), true } },
	-- Other uses of unicode spaces
}
function test_bom_skip()
	for i, v in ipairs(BOM_skip_tests) do
		assert_table_equal(v[2], json.decode(v[1]))
	end
end
-- Unicode whitespace codepoints gleaned from unicode.org
local WHITESPACES = {
	"\\u200B", -- addition, zero-width space
	"\\uFEFF" -- Zero-width non-breaking space (BOM)
}

local inject_ws_values = {
	" %WS%'the%WS blob' %WS%",
	"%WS%{ key: %WS%\"valueMan\",%WS% key2:%WS%4.4}"
}
function test_whitespace_ignore()
	for _, ws in ipairs(WHITESPACES) do
		-- Decode the \u escape into the literal whitespace string
		ws = json.decode('"' .. ws .. '"')
		for _, v in ipairs(inject_ws_values) do
			v = v:gsub("%%WS%%", ws)
			assert_true(nil ~= json.decode(v))
		end
	end
end
function test_u_encoding()
	local encoder = json.encode.getEncoder()
	local decoder = json.decode.getDecoder()
	-- Round-trip every single-byte value through encode/decode
	for i = 0, 255 do
		local char = string.char(i)
		assert_equal(char, decoder(encoder(char)))
	end
end
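-- xEncode is an encoder 'strings' option; this test only checks that whatever
-- escaping it produces still decodes back to the original bytes.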
function test_x_encoding()
	local encoder = json.encode.getEncoder({ strings = { xEncode = true } })
	local decoder = json.decode.getDecoder()
	-- Round-trip every single-byte value through encode/decode
	for i = 0, 255 do
		local char = string.char(i)
		assert_equal(char, decoder(encoder(char)))
	end
end
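-- Round-trip under the library's preset strict encode/decode option tables.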
function test_strict_decoding()
	local encoder = json.encode.getEncoder(json.encode.strict)
	local decoder = json.decode.getDecoder(json.decode.strict)
	for i = 0, 255 do
		local char = string.char(i)
		-- Must wrap character in array due to decoder strict-ness
		assert_equal(char, decoder(encoder({char}))[1])
	end
end