# The author disclaims copyright to this source code.  In place of
# a legal notice, here is a blessing:
#
#    May you do good and not evil.
#    May you find forgiveness for yourself and forgive others.
#    May you share freely, never taking more than you give.
#
#*************************************************************************
# This file implements regression tests for SQLite library. The focus
# of this script is testing the pluggable tokeniser feature of the
# fts2 module.
#
# $Id: fts2token.test,v 1.3 2007/06/25 12:05:40 danielk1977 Exp $

set testdir [file dirname $argv0]
source $testdir/tester.tcl

# If SQLITE_ENABLE_FTS2 is not defined, omit this file.
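ifcapable !fts2 {
  finish_test
  return
}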

# Return a copy of $str in which each non-ASCII character is replaced
# by an escape sequence of the form \xXXXX.
proc escape_string {str} {
  set out ""
  foreach char [split $str ""] {
    scan $char %c i
    if {$i<=127} {append out $char} else {append out [format {\x%.4x} $i]}
  }
  set out
}

#--------------------------------------------------------------------------
# Test cases fts2token-1.* are the warm-body test for the SQL scalar
# function fts2_tokenizer(). The procedure is as follows:
#
#   1: Verify that there is no such fts2 tokenizer as 'blah'.
#
#   2: Query for the built-in tokenizer 'simple'. Insert a copy of the
#      retrieved value as tokenizer 'blah'.
#
#   3: Test that the value returned for tokenizer 'blah' is now the
#      same as that retrieved for 'simple'.
#
#   4: Test that it is now possible to create an fts2 table using
#      tokenizer 'blah' (it was not possible in step 1).
#
#   5: Test that the table created to use tokenizer 'blah' is usable.
#
do_test fts2token-1.1 {
  catchsql {
    CREATE VIRTUAL TABLE t1 USING fts2(content, tokenize blah);
  }
} {1 {unknown tokenizer: blah}}
do_test fts2token-1.2 {
  execsql {
    SELECT fts2_tokenizer('blah', fts2_tokenizer('simple')) IS NULL;
  }
} {0}
do_test fts2token-1.3 {
  execsql {
    SELECT fts2_tokenizer('blah') == fts2_tokenizer('simple');
  }
} {1}
do_test fts2token-1.4 {
  catchsql {
    CREATE VIRTUAL TABLE t1 USING fts2(content, tokenize blah);
  }
} {0 {}}
do_test fts2token-1.5 {
  execsql {
    INSERT INTO t1(content) VALUES('There was movement at the station');
    INSERT INTO t1(content) VALUES('For the word has passed around');
    INSERT INTO t1(content) VALUES('That the colt from old regret had got away');
    SELECT content FROM t1 WHERE content MATCH 'movement';
  }
} {{There was movement at the station}}

#--------------------------------------------------------------------------
# Test cases fts2token-2.* test error cases in the scalar function based
# API for getting and setting tokenizers.
#
do_test fts2token-2.1 {
  catchsql {
    SELECT fts2_tokenizer('nosuchtokenizer');
  }
} {1 {unknown tokenizer: nosuchtokenizer}}

#--------------------------------------------------------------------------
# Test cases fts2token-3.* test the three built-in tokenizers with a
# simple input string via the built-in test function. This is as much
# to test the test function as the tokenizer implementations.
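#
# In the expected results below, each token is reported as a triple:
# "<token index> <normalized token text> <original token text>".
#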
do_test fts2token-3.1 {
  execsql {
    SELECT fts2_tokenizer_test('simple', 'I don''t see how');
  }
} {{0 i I 1 don don 2 t t 3 see see 4 how how}}
do_test fts2token-3.2 {
  execsql {
    SELECT fts2_tokenizer_test('porter', 'I don''t see how');
  }
} {{0 i I 1 don don 2 t t 3 see see 4 how how}}
ifcapable icu {
  do_test fts2token-3.3 {
    execsql {
      SELECT fts2_tokenizer_test('icu', 'I don''t see how');
    }
  } {{0 i I 1 don't don't 2 see see 3 how how}}
}

#--------------------------------------------------------------------------
# Test cases fts2token-4.* test the ICU tokenizer. In practice, this
# tokenizer only has two modes: "thai" and "everybody else". Some other
# Asian languages (Lao, Khmer etc.) require the same special treatment as
# Thai, but ICU doesn't support them yet.
#
# The ICU tokenizer is only present in builds compiled with ICU support,
# so these tests are guarded by "ifcapable icu".
ifcapable icu {
  # Tokenize $input using the ICU tokenizer with locale $locale. Check
  # that the result matches $output.
  proc do_icu_test {name locale input output} {
    set ::out [db eval { SELECT fts2_tokenizer_test('icu', $locale, $input) }]
    do_test $name {
      set ::out
    } [list $output]
  }

  do_icu_test fts2token-4.1 en_US {} {}
  do_icu_test fts2token-4.2 en_US {Test cases fts2} [list \
    0 test Test 1 cases cases 2 fts2 fts2
  ]

  # The following test shows that ICU is smart enough to recognise
  # Thai characters, even when the locale is set to English/United
  # States.

  set input "\u0e2d\u0e30\u0e44\u0e23\u0e19\u0e30\u0e04\u0e23\u0e31\u0e1a"
  set output "0 \u0e2d\u0e30\u0e44\u0e23 \u0e2d\u0e30\u0e44\u0e23 "
  append output "1 \u0e19\u0e30 \u0e19\u0e30 "
  append output "2 \u0e04\u0e23\u0e31\u0e1a \u0e04\u0e23\u0e31\u0e1a"

  do_icu_test fts2token-4.3 th_TH $input $output
  do_icu_test fts2token-4.4 en_US $input $output

  # ICU handles an unknown locale by falling back to the default.
  # So this is not an error.
  do_icu_test fts2token-4.5 MiddleOfTheOcean $input $output
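
  # As its name suggests, the long token constructed below should be
  # big enough to force the ICU tokenizer code to realloc its token
  # buffer.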
  set longtoken "AReallyReallyLongTokenOneThatWillSurelyRequire"
  append longtoken "AReallocInTheIcuTokenizerCode"

  set input "short tokens then "
  append input $longtoken
  set output "0 short short "
  append output "1 tokens tokens "
  append output "2 then then "
  append output "3 [string tolower $longtoken] $longtoken"

  do_icu_test fts2token-4.6 MiddleOfTheOcean $input $output
  do_icu_test fts2token-4.7 th_TH $input $output
  do_icu_test fts2token-4.8 en_US $input $output
}

do_test fts2token-internal {
  execsql { SELECT fts2_tokenizer_internal_test() }
} {ok}

finish_test