From 7bb481fda9ecb134804b49c2ce77ca28f7eea583 Mon Sep 17 00:00:00 2001
From: Hans-Christoph Steiner
Date: Fri, 30 Mar 2012 20:42:12 -0400
Subject: Imported Upstream version 2.0.3

---
 test/fts3atoken.test | 187 +++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 187 insertions(+)
 create mode 100644 test/fts3atoken.test

diff --git a/test/fts3atoken.test b/test/fts3atoken.test
new file mode 100644
index 0000000..554259d
--- /dev/null
+++ b/test/fts3atoken.test
@@ -0,0 +1,187 @@
+# 2007 June 21
+#
+# The author disclaims copyright to this source code. In place of
+# a legal notice, here is a blessing:
+#
+#    May you do good and not evil.
+#    May you find forgiveness for yourself and forgive others.
+#    May you share freely, never taking more than you give.
+#
+#*************************************************************************
+# This file implements regression tests for the SQLite library. The focus
+# of this script is testing the pluggable tokeniser feature of the
+# FTS3 module.
+#
+# $Id: fts3atoken.test,v 1.1 2007/08/20 17:38:42 shess Exp $
+#
+
+set testdir [file dirname $argv0]
+source $testdir/tester.tcl
+
+# If SQLITE_ENABLE_FTS3 is not defined, omit this file.
+ifcapable !fts3 {
+  finish_test
+  return
+}
+
+set ::testprefix fts3token
+
+proc escape_string {str} {
+  set out ""
+  foreach char [split $str ""] {
+    scan $char %c i
+    if {$i<=127} {
+      append out $char
+    } else {
+      append out [format {\x%.4x} $i]
+    }
+  }
+  set out
+}
+
+#--------------------------------------------------------------------------
+# Test cases fts3token-1.* are the warm-body test for the SQL scalar
+# function fts3_tokenizer(). The procedure is as follows:
+#
+#   1: Verify that there is no such fts3 tokenizer as 'blah'.
+#
+#   2: Query for the built-in tokenizer 'simple'. Insert a copy of the
+#      retrieved value as tokenizer 'blah'.
+#
+#   3: Test that the value returned for tokenizer 'blah' is now the
+#      same as that retrieved for 'simple'.
+#
+#   4: Test that it is now possible to create an fts3 table using
+#      tokenizer 'blah' (it was not possible in step 1).
+#
+#   5: Test that the table created to use tokenizer 'blah' is usable.
+#
+do_test fts3token-1.1 {
+  catchsql {
+    CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah);
+  }
+} {1 {unknown tokenizer: blah}}
+do_test fts3token-1.2 {
+  execsql {
+    SELECT fts3_tokenizer('blah', fts3_tokenizer('simple')) IS NULL;
+  }
+} {0}
+do_test fts3token-1.3 {
+  execsql {
+    SELECT fts3_tokenizer('blah') == fts3_tokenizer('simple');
+  }
+} {1}
+do_test fts3token-1.4 {
+  catchsql {
+    CREATE VIRTUAL TABLE t1 USING fts3(content, tokenize blah);
+  }
+} {0 {}}
+do_test fts3token-1.5 {
+  execsql {
+    INSERT INTO t1(content) VALUES('There was movement at the station');
+    INSERT INTO t1(content) VALUES('For the word has passed around');
+    INSERT INTO t1(content) VALUES('That the colt from ol regret had got away');
+    SELECT content FROM t1 WHERE content MATCH 'movement'
+  }
+} {{There was movement at the station}}
+
+#--------------------------------------------------------------------------
+# Test cases fts3token-2.* test error cases in the scalar function based
+# API for getting and setting tokenizers.
+#
+do_test fts3token-2.1 {
+  catchsql {
+    SELECT fts3_tokenizer('nosuchtokenizer');
+  }
+} {1 {unknown tokenizer: nosuchtokenizer}}
+
+#--------------------------------------------------------------------------
+# Test cases fts3token-3.* test the three built-in tokenizers with a
+# simple input string via the built-in test function. This is as much
+# to test the test function as the tokenizer implementations.
+#
+do_test fts3token-3.1 {
+  execsql {
+    SELECT fts3_tokenizer_test('simple', 'I don''t see how');
+  }
+} {{0 i I 1 don don 2 t t 3 see see 4 how how}}
+do_test fts3token-3.2 {
+  execsql {
+    SELECT fts3_tokenizer_test('porter', 'I don''t see how');
+  }
+} {{0 i I 1 don don 2 t t 3 see see 4 how how}}
+ifcapable icu {
+  do_test fts3token-3.3 {
+    execsql {
+      SELECT fts3_tokenizer_test('icu', 'I don''t see how');
+    }
+  } {{0 i I 1 don't don't 2 see see 3 how how}}
+}
+
+#--------------------------------------------------------------------------
+# Test cases fts3token-4.* test the ICU tokenizer. In practice, this
+# tokenizer only has two modes - "thai" and "everybody else". Some other
+# Asian languages (Lao, Khmer etc.) require the same special treatment as
+# Thai, but ICU doesn't support them yet.
+#
+ifcapable icu {
+
+  proc do_icu_test {name locale input output} {
+    set ::out [db eval { SELECT fts3_tokenizer_test('icu', $locale, $input) }]
+    do_test $name {
+      lindex $::out 0
+    } $output
+  }
+
+  do_icu_test fts3token-4.1 en_US {} {}
+  do_icu_test fts3token-4.2 en_US {Test cases fts3} [list \
+    0 test Test 1 cases cases 2 fts3 fts3
+  ]
+
+  # The following test shows that ICU is smart enough to recognise
+  # Thai characters, even when the locale is set to English/United
+  # States.
+  #
+  set input "\u0e2d\u0e30\u0e44\u0e23\u0e19\u0e30\u0e04\u0e23\u0e31\u0e1a"
+  set output "0 \u0e2d\u0e30\u0e44\u0e23 \u0e2d\u0e30\u0e44\u0e23 "
+  append output "1 \u0e19\u0e30 \u0e19\u0e30 "
+  append output "2 \u0e04\u0e23\u0e31\u0e1a \u0e04\u0e23\u0e31\u0e1a"
+
+  do_icu_test fts3token-4.3 th_TH $input $output
+  do_icu_test fts3token-4.4 en_US $input $output
+
+  # ICU handles an unknown locale by falling back to the default.
+  # So this is not an error.
+  do_icu_test fts3token-4.5 MiddleOfTheOcean $input $output
+
+  set longtoken "AReallyReallyLongTokenOneThatWillSurelyRequire"
+  append longtoken "AReallocInTheIcuTokenizerCode"
+
+  set input "short tokens then "
+  append input $longtoken
+  set output "0 short short "
+  append output "1 tokens tokens "
+  append output "2 then then "
+  append output "3 [string tolower $longtoken] $longtoken"
+
+  do_icu_test fts3token-4.6 MiddleOfTheOcean $input $output
+  do_icu_test fts3token-4.7 th_TH $input $output
+  do_icu_test fts3token-4.8 en_US $input $output
+
+  do_execsql_test 5.1 {
+    CREATE VIRTUAL TABLE x1 USING fts3(name,TOKENIZE icu en_US);
+    insert into x1 (name) values (NULL);
+    insert into x1 (name) values (NULL);
+    delete from x1;
+  }
+}
+
+
+do_test fts3token-internal {
+  execsql { SELECT fts3_tokenizer_internal_test() }
+} {ok}
+
+
+finish_test
--
cgit v1.2.3
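For readers unfamiliar with the legacy FTS3 pluggable-tokenizer interface exercised by this test file, the following is a minimal sketch of the same registration pattern written against the plain Tcl bindings rather than the tester.tcl harness. It assumes an SQLite build with SQLITE_ENABLE_FTS3; the names 'mytok' and 'docs' are illustrative only, and fts3_tokenizer_test() is normally present only in test builds (such as the testfixture that runs this script), so that last step may not work against a stock library.

package require sqlite3
sqlite3 db :memory:

# fts3_tokenizer('simple') returns the built-in tokenizer as a pointer
# wrapped in a blob; the two-argument form registers that blob under a
# new name, just as fts3token-1.2 does with 'blah'.
db eval { SELECT fts3_tokenizer('mytok', fts3_tokenizer('simple')) }

# The new name is now accepted by the tokenize clause (cf. fts3token-1.4/1.5).
db eval {
  CREATE VIRTUAL TABLE docs USING fts3(content, tokenize mytok);
  INSERT INTO docs(content) VALUES('There was movement at the station');
}
puts [db eval { SELECT content FROM docs WHERE content MATCH 'movement' }]

# In a test build, fts3_tokenizer_test() shows how a string is split,
# returning "<position> <normalized token> <original text>" triples.
puts [db onecolumn { SELECT fts3_tokenizer_test('simple', 'I don''t see how') }]

db close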