Kouhei Sutou
null+****@clear*****
Mon May 2 16:22:49 JST 2016
Kouhei Sutou	2016-05-02 16:22:49 +0900 (Mon, 02 May 2016)

  New Revision: c1fdb7340020721b685a8f10b8a3272946017c61
  https://github.com/groonga/groonga/commit/c1fdb7340020721b685a8f10b8a3272946017c61

  Message:
    table_create: stop ignoring nonexistent default tokenizer

    This is a backward incompatible change, but it makes it possible to
    catch a typo in --default_tokenizer. The previous behavior, silently
    ignoring a nonexistent default tokenizer, delayed finding such
    problems.

  Modified files:
    lib/proc/proc_table.c
    test/command/suite/table_create/default_tokenizer/nonexistent.expected
    test/command/suite/table_create/default_tokenizer/nonexistent.test

  Modified: lib/proc/proc_table.c (+33 -16)
===================================================================
--- lib/proc/proc_table.c    2016-05-02 16:15:23 +0900 (9fe09e0)
+++ lib/proc/proc_table.c    2016-05-02 16:22:49 +0900 (ddaa9be)
@@ -189,9 +189,9 @@ command_table_create(grn_ctx *ctx,
   grn_obj *flags_raw;
   grn_obj *key_type_name;
   grn_obj *value_type_name;
-  grn_obj *default_tokenizer;
-  grn_obj *normalizer;
-  grn_obj *token_filters;
+  grn_obj *default_tokenizer_name;
+  grn_obj *normalizer_name;
+  grn_obj *token_filters_name;
   grn_obj *table;
   const char *rest;
   grn_table_flags flags;
@@ -200,11 +200,11 @@ command_table_create(grn_ctx *ctx,
   flags_raw = grn_plugin_proc_get_var(ctx, user_data, "flags", -1);
   key_type_name = grn_plugin_proc_get_var(ctx, user_data, "key_type", -1);
   value_type_name = grn_plugin_proc_get_var(ctx, user_data, "value_type", -1);
-  default_tokenizer =
+  default_tokenizer_name =
     grn_plugin_proc_get_var(ctx, user_data, "default_tokenizer", -1);
-  normalizer =
+  normalizer_name =
     grn_plugin_proc_get_var(ctx, user_data, "normalizer", -1);
-  token_filters =
+  token_filters_name =
     grn_plugin_proc_get_var(ctx, user_data, "token_filters", -1);
 
   flags = grn_atoi(GRN_TEXT_VALUE(flags_raw),
@@ -274,22 +274,39 @@ command_table_create(grn_ctx *ctx,
       goto exit;
     }
 
-    {
+    if (GRN_TEXT_LEN(default_tokenizer_name) > 0) {
+      grn_obj *default_tokenizer;
+
+      default_tokenizer =
+        grn_ctx_get(ctx,
+                    GRN_TEXT_VALUE(default_tokenizer_name),
+                    GRN_TEXT_LEN(default_tokenizer_name));
+      if (!default_tokenizer) {
+        GRN_PLUGIN_ERROR(ctx,
+                         GRN_INVALID_ARGUMENT,
+                         "[table][create][%.*s] unknown tokenizer: <%.*s>",
+                         (int)GRN_TEXT_LEN(name),
+                         GRN_TEXT_VALUE(name),
+                         (int)GRN_TEXT_LEN(default_tokenizer_name),
+                         GRN_TEXT_VALUE(default_tokenizer_name));
+        grn_obj_remove(ctx, table);
+        goto exit;
+      }
       grn_obj_set_info(ctx, table,
                        GRN_INFO_DEFAULT_TOKENIZER,
-                       grn_ctx_get(ctx,
-                                   GRN_TEXT_VALUE(default_tokenizer),
-                                   GRN_TEXT_LEN(default_tokenizer)));
-      if (GRN_TEXT_LEN(normalizer) > 0) {
+                       default_tokenizer);
+    }
+
+    if (GRN_TEXT_LEN(normalizer_name) > 0) {
       grn_obj_set_info(ctx, table, GRN_INFO_NORMALIZER,
                        grn_ctx_get(ctx,
-                                   GRN_TEXT_VALUE(normalizer),
-                                   GRN_TEXT_LEN(normalizer)));
-      }
-      grn_proc_table_set_token_filters(ctx, table, token_filters);
-      grn_obj_unlink(ctx, table);
+                                   GRN_TEXT_VALUE(normalizer_name),
+                                   GRN_TEXT_LEN(normalizer_name)));
     }
+
+    grn_proc_table_set_token_filters(ctx, table, token_filters_name);
+    grn_obj_unlink(ctx, table);
   }
 
 exit :
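  Note: the core of the last hunk is a lookup-then-validate step: the tokenizer
  name is resolved with grn_ctx_get() and a NULL result is now reported instead
  of being handed to grn_obj_set_info(). Below is a minimal standalone sketch of
  that pattern; the program itself, its temporary database, and the helper name
  find_tokenizer() are illustrative assumptions, not code from this commit.

#include <stdio.h>
#include <string.h>
#include <groonga.h>

/* Look up a tokenizer by name; report a miss instead of silently ignoring it. */
static grn_obj *
find_tokenizer(grn_ctx *ctx, const char *name)
{
  grn_obj *tokenizer = grn_ctx_get(ctx, name, (int)strlen(name));
  if (!tokenizer) {
    /* command_table_create() now raises GRN_INVALID_ARGUMENT here and
       removes the half-created table; this sketch only prints. */
    printf("unknown tokenizer: <%s>\n", name);
  }
  return tokenizer;
}

int
main(void)
{
  grn_ctx ctx;
  grn_obj *db;

  grn_init();
  grn_ctx_init(&ctx, 0);
  db = grn_db_create(&ctx, NULL, NULL); /* temporary, path-less database */

  find_tokenizer(&ctx, "TokenBigram");      /* built-in: found */
  find_tokenizer(&ctx, "TokenNonexistent"); /* typo: reported  */

  grn_obj_close(&ctx, db);
  grn_ctx_fin(&ctx);
  grn_fin();
  return 0;
}

  The patch relies on the same "grn_ctx_get() returned NULL" check; the only
  difference is that the plugin reports the failure through GRN_PLUGIN_ERROR()
  and rolls the table back with grn_obj_remove(), as shown in the diff above.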
  Modified: test/command/suite/table_create/default_tokenizer/nonexistent.expected (+8 -71)
===================================================================
--- test/command/suite/table_create/default_tokenizer/nonexistent.expected    2016-05-02 16:15:23 +0900 (6d72ca1)
+++ test/command/suite/table_create/default_tokenizer/nonexistent.expected    2016-05-02 16:22:49 +0900 (4a3b610)
@@ -1,77 +1,14 @@
 table_create Tags TABLE_PAT_KEY ShortText --default_tokenizer TokenNonexistent
-[[0,0.0,0.0],true]
-table_create Movies TABLE_HASH_KEY ShortText
-[[0,0.0,0.0],true]
-column_create Movies tags COLUMN_VECTOR Tags
-[[0,0.0,0.0],true]
-column_create Tags movies_tags COLUMN_INDEX Movies tags
-[[0,0.0,0.0],true]
-load --table Movies
-[
-{"_key": "Seven Samurai", "tags": "Samurai Japanese Japan"},
-{"_key": "The Last Samurai", "tags": "English Samurai Japanese US Japan"}
-]
-[[0,0.0,0.0],2]
-select Tags --output_columns _key --limit -1
 [
   [
-    0,
-    0.0,
-    0.0
-  ],
-  [
     [
-      [
-        2
-      ],
-      [
-        [
-          "_key",
-          "ShortText"
-        ]
-      ],
-      [
-        "English Samurai Japanese US Japan"
-      ],
-      [
-        "Samurai Japanese Japan"
-      ]
-    ]
-  ]
-]
-select Movies --match_columns tags --query Samurai
-[
-  [
-    0,
-    0.0,
-    0.0
+      -22,
+      0.0,
+      0.0
+    ],
+    "[table][create][Tags] unknown tokenizer: <TokenNonexistent>"
   ],
-  [
-    [
-      [
-        1
-      ],
-      [
-        [
-          "_id",
-          "UInt32"
-        ],
-        [
-          "_key",
-          "ShortText"
-        ],
-        [
-          "tags",
-          "Tags"
-        ]
-      ],
-      [
-        1,
-        "Seven Samurai",
-        [
-          "Samurai Japanese Japan"
-        ]
-      ]
-    ]
-  ]
+  false
 ]
+#|e| [table][create][Tags] unknown tokenizer: <TokenNonexistent>
+dump

  Modified: test/command/suite/table_create/default_tokenizer/nonexistent.test (+1 -14)
===================================================================
--- test/command/suite/table_create/default_tokenizer/nonexistent.test    2016-05-02 16:15:23 +0900 (8ed936d)
+++ test/command/suite/table_create/default_tokenizer/nonexistent.test    2016-05-02 16:22:49 +0900 (8b3f204)
@@ -1,17 +1,4 @@
 table_create Tags TABLE_PAT_KEY ShortText \
   --default_tokenizer TokenNonexistent
 
-table_create Movies TABLE_HASH_KEY ShortText
-column_create Movies tags COLUMN_VECTOR Tags
-
-column_create Tags movies_tags COLUMN_INDEX Movies tags
-
-load --table Movies
-[
-{"_key": "Seven Samurai", "tags": "Samurai Japanese Japan"},
-{"_key": "The Last Samurai", "tags": "English Samurai Japanese US Japan"}
-]
-
-select Tags --output_columns _key --limit -1
-
-select Movies --match_columns tags --query Samurai
+dump
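  For completeness, here is how the new expectation in nonexistent.expected can
  be observed from the embedded C API: send the same table_create command and
  inspect the return code and body. This is only a sketch under assumed setup
  (temporary database, no error-location checks), not code from this commit.

#include <stdio.h>
#include <string.h>
#include <groonga.h>

int
main(void)
{
  grn_ctx ctx;
  grn_obj *db;
  const char *command =
    "table_create Tags TABLE_PAT_KEY ShortText "
    "--default_tokenizer TokenNonexistent";
  char *body;
  unsigned int body_size;
  int flags;

  grn_init();
  grn_ctx_init(&ctx, 0);
  db = grn_db_create(&ctx, NULL, NULL); /* temporary, path-less database */

  /* Execute the command and fetch its buffered response body. */
  grn_ctx_send(&ctx, command, (unsigned int)strlen(command), 0);
  grn_ctx_recv(&ctx, &body, &body_size, &flags);

  /* With this commit the command is expected to fail: rc should be
     GRN_INVALID_ARGUMENT (-22, as in nonexistent.expected) and the body
     should be "false" instead of "true". */
  printf("rc   = %d\n", ctx.rc);
  printf("body = %.*s\n", (int)body_size, body);

  grn_obj_close(&ctx, db);
  grn_ctx_fin(&ctx);
  grn_fin();
  return 0;
}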