Kouhei Sutou
null+****@clear*****
Sat Oct 11 22:14:23 JST 2014
Kouhei Sutou 2014-10-11 22:14:23 +0900 (Sat, 11 Oct 2014) New Revision: b629eca100b963c6c12d39398ed1bbd17bdafc65 https://github.com/groonga/groonga/commit/b629eca100b963c6c12d39398ed1bbd17bdafc65 Message: dump: support "table_create --token_filters" Added files: test/command/suite/table_create/token_filters/one.expected test/command/suite/table_create/token_filters/one.test Modified files: lib/proc.c Modified: lib/proc.c (+20 -0) =================================================================== --- lib/proc.c 2014-10-10 09:35:52 +0900 (d4c5bd3) +++ lib/proc.c 2014-10-11 22:14:23 +0900 (656b9e9) @@ -2661,6 +2661,26 @@ dump_table(grn_ctx *ctx, grn_obj *outbuf, grn_obj *table, GRN_TEXT_PUTS(ctx, outbuf, " --normalizer "); dump_obj_name(ctx, outbuf, normalizer); } + { + grn_obj token_filters; + int n_token_filters; + + GRN_PTR_INIT(&token_filters, GRN_OBJ_VECTOR, GRN_ID_NIL); + grn_obj_get_info(ctx, table, GRN_INFO_TOKEN_FILTERS, &token_filters); + n_token_filters = GRN_BULK_VSIZE(&token_filters) / sizeof(grn_obj *); + if (n_token_filters > 0) { + int i; + GRN_TEXT_PUTS(ctx, outbuf, " --token_filters "); + for (i = 0; i < n_token_filters; i++) { + grn_obj *token_filter = GRN_PTR_VALUE_AT(&token_filters, i); + if (i > 0) { + GRN_TEXT_PUTC(ctx, outbuf, ','); + } + dump_obj_name(ctx, outbuf, token_filter); + } + } + GRN_OBJ_FIN(ctx, &token_filters); + } GRN_TEXT_PUTC(ctx, outbuf, '\n'); Added: test/command/suite/table_create/token_filters/one.expected (+7 -0) 100644 =================================================================== --- /dev/null +++ test/command/suite/table_create/token_filters/one.expected 2014-10-11 22:14:23 +0900 (fee262c) @@ -0,0 +1,7 @@ +register token_filters/stop_word +[[0,0.0,0.0],true] +table_create Terms TABLE_PAT_KEY ShortText --default_tokenizer TokenBigram --normalizer NormalizerAuto --token_filters TokenFilterStopWord +[[0,0.0,0.0],true] +dump +table_create Terms TABLE_PAT_KEY ShortText --default_tokenizer TokenBigram --normalizer 
NormalizerAuto --token_filters TokenFilterStopWord + Added: test/command/suite/table_create/token_filters/one.test (+8 -0) 100644 =================================================================== --- /dev/null +++ test/command/suite/table_create/token_filters/one.test 2014-10-11 22:14:23 +0900 (e3f4608) @@ -0,0 +1,8 @@ +register token_filters/stop_word + +table_create Terms TABLE_PAT_KEY ShortText \ + --default_tokenizer TokenBigram \ + --normalizer NormalizerAuto \ + --token_filters TokenFilterStopWord + +dump -------------- next part -------------- An HTML attachment was scrubbed... Télécharger