[Groonga-commit] groonga/groonga at d9eb418 [master] tokenize: remove needless output on error


Kouhei Sutou null+****@clear*****
Thu Aug 14 20:24:08 JST 2014


Kouhei Sutou	2014-08-14 20:24:08 +0900 (Thu, 14 Aug 2014)

  New Revision: d9eb418c0bc85a45160ac9ae0a06885b9f3fa709
  https://github.com/groonga/groonga/commit/d9eb418c0bc85a45160ac9ae0a06885b9f3fa709

  Message:
    tokenize: remove needless output on error

  Modified files:
    lib/proc.c
    test/command/suite/tokenize/invalid/flags/unknown.expected
    test/command/suite/tokenize/invalid/normalizer/invalid.expected
    test/command/suite/tokenize/invalid/normalizer/nonexistent.expected
    test/command/suite/tokenize/invalid/tokenizer/invalid.expected
    test/command/suite/tokenize/invalid/tokenizer/nonexistent.expected
    test/command/suite/tokenize/no_string.expected
    test/command/suite/tokenize/no_tokenizer.expected
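
  Summary: before this change, every error path in proc_tokenize() also
  called output_tokens(ctx, NULL, NULL), which appended an empty token
  list after the error header in the response. The error paths now return
  immediately, so output_tokens() no longer needs its NULL guard. As the
  updated test expectations below show, a failing command such as:

    tokenize
    [[[-22,0.0,0.0],"[tokenize] tokenizer name is missing"],[]]

  now responds with the error header only:

    tokenize
    [[[-22,0.0,0.0],"[tokenize] tokenizer name is missing"]]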

  Modified: lib/proc.c (+2 -9)
===================================================================
--- lib/proc.c    2014-08-14 20:20:35 +0900 (31314af)
+++ lib/proc.c    2014-08-14 20:24:08 +0900 (9b259e1)
@@ -3169,12 +3169,9 @@ typedef struct {
 static void
 output_tokens(grn_ctx *ctx, grn_obj *tokens, grn_hash *lexicon)
 {
-  int i, n_tokens = 0;
-
-  if (tokens) {
-    n_tokens = GRN_BULK_VSIZE(tokens) / sizeof(tokenize_token);
-  }
+  int i, n_tokens;
 
+  n_tokens = GRN_BULK_VSIZE(tokens) / sizeof(tokenize_token);
   GRN_OUTPUT_ARRAY_OPEN("TOKENS", n_tokens);
   for (i = 0; i < n_tokens; i++) {
     tokenize_token *token;
@@ -3317,13 +3314,11 @@ proc_tokenize(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
 
   if (GRN_TEXT_LEN(tokenizer_name) == 0) {
     ERR(GRN_INVALID_ARGUMENT, "[tokenize] tokenizer name is missing");
-    output_tokens(ctx, NULL, NULL);
     return NULL;
   }
 
   if (GRN_TEXT_LEN(string) == 0) {
     ERR(GRN_INVALID_ARGUMENT, "[tokenize] string is missing");
-    output_tokens(ctx, NULL, NULL);
     return NULL;
   }
 
@@ -3333,13 +3328,11 @@ proc_tokenize(grn_ctx *ctx, int nargs, grn_obj **args, grn_user_data *user_data)
 
     flags = parse_tokenize_flags(ctx, flag_names);
     if (ctx->rc != GRN_SUCCESS) {
-      output_tokens(ctx, NULL, NULL);
       return NULL;
     }
 
     lexicon = create_lexicon_for_tokenize(ctx, tokenizer_name, normalizer_name);
     if (!lexicon) {
-      output_tokens(ctx, NULL, NULL);
       return NULL;
     }
 

  Modified: test/command/suite/tokenize/invalid/flags/unknown.expected (+1 -1)
===================================================================
--- test/command/suite/tokenize/invalid/flags/unknown.expected    2014-08-14 20:20:35 +0900 (31c7bd7)
+++ test/command/suite/tokenize/invalid/flags/unknown.expected    2014-08-14 20:24:08 +0900 (9947911)
@@ -1,3 +1,3 @@
 tokenize TokenDelimit "aBcDe 123" NormalizerAuto UNKNOWN
-[[[-22,0.0,0.0],"[tokenize] invalid flag: <UNKNOWN>"],[]]
+[[[-22,0.0,0.0],"[tokenize] invalid flag: <UNKNOWN>"]]
 #|e| [tokenize] invalid flag: <UNKNOWN>

  Modified: test/command/suite/tokenize/invalid/normalizer/invalid.expected (+0 -3)
===================================================================
--- test/command/suite/tokenize/invalid/normalizer/invalid.expected    2014-08-14 20:20:35 +0900 (e14ae9c)
+++ test/command/suite/tokenize/invalid/normalizer/invalid.expected    2014-08-14 20:24:08 +0900 (b5b511b)
@@ -7,9 +7,6 @@ tokenize TokenBigram "aBcDe 123" TokenDelimit
       0.0
     ],
     "[tokenize] not normalizer: #<proc:tokenizer TokenDelimit arguments:[$1, $2, $3]>"
-  ],
-  [
-
   ]
 ]
 #|e| [tokenize] not normalizer: #<proc:tokenizer TokenDelimit arguments:[$1, $2, $3]>

  Modified: test/command/suite/tokenize/invalid/normalizer/nonexistent.expected (+1 -13)
===================================================================
--- test/command/suite/tokenize/invalid/normalizer/nonexistent.expected    2014-08-14 20:20:35 +0900 (7e8f06b)
+++ test/command/suite/tokenize/invalid/normalizer/nonexistent.expected    2014-08-14 20:24:08 +0900 (2d76d6b)
@@ -1,15 +1,3 @@
 tokenize TokenBigram "aBcDe 123" NormalizerNonexistent
-[
-  [
-    [
-      -22,
-      0.0,
-      0.0
-    ],
-    "[tokenize] nonexistent normalizer: <NormalizerNonexistent>"
-  ],
-  [
-
-  ]
-]
+[[[-22,0.0,0.0],"[tokenize] nonexistent normalizer: <NormalizerNonexistent>"]]
 #|e| [tokenize] nonexistent normalizer: <NormalizerNonexistent>

  Modified: test/command/suite/tokenize/invalid/tokenizer/invalid.expected (+0 -3)
===================================================================
--- test/command/suite/tokenize/invalid/tokenizer/invalid.expected    2014-08-14 20:20:35 +0900 (23fe362)
+++ test/command/suite/tokenize/invalid/tokenizer/invalid.expected    2014-08-14 20:24:08 +0900 (fb143d4)
@@ -7,9 +7,6 @@ tokenize NormalizerAuto "aBcDe 123"
       0.0
     ],
     "[tokenize] not tokenizer: #<proc:normalizer NormalizerAuto arguments:[$1]>"
-  ],
-  [
-
   ]
 ]
 #|e| [tokenize] not tokenizer: #<proc:normalizer NormalizerAuto arguments:[$1]>

  Modified: test/command/suite/tokenize/invalid/tokenizer/nonexistent.expected (+1 -1)
===================================================================
--- test/command/suite/tokenize/invalid/tokenizer/nonexistent.expected    2014-08-14 20:20:35 +0900 (ee16c98)
+++ test/command/suite/tokenize/invalid/tokenizer/nonexistent.expected    2014-08-14 20:24:08 +0900 (6dea9b8)
@@ -1,3 +1,3 @@
 tokenize TokenNonexistent "aBcDe 123"
-[[[-22,0.0,0.0],"[tokenize] nonexistent tokenizer: <TokenNonexistent>"],[]]
+[[[-22,0.0,0.0],"[tokenize] nonexistent tokenizer: <TokenNonexistent>"]]
 #|e| [tokenize] nonexistent tokenizer: <TokenNonexistent>

  Modified: test/command/suite/tokenize/no_string.expected (+1 -1)
===================================================================
--- test/command/suite/tokenize/no_string.expected    2014-08-14 20:20:35 +0900 (62851cc)
+++ test/command/suite/tokenize/no_string.expected    2014-08-14 20:24:08 +0900 (b3b68e7)
@@ -1,3 +1,3 @@
 tokenize TokenBigram
-[[[-22,0.0,0.0],"[tokenize] string is missing"],[]]
+[[[-22,0.0,0.0],"[tokenize] string is missing"]]
 #|e| [tokenize] string is missing

  Modified: test/command/suite/tokenize/no_tokenizer.expected (+1 -1)
===================================================================
--- test/command/suite/tokenize/no_tokenizer.expected    2014-08-14 20:20:35 +0900 (4c38574)
+++ test/command/suite/tokenize/no_tokenizer.expected    2014-08-14 20:24:08 +0900 (fec6198)
@@ -1,3 +1,3 @@
 tokenize
-[[[-22,0.0,0.0],"[tokenize] tokenizer name is missing"],[]]
+[[[-22,0.0,0.0],"[tokenize] tokenizer name is missing"]]
 #|e| [tokenize] tokenizer name is missing