author     Vsevolod Stakhov <vsevolod@rambler-co.ru>	2011-05-06 19:18:40 +0400
committer  Vsevolod Stakhov <vsevolod@rambler-co.ru>	2011-05-06 19:18:40 +0400
commit     683b90f4c6c744557f7429ce6ff77c0f7d2175e1 (patch)
tree       6e5f5cfdb0070cc7387d4045e955c6226d9f225d /src/filter.c
parent     56586078f92c4cf71fad46e1f4888a49749a6313 (diff)
* Major cleanup of cmake build system
* Add initial version of statshow utility for statfiles debugging
* Add debugging for statistics
* Remove unused utilities
Diffstat (limited to 'src/filter.c')
-rw-r--r--  src/filter.c | 9
1 file changed, 3 insertions(+), 6 deletions(-)
diff --git a/src/filter.c b/src/filter.c
index df8e1a9e0..2f8b27060 100644
--- a/src/filter.c
+++ b/src/filter.c
@@ -36,9 +36,6 @@
 #include "classifiers/classifiers.h"
 #include "tokenizers/tokenizers.h"
 
-#ifndef WITHOUT_PERL
-# include "perl.h"
-#endif
 #ifdef WITH_LUA
 # include "lua/lua_common.h"
 #endif
@@ -615,7 +612,7 @@ classifiers_callback (gpointer value, void *arg)
 			c.len = strlen (cur->data);
 			if (c.len > 0) {
 				c.begin = cur->data;
-				if (!cl->tokenizer->tokenize_func (cl->tokenizer, task->task_pool, &c, &tokens)) {
+				if (!cl->tokenizer->tokenize_func (cl->tokenizer, task->task_pool, &c, &tokens, FALSE)) {
 					msg_info ("cannot tokenize input");
 					return;
 				}
@@ -630,7 +627,7 @@ classifiers_callback (gpointer value, void *arg)
 		c.begin = text_part->content->data;
 		c.len = text_part->content->len;
 		/* Tree would be freed at task pool freeing */
-		if (!cl->tokenizer->tokenize_func (cl->tokenizer, task->task_pool, &c, &tokens)) {
+		if (!cl->tokenizer->tokenize_func (cl->tokenizer, task->task_pool, &c, &tokens, FALSE)) {
 			msg_info ("cannot tokenize input");
 			return;
 		}
@@ -857,7 +854,7 @@ learn_task (const gchar *statfile, struct worker_task *task, GError **err)
 	/* Get tokens */
 	if (!cl->tokenizer->tokenize_func (
 				cl->tokenizer, task->task_pool,
-				&c, &tokens)) {
+				&c, &tokens, FALSE)) {
 		g_set_error (err, filter_error_quark(), 2, "Cannot tokenize message");
 		return FALSE;
 	}
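
Note: every call site touched by this diff now passes one extra gboolean argument (FALSE here) to the tokenizer callback. The following is a minimal sketch of the callback shape as it can be inferred from these call sites alone; the typedef name, parameter names, and the semantics of the new flag are assumptions and are not taken from the rspamd headers.

    #include <glib.h>

    /* Forward declarations standing in for the rspamd types used at the
     * call sites (tokenizers/tokenizers.h, mem_pool.h, fstring.h). */
    struct tokenizer;
    typedef struct memory_pool_s memory_pool_t;
    typedef struct f_str_s f_str_t;

    /* Sketch of the inferred tokenize_func signature. The trailing
     * gboolean is the argument added in this commit; all call sites in
     * filter.c pass FALSE, and its meaning is not visible in this diff. */
    typedef gboolean (*tokenize_func_t) (struct tokenizer *tokenizer,
                                         memory_pool_t *pool,
                                         f_str_t *input,
                                         GTree **tokens,
                                         gboolean flag);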