author | Vsevolod Stakhov <vsevolod@rambler-co.ru> | 2011-02-24 18:51:13 +0300
committer | Vsevolod Stakhov <vsevolod@rambler-co.ru> | 2011-02-24 18:51:13 +0300
commit | 121efbcddf8ec41eea91aa80574dab3730bf8976 (patch)
tree | 2d39b5895526d63c7994aa81575c2db15a46cee1 /src/tokenizers
parent | 7eb9b642db888b26a97b06394695e55173c45895 (diff)
download | rspamd-121efbcddf8ec41eea91aa80574dab3730bf8976.tar.gz, rspamd-121efbcddf8ec41eea91aa80574dab3730bf8976.zip
* Rewrite URL storage system
Diffstat (limited to 'src/tokenizers')
-rw-r--r-- | src/tokenizers/tokenizers.c | 32
-rw-r--r-- | src/tokenizers/tokenizers.h |  2
2 files changed, 0 insertions, 34 deletions
diff --git a/src/tokenizers/tokenizers.c b/src/tokenizers/tokenizers.c
index faa8f074b..b7318bdfc 100644
--- a/src/tokenizers/tokenizers.c
+++ b/src/tokenizers/tokenizers.c
@@ -143,38 +143,6 @@ get_next_word (f_str_t * buf, f_str_t * token)
 	return token;
 }
 
-int
-tokenize_urls (memory_pool_t * pool, struct worker_task *task, GTree ** tree)
-{
-	token_node_t *new = NULL;
-	f_str_t url_domain;
-	struct uri *url;
-	GList *cur;
-	uint32_t h;
-
-	if (*tree == NULL) {
-		*tree = g_tree_new (token_node_compare_func);
-		memory_pool_add_destructor (pool, (pool_destruct_func) g_tree_destroy, *tree);
-	}
-
-	cur = task->urls;
-	while (cur) {
-		url = cur->data;
-		url_domain.begin = url->host;
-		url_domain.len = url->hostlen;
-		new = memory_pool_alloc (pool, sizeof (token_node_t));
-		h = fstrhash (&url_domain);
-		new->h1 = h * primes[0];
-		new->h2 = h * primes[1];
-		if (g_tree_lookup (*tree, new) == NULL) {
-			g_tree_insert (*tree, new, new);
-		}
-		cur = g_list_next (cur);
-	}
-
-	return TRUE;
-}
-
 /* Struct to access gmime headers */
 struct raw_header {
 	struct raw_header *next;
diff --git a/src/tokenizers/tokenizers.h b/src/tokenizers/tokenizers.h
index 21e454e6b..59a2684d0 100644
--- a/src/tokenizers/tokenizers.h
+++ b/src/tokenizers/tokenizers.h
@@ -36,8 +36,6 @@ struct tokenizer* get_tokenizer (char *name);
 f_str_t *get_next_word (f_str_t *buf, f_str_t *token);
 /* OSB tokenize function */
 int osb_tokenize_text (struct tokenizer *tokenizer, memory_pool_t *pool, f_str_t *input, GTree **cur);
-/* Common tokenizer for urls */
-int tokenize_urls (memory_pool_t *pool, struct worker_task *task, GTree **cur);
 /* Common tokenizer for headers */
 int tokenize_headers (memory_pool_t *pool, struct worker_task *task, GTree **cur);
 /* Make tokens for a subject */
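The removed tokenize_urls() turned each URL in a task into a statistics token: it hashed the URL host once with fstrhash(), derived the token's two hash values by multiplying that single hash with two primes from rspamd's primes[] table, and inserted the result into the token GTree, skipping duplicates. The following standalone sketch illustrates that two-hash scheme outside of rspamd; fstrhash(), the real primes[] values, and the GTree-based deduplication are rspamd internals not shown in this diff, so an FNV-1a hash, two arbitrary primes, and a linear-scan duplicate check stand in for them here.

```c
/*
 * Sketch of the token scheme used by the removed tokenize_urls():
 * hash the URL host once, derive (h1, h2) by multiplying with two
 * primes, and keep only unique tokens.
 *
 * Assumptions: FNV-1a replaces rspamd's fstrhash(), the primes are
 * arbitrary, and a fixed array replaces the GTree used in the original.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct token {
	uint32_t h1;
	uint32_t h2;
};

/* Stand-in for rspamd's fstrhash(): FNV-1a over the host bytes. */
static uint32_t
host_hash (const char *host, size_t len)
{
	uint32_t h = 2166136261u;
	size_t i;

	for (i = 0; i < len; i++) {
		h ^= (unsigned char) host[i];
		h *= 16777619u;
	}
	return h;
}

int
main (void)
{
	/* Hypothetical primes; rspamd keeps its own primes[] table. */
	static const uint32_t primes[2] = { 499u, 983u };
	const char *hosts[] = { "example.com", "example.org", "example.com" };
	struct token seen[16];
	size_t nseen = 0, i, j;

	for (i = 0; i < sizeof (hosts) / sizeof (hosts[0]); i++) {
		uint32_t h = host_hash (hosts[i], strlen (hosts[i]));
		struct token t = { h * primes[0], h * primes[1] };
		int dup = 0;

		/* The original used g_tree_lookup() for this duplicate check;
		 * a linear scan is enough for the sketch. */
		for (j = 0; j < nseen; j++) {
			if (seen[j].h1 == t.h1 && seen[j].h2 == t.h2) {
				dup = 1;
				break;
			}
		}
		if (!dup && nseen < 16) {
			seen[nseen++] = t;
			printf ("token for %s: h1=%u h2=%u\n", hosts[i], t.h1, t.h2);
		}
	}
	return 0;
}
```

Deriving both token hashes from one base hash keeps the per-URL cost to a single pass over the host string; the duplicate check means repeated links to the same domain contribute only one token.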