about summary refs log tree commit diff stats
path: root/lualib
diff options
context:
space:
mode:
Diffstat (limited to 'lualib')
-rw-r--r-- lualib/redis_scripts/bayes_cache_learn.lua | 5
-rw-r--r-- lualib/redis_scripts/bayes_learn.lua | 4
2 files changed, 5 insertions, 4 deletions
diff --git a/lualib/redis_scripts/bayes_cache_learn.lua b/lualib/redis_scripts/bayes_cache_learn.lua
index 4185e5741..d8a2d878e 100644
--- a/lualib/redis_scripts/bayes_cache_learn.lua
+++ b/lualib/redis_scripts/bayes_cache_learn.lua
@@ -15,7 +15,8 @@ for i = 0, conf.cache_max_keys do
local have = redis.call('HGET', prefix, cache_id)
if have then
- -- Already in cache
+ -- Already in cache, but is_spam changes when relearning
+ redis.call('HSET', prefix, cache_id, is_spam)
return false
end
end
@@ -58,4 +59,4 @@ if not added then
end
end
-return true \ No newline at end of file
+return true
diff --git a/lualib/redis_scripts/bayes_learn.lua b/lualib/redis_scripts/bayes_learn.lua
index 80d86d803..5456165b6 100644
--- a/lualib/redis_scripts/bayes_learn.lua
+++ b/lualib/redis_scripts/bayes_learn.lua
@@ -26,7 +26,7 @@ redis.call('HSET', prefix, 'version', '2') -- new schema
redis.call('HINCRBY', prefix, learned_key, is_unlearn and -1 or 1) -- increase or decrease learned count
for i, token in ipairs(input_tokens) do
- redis.call('HINCRBY', token, hash_key, 1)
+ redis.call('HINCRBY', token, hash_key, is_unlearn and -1 or 1)
if text_tokens then
local tok1 = text_tokens[i * 2 - 1]
local tok2 = text_tokens[i * 2]
@@ -41,4 +41,4 @@ for i, token in ipairs(input_tokens) do
redis.call('ZINCRBY', prefix .. '_z', is_unlearn and -1 or 1, token)
end
end
-end \ No newline at end of file
+end