author    Vsevolod Stakhov <vsevolod@rspamd.com>  2023-12-14 16:34:35 +0000
committer Vsevolod Stakhov <vsevolod@rspamd.com>  2023-12-14 16:34:35 +0000
commit    65477c306436ca302709f32d15b975fbf4dc21eb (patch)
tree      0bb197386c1738c624cd1da6384bf44a4cea1402 /lualib/redis_scripts
parent    35479797646a4a1e5c4b0bfcb9a87d7e3b39dbcd (diff)
download  rspamd-65477c306436ca302709f32d15b975fbf4dc21eb.tar.gz
          rspamd-65477c306436ca302709f32d15b975fbf4dc21eb.zip
[Project] Try to do string concatenation in C
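With prefix_underscore removed, both scripts now expect the packed token lists in KEYS[2] (classify) and KEYS[5] (learn) to already contain fully formed hash keys of the shape '<prefix>_<token>'; the concatenation is performed on the C side before the script is invoked. Below is a minimal Lua sketch of that caller-side preparation, for illustration only: the real implementation is C code outside this diff, and prepare_token_keys is a hypothetical name.

-- Hypothetical helper showing the key construction this commit moves out of
-- the scripts; rspamd itself now does the equivalent concatenation in C.
-- cmsgpack is assumed to be available (it is a built-in module inside Redis
-- scripts; standalone Lua would need the lua-cmsgpack rock).
local function prepare_token_keys(prefix, tokens)
  local keys = {}
  for i, token in ipairs(tokens) do
    -- each key already carries the statistics prefix, e.g. '<prefix>_<token>'
    keys[i] = prefix .. '_' .. token
  end
  -- packed blob that the scripts later decode via cmsgpack.unpack(KEYS[...])
  return cmsgpack.pack(keys)
end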
Diffstat (limited to 'lualib/redis_scripts')
-rw-r--r--  lualib/redis_scripts/bayes_classify.lua  5
-rw-r--r--  lualib/redis_scripts/bayes_learn.lua     3
2 files changed, 3 insertions(+), 5 deletions(-)
diff --git a/lualib/redis_scripts/bayes_classify.lua b/lualib/redis_scripts/bayes_classify.lua
index 1a4734cf5..e94f645fd 100644
--- a/lualib/redis_scripts/bayes_classify.lua
+++ b/lualib/redis_scripts/bayes_classify.lua
@@ -9,7 +9,6 @@ local output_ham = {}
local learned_ham = tonumber(redis.call('HGET', prefix, 'learns_ham')) or 0
local learned_spam = tonumber(redis.call('HGET', prefix, 'learns_spam')) or 0
-local prefix_underscore = prefix .. '_'
-- Output is a set of pairs (token_index, token_count), tokens that are not
-- found are not filled.
@@ -18,11 +17,11 @@ local prefix_underscore = prefix .. '_'
if learned_ham > 0 and learned_spam > 0 then
local input_tokens = cmsgpack.unpack(KEYS[2])
for i, token in ipairs(input_tokens) do
- local token_data = redis.call('HMGET', prefix_underscore .. token, 'H', 'S')
+ local token_data = redis.call('HMGET', token, 'H', 'S')
if token_data then
local ham_count = token_data[1]
- local spam_count = tonumber(token_data[2]) or 0
+ local spam_count = token_data[2]
if ham_count then
table.insert(output_ham, { i, tonumber(ham_count) })
diff --git a/lualib/redis_scripts/bayes_learn.lua b/lualib/redis_scripts/bayes_learn.lua
index 7536f6808..244be43f6 100644
--- a/lualib/redis_scripts/bayes_learn.lua
+++ b/lualib/redis_scripts/bayes_learn.lua
@@ -12,7 +12,6 @@ local symbol = KEYS[3]
local is_unlearn = KEYS[4] == 'true' and true or false
local input_tokens = cmsgpack.unpack(KEYS[5])
-local prefix_underscore = prefix .. '_'
local hash_key = is_spam and 'S' or 'H'
local learned_key = is_spam and 'learns_spam' or 'learns_ham'
@@ -21,5 +20,5 @@ redis.call('HSET', prefix, 'version', '2') -- new schema
redis.call('HINCRBY', prefix, learned_key, is_unlearn and -1 or 1) -- increase or decrease learned count
for _, token in ipairs(input_tokens) do
- redis.call('HINCRBY', prefix_underscore .. token, hash_key, 1)
+ redis.call('HINCRBY', token, hash_key, 1)
end
\ No newline at end of file