path: root/src/plugins/lua/neural.lua
author     Vsevolod Stakhov <vsevolod@highsecure.ru>  2019-03-28 15:53:17 +0000
committer  Vsevolod Stakhov <vsevolod@highsecure.ru>  2019-03-28 15:53:17 +0000
commit     a3b2c0f9db42a0b6d4d68d48654367e5b17b892a (patch)
tree       b56dc2372523861f7615b509a3413de1c127a26b /src/plugins/lua/neural.lua
parent     781f3162dda790d0d4d00431204ef49b326cd699 (diff)
download   rspamd-a3b2c0f9db42a0b6d4d68d48654367e5b17b892a.tar.gz
           rspamd-a3b2c0f9db42a0b6d4d68d48654367e5b17b892a.zip
[Minor] Fix stupid torch that uses `print` for logging
Diffstat (limited to 'src/plugins/lua/neural.lua')
-rw-r--r--  src/plugins/lua/neural.lua  6
1 file changed, 4 insertions, 2 deletions
diff --git a/src/plugins/lua/neural.lua b/src/plugins/lua/neural.lua
index b75adf468..30c4fee0f 100644
--- a/src/plugins/lua/neural.lua
+++ b/src/plugins/lua/neural.lua
@@ -671,11 +671,13 @@ local function train_ann(rule, _, ev_base, elt, worker)
trainer.learning_rate = rule.train.learning_rate
trainer.verbose = false
trainer.maxIteration = rule.train.max_iterations
- trainer.hookIteration = function(self, iteration, currentError)
+ trainer.hookIteration = function(_, iteration, currentError)
rspamd_logger.infox(rspamd_config, "learned %s iterations, error: %s",
iteration, currentError)
end
-
+ trainer.logger = function(s)
+ rspamd_logger.infox(rspamd_config, 'training: %s', s)
+ end
trainer:train(dataset)
local out = torch.MemoryFile()
out:writeObject(rule.anns[elt].ann_train)
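For context, the sketch below illustrates the pattern this patch applies: both the per-iteration hook and the trainer's own log output are routed through rspamd_logger instead of bare print. This is a minimal, hypothetical example, not rspamd code; the trainer object is a stand-in for the torch trainer used by neural.lua, and rspamd_logger/rspamd_config are stubbed so the snippet runs in plain Lua 5.1+.

```lua
-- Minimal sketch of the logging redirection shown in the diff above.
-- Assumptions: `trainer` is a stand-in for neural.lua's torch trainer;
-- `rspamd_logger` and `rspamd_config` are stubbed because they only exist
-- inside rspamd.

local rspamd_config = nil -- placeholder for the real config userdata
local rspamd_logger = {
  -- stub: rspamd's '%s' placeholders line up with string.format here
  infox = function(_cfg, fmt, ...)
    print(string.format(fmt, ...))
  end,
}

-- Stand-in trainer: by default it reports progress via plain print(),
-- which in rspamd would bypass the logging subsystem entirely.
local trainer = {
  maxIteration = 3,
  logger = print,
  hookIteration = nil,
}

function trainer:train(dataset)
  for i = 1, self.maxIteration do
    local err = 1.0 / i -- fake training error, for illustration only
    self.logger(('# current error = %s'):format(err))
    if self.hookIteration then
      self.hookIteration(self, i, err)
    end
  end
end

-- The same overrides as in the patch: redirect everything to rspamd_logger.
trainer.hookIteration = function(_, iteration, currentError)
  rspamd_logger.infox(rspamd_config, 'learned %s iterations, error: %s',
      iteration, currentError)
end
trainer.logger = function(s)
  rspamd_logger.infox(rspamd_config, 'training: %s', s)
end

trainer:train({})
```

The point of overriding `logger` in addition to `hookIteration` is that, as the commit message notes, the torch trainer otherwise writes its progress with `print`, so the messages would end up on stdout rather than in rspamd's configured log.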