author    Vsevolod Stakhov <vsevolod@highsecure.ru>  2020-08-24 14:13:47 +0100
committer GitHub <noreply@github.com>                2020-08-24 14:13:47 +0100
commit    775087ebc3f7a5e3852c8bd39e3364fd3da8f10e
tree      390d22dd8bc3cb0b51125e22fdf789cc60132c6a
parent    17569d8f3eb6f17b14a997fd92d5b0dd7e3b4c3a
parent    2c873e1d61925293b471f8b4cdb9d6b7d86fac8c
Merge pull request #3478 from cpragadeesh/master
[Fix] Change neural plugin's loss function
 src/plugins/lua/neural.lua | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/plugins/lua/neural.lua b/src/plugins/lua/neural.lua
index e3518d3bd..99efe720e 100644
--- a/src/plugins/lua/neural.lua
+++ b/src/plugins/lua/neural.lua
@@ -342,8 +342,8 @@ local function create_ann(n, nlayers)
local nhidden = math.floor((n + 1) / 2)
local t = rspamd_kann.layer.input(n)
t = rspamd_kann.transform.relu(t)
- t = rspamd_kann.transform.tanh(rspamd_kann.layer.dense(t, nhidden));
- t = rspamd_kann.layer.cost(t, 1, rspamd_kann.cost.mse)
+ t = rspamd_kann.layer.dense(t, nhidden);
+ t = rspamd_kann.layer.cost(t, 1, rspamd_kann.cost.ceb_neg)
return rspamd_kann.new.kann(t)
end
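
For reference, a minimal sketch of create_ann as it reads after this change, reconstructed only from the hunk context above (rspamd_kann is Rspamd's Lua binding to the KANN library; nothing outside the shown lines is assumed, and the nlayers parameter is left unused here as in the excerpt):

local function create_ann(n, nlayers)
  -- one hidden layer with roughly half as many nodes as inputs
  local nhidden = math.floor((n + 1) / 2)
  local t = rspamd_kann.layer.input(n)
  t = rspamd_kann.transform.relu(t)
  -- this commit drops the explicit tanh transform around the dense hidden layer
  t = rspamd_kann.layer.dense(t, nhidden);
  -- and switches the loss from mean squared error (mse) to ceb_neg,
  -- which in KANN is a binary cross-entropy cost over -1/+1 labels
  t = rspamd_kann.layer.cost(t, 1, rspamd_kann.cost.ceb_neg)
  return rspamd_kann.new.kann(t)
end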