author    Vsevolod Stakhov <vsevolod@rspamd.com>  2024-07-22 13:06:51 +0100
committer Vsevolod Stakhov <vsevolod@rspamd.com>  2024-07-22 13:06:51 +0100
commit    d019b0dfc11ca01c19a2889db0605ff1a776562b (patch)
tree      e3209e4ab67d0d9084b95be26880714b8ad80c48
parent    d94a2b8ae8842ff9525107bef3f29a814aae8fdb (diff)
[Minor] Remove top_p, reduce temperature to 0
-rw-r--r--  conf/modules.d/gpt.conf  | 4
-rw-r--r--  src/plugins/lua/gpt.lua  | 8
2 files changed, 3 insertions(+), 9 deletions(-)
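For reference, a minimal Lua sketch (not part of the commit) of the plugin defaults and the request body that openai_gpt_check assembles after this change: top_p is dropped entirely and temperature is pinned to 0.0. Field names are taken from the hunks below; the HTTP call, prompt text, and the remaining plugin options are omitted.

-- Sketch only: defaults and request body after this commit
-- (top_p removed, temperature fixed at 0.0 for deterministic sampling).
local settings = {
  model = 'gpt-3.5-turbo',
  max_tokens = 1000,
  temperature = 0.0,
  timeout = 10,
}

local body = {
  model = settings.model,
  max_tokens = settings.max_tokens,
  temperature = settings.temperature,  -- no top_p field is sent any more
  messages = {
    { role = 'system', content = '<prompt elided>' },  -- placeholder content
  },
}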
diff --git a/conf/modules.d/gpt.conf b/conf/modules.d/gpt.conf
index 7a2e11d40..eac9952e5 100644
--- a/conf/modules.d/gpt.conf
+++ b/conf/modules.d/gpt.conf
@@ -22,9 +22,7 @@ gpt {
# Maximum tokens to generate
max_tokens = 1000;
# Temperature for sampling
- temperature = 0.7;
- # Top p for sampling
- top_p = 0.9;
+ temperature = 0.0;
# Timeout for requests
timeout = 10s;
# Prompt for the model (use default if not set)
diff --git a/src/plugins/lua/gpt.lua b/src/plugins/lua/gpt.lua
index 6adbce3bf..046fe5dd7 100644
--- a/src/plugins/lua/gpt.lua
+++ b/src/plugins/lua/gpt.lua
@@ -31,9 +31,7 @@ gpt {
# Maximum tokens to generate
max_tokens = 1000;
# Temperature for sampling
- temperature = 0.7;
- # Top p for sampling
- top_p = 0.9;
+ temperature = 0.0;
# Timeout for requests
timeout = 10s;
# Prompt for the model (use default if not set)
@@ -73,8 +71,7 @@ local settings = {
api_key = nil,
model = 'gpt-3.5-turbo',
max_tokens = 1000,
- temperature = 0.7,
- top_p = 0.9,
+ temperature = 0.0,
timeout = 10,
prompt = nil,
condition = nil,
@@ -276,7 +273,6 @@ local function openai_gpt_check(task)
model = settings.model,
max_tokens = settings.max_tokens,
temperature = settings.temperature,
- top_p = settings.top_p,
messages = {
{
role = 'system',