diff options
author | Vsevolod Stakhov <vsevolod@rspamd.com> | 2025-05-14 18:23:40 +0600 |
---|---|---|
committer | GitHub <noreply@github.com> | 2025-05-14 18:23:40 +0600 |
commit | 947216642a7fdf44d36addcec1a40d1d4f0b0805 (patch) | |
tree | 9af9090da2b568aeab320bd81257d8b88ba906ed | |
parent | 5cf2bdb83c8bec117e787cea30e3b1f79d8dedee (diff) | |
parent | 85c43d5d3c8279e005c6b3c37a979bd177c0ed2d (diff) | |
download | rspamd-947216642a7fdf44d36addcec1a40d1d4f0b0805.tar.gz rspamd-947216642a7fdf44d36addcec1a40d1d4f0b0805.zip |
Merge pull request #5459 from NameWeb/patch-1
gpt.lua: Remove "thinking" and fix errorlog
-rw-r--r-- | src/plugins/lua/gpt.lua | 27 |
1 file changed, 20 insertions, 7 deletions
diff --git a/src/plugins/lua/gpt.lua b/src/plugins/lua/gpt.lua index 5d1cf5e06..bdec927e6 100644 --- a/src/plugins/lua/gpt.lua +++ b/src/plugins/lua/gpt.lua @@ -22,7 +22,7 @@ if confighelp then rspamd_config:add_example(nil, 'gpt', "Performs postfiltering using GPT model", [[ -gpt { + gpt { # Supported types: openai, ollama type = "openai"; # Your key to access the API @@ -53,7 +53,7 @@ gpt { reason_header = "X-GPT-Reason"; # Use JSON format for response json = false; -} + } ]]) return end @@ -359,10 +359,19 @@ local function default_openai_plain_conversion(task, input) return spam_score, reason, categories end - rspamd_logger.errx(task, 'cannot parse plain gpt reply: %s (all: %s)', lines[1]) + rspamd_logger.errx(task, 'cannot parse plain gpt reply: %s (all: %s)', lines[1], first_message) return end +-- Helper function to remove <think>...</think> and trim leading newlines +local function clean_gpt_response(text) + -- Remove <think>...</think> including multiline + text = text:gsub("<think>.-</think>", "") + -- Trim leading whitespace and newlines + text = text:gsub("^%s*\n*", "") + return text +end + local function default_ollama_plain_conversion(task, input) local parser = ucl.parser() local res, err = parser:parse_string(input) @@ -387,6 +396,10 @@ local function default_ollama_plain_conversion(task, input) rspamd_logger.errx(task, 'no content in the first message') return end + + -- Clean message + first_message = clean_gpt_response(first_message) + local lines = lua_util.str_split(first_message, '\n') local first_line = clean_reply_line(lines[1]) local spam_score = tonumber(first_line) @@ -397,7 +410,7 @@ local function default_ollama_plain_conversion(task, input) return spam_score, reason, categories end - rspamd_logger.errx(task, 'cannot parse plain gpt reply: %s', lines[1]) + rspamd_logger.errx(task, 'cannot parse plain gpt reply: %s (all: %s)', lines[1], first_message) return end @@ -514,9 +527,9 @@ local function insert_results(task, result, sel_part) end 
end if result.reason and settings.reason_header then - lua_mime.modify_headers(task, - { add = { [settings.reason_header] = { value = tostring(result.reason), order = 1 } } }) - end + lua_mime.modify_headers(task, + { add = { [settings.reason_header] = { value = tostring(result.reason), order = 1 } } }) + end if cache_context then lua_cache.cache_set(task, redis_cache_key(sel_part), result, cache_context) |