author | Vsevolod Stakhov <vsevolod@rspamd.com> | 2024-12-20 17:59:34 +0000 |
---|---|---|
committer | Vsevolod Stakhov <vsevolod@rspamd.com> | 2024-12-20 17:59:34 +0000 |
commit | ad33f36f12f0c679e1fe0bf215e949a9a0106a3a (patch) | |
tree | af7b59f204e6b1f443d38cf86ff723b217811d91 | |
parent | 42afa211ee94e57b666695e6d0acda73811dbc9d (diff) | |
[Minor] Fixes, fixes (branch: vstakhov-llm-anonymize)
-rw-r--r-- | lualib/lua_mime.lua | 17 |
1 file changed, 13 insertions, 4 deletions
```diff
diff --git a/lualib/lua_mime.lua b/lualib/lua_mime.lua
index 5422c24e4..fe221f599 100644
--- a/lualib/lua_mime.lua
+++ b/lualib/lua_mime.lua
@@ -1151,8 +1151,8 @@ exports.anonymize_message = function(task, settings)
     url = settings.url,
   })
   -- Do not use prompt settings from the module
-  llm_settings.prompt = settings.gpt_prompt or 'Anonymize the following message content by removing or replacing ' ..
-      'any sensitive information while retaining the general structure and meaning, return just the anonymized content:'
+  llm_settings.prompt = settings.gpt_prompt or 'Remove all personal data from the following email ' ..
+      'and return just the anonymized content'
 
   local request_body = {
     model = llm_settings.model,
@@ -1170,6 +1170,10 @@ exports.anonymize_message = function(task, settings)
     }
   }
 
+  if llm_settings.type == 'ollama' then
+    request_body.stream = false
+  end
+
   -- Make the HTTP request to the LLM API
   local http_params = {
     url = llm_settings.url,
@@ -1190,14 +1194,19 @@ exports.anonymize_message = function(task, settings)
     end
 
     local parser = ucl.parser()
-    local res, parse_err = parser:parse_string(data)
+    local res, parse_err = parser:parse_string(data.content)
     if not res then
       logger.errx(task, 'Cannot parse LLM response: %s', parse_err)
       return
     end
 
     local reply = parser:get_object()
-    local anonymized_content = reply.choices and reply.choices[1] and reply.choices[1].message and reply.choices[1].message.content
+    local anonymized_content
+    if llm_settings.type == 'openai' then
+      anonymized_content = reply.choices and reply.choices[1] and reply.choices[1].message and reply.choices[1].message.content
+    elseif llm_settings.type == 'ollama' then
+      anonymized_content = reply.message.content
+    end
     if anonymized_content then
       -- Replace the original content with the anonymized content
       -- sel_part:set_content(anonymized_content) -- Not available, so rebuild message instead
```
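The reply handling is the substantive part of this commit: OpenAI-compatible endpoints return the completion under `choices[1].message.content`, while Ollama's chat API returns it under `message.content` (and streams by default, which is why the second hunk sets `request_body.stream = false` so the body arrives as a single JSON object). The following is a minimal standalone sketch of that branching, outside rspamd; the function name and the sample reply tables are invented for illustration and are not part of the patch.

```lua
-- Sketch of the per-backend reply extraction added in the diff above.
-- 'openai' replies wrap the text in choices[1].message.content,
-- 'ollama' (non-streaming) replies carry it in message.content.
local function extract_anonymized_content(llm_type, reply)
  if llm_type == 'openai' then
    -- Guard every level, as the patch does, in case the reply is malformed
    return reply.choices and reply.choices[1] and reply.choices[1].message
        and reply.choices[1].message.content
  elseif llm_type == 'ollama' then
    return reply.message and reply.message.content
  end
  return nil
end

-- Example shapes of the two reply formats
local openai_reply = { choices = { { message = { content = 'anonymized text' } } } }
local ollama_reply = { message = { content = 'anonymized text' } }

assert(extract_anonymized_content('openai', openai_reply) == 'anonymized text')
assert(extract_anonymized_content('ollama', ollama_reply) == 'anonymized text')
```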