chore: include unrelated workspace edits (prompts/uv.lock/middleware)

nourzakhama2003 2025-12-22 03:22:36 +01:00
parent ef50d44b19
commit fd71d90f30
4 changed files with 75 additions and 0 deletions

View File

@@ -11,6 +11,8 @@ Automatically identify the language of the user's input (e.g. English, Chinese
- The title must be natural, friendly, and in the same language as the input.
- If the input is a direct question to the model, you may add an emoji at the end.
- Special Note for Persian (Farsi): If the input is Persian (Farsi), ALWAYS generate the title in Persian (Farsi). Use Persian characters (for example: پ، چ، ژ، گ، ک، ی) and ensure the "Language Type" field is "Persian" or "Farsi". Do NOT use Arabic or any other language or script when the input is Persian.
3. Output Format
Return **only** a valid JSON object with these exact keys and no additional text:
{
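Note: the hunk above cuts off the prompt's JSON example at the opening brace. Judging from make_json_response in the new tests below, the returned object presumably carries at least the keys shown here; a minimal sketch, with illustrative field values only:

# Hedged reconstruction of the title-generation output, based on the keys
# the new tests build with make_json_response; values are illustrative.
example_response = {
    "Language Type": "Persian",
    "Your Reasoning": "...",
    "Your Output": "یک عنوان کوتاه و دوستانه",  # illustrative Persian title
}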

View File

@@ -0,0 +1,65 @@
import json
from unittest.mock import MagicMock, patch

from core.llm_generator.llm_generator import LLMGenerator


class DummyMessage:
    def __init__(self, content):
        self.content = content


class DummyResponse:
    def __init__(self, content):
        self.message = DummyMessage(content)


def make_json_response(language, output):
    return json.dumps({"Language Type": language, "Your Reasoning": "...", "Your Output": output})


@patch("core.llm_generator.llm_generator.ModelManager.get_default_model_instance")
def test_generate_conversation_name_enforces_persian(mock_get_model):
    # A Persian input containing the Persian-specific character 'پ'
    persian_query = "سلام، چطوری؟ پ"  # contains 'پ'
    # First model response: misdetected as Arabic and returns an Arabic title
    first_resp = DummyResponse(make_json_response("Arabic", "مرحبا"))
    # Second response (after retry): returns a Persian title with Persian-specific characters
    second_resp = DummyResponse(make_json_response("Persian", "عنوان پِرس"))
    model_instance = MagicMock()
    model_instance.invoke_llm.side_effect = [first_resp, second_resp]
    mock_get_model.return_value = model_instance

    name = LLMGenerator.generate_conversation_name("tenant1", persian_query)

    # The final name should come from the Persian response (contains the Persian-specific character 'پ')
    assert "پ" in name
    # Ensure the model was invoked at least twice (a retry occurred)
    assert model_instance.invoke_llm.call_count >= 2


@patch("core.llm_generator.llm_generator.ModelManager.get_default_model_instance")
def test_generate_conversation_name_translation_fallback(mock_get_model):
    # Persian query
    persian_query = "این یک تست است پ"
    # The model returns non-Persian outputs consistently
    non_persian_resp = DummyResponse(make_json_response("Arabic", "مرحبا"))
    # The translation response (last call) returns a Persian translation
    translate_resp = DummyResponse("عنوان ترجمه شده پ")
    model_instance = MagicMock()
    # The first two calls return non-Persian results; the third call is the translation
    model_instance.invoke_llm.side_effect = [non_persian_resp, non_persian_resp, translate_resp]
    mock_get_model.return_value = model_instance

    name = LLMGenerator.generate_conversation_name("tenant1", persian_query)

    # The final name should contain the Persian character 'پ' from the translation fallback
    assert "پ" in name
    assert model_instance.invoke_llm.call_count >= 3
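
Taken together, the two tests pin down a detect, retry, then translate flow: accept a title only when its script matches a Persian query, keep retrying on a mismatch, and finally fall back to a plain-text translation. A minimal sketch of the behaviour they expect, with hypothetical helper names (the real logic lives in LLMGenerator.generate_conversation_name and may differ):

import json

PERSIAN_ONLY_CHARS = "پچژگک"  # letters used in Persian but not in standard Arabic


def _looks_persian(text: str) -> bool:
    return any(ch in text for ch in PERSIAN_ONLY_CHARS)


def pick_conversation_name(query: str, replies) -> str:
    # Walk through model replies the way the tests expect: keep retrying while a
    # Persian query gets a non-Persian title, and treat a non-JSON reply as the
    # plain-text translation fallback.
    name = ""
    for reply in replies:
        content = reply.message.content
        try:
            data = json.loads(content)
        except ValueError:
            return content  # translation fallback returns plain text, not JSON
        name = data.get("Your Output", "")
        if not _looks_persian(query) or _looks_persian(name):
            return name  # title script is acceptable, stop retrying
    return name

Fed the DummyResponse sequences from either test above, this sketch returns a name containing 'پ', which is exactly what the assertions check.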

View File

@@ -1372,6 +1372,7 @@ dependencies = [
    { name = "jieba" },
    { name = "json-repair" },
    { name = "jsonschema" },
    { name = "langdetect" },
    { name = "langfuse" },
    { name = "langsmith" },
    { name = "litellm" },
@@ -1568,6 +1569,7 @@ requires-dist = [
    { name = "jieba", specifier = "==0.42.1" },
    { name = "json-repair", specifier = ">=0.41.1" },
    { name = "jsonschema", specifier = ">=4.25.1" },
    { name = "langdetect", specifier = "~=1.0.9" },
    { name = "langfuse", specifier = "~=2.51.3" },
    { name = "langsmith", specifier = "~=0.1.77" },
    { name = "litellm", specifier = "==1.77.1" },

View File

@@ -176,6 +176,12 @@ services:
      THIRD_PARTY_SIGNATURE_VERIFICATION_ENABLED: true
      THIRD_PARTY_SIGNATURE_VERIFICATION_PUBLIC_KEYS: /app/keys/publickey.pem
      FORCE_VERIFYING_SIGNATURE: false
      HTTP_PROXY: ${HTTP_PROXY:-http://ssrf_proxy:3128}
      HTTPS_PROXY: ${HTTPS_PROXY:-http://ssrf_proxy:3128}
      PLUGIN_PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120}
    extra_hosts:
      - "host.docker.internal:host-gateway"
    ports:
      - "${EXPOSE_PLUGIN_DAEMON_PORT:-5002}:${PLUGIN_DAEMON_PORT:-5002}"
      - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"