mirror of https://github.com/langgenius/dify.git

Merge remote-tracking branch 'origin/main' into feat/trigger

Commit: 02222752f0
@@ -1,4 +1,3 @@
from fastapi.encoders import jsonable_encoder
from flask import make_response, redirect, request
from flask_login import current_user
from flask_restx import Resource, reqparse

@@ -11,6 +10,7 @@ from controllers.console.wraps import (
    setup_required,
)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.impl.oauth import OAuthHandler
from libs.helper import StrLen
from libs.login import login_required
@@ -1,4 +1,5 @@
import uuid
from typing import Literal, cast

from core.app.app_config.entities import (
    DatasetEntity,

@@ -74,6 +75,9 @@ class DatasetConfigManager:
            return None
        query_variable = config.get("dataset_query_variable")

        metadata_model_config_dict = dataset_configs.get("metadata_model_config")
        metadata_filtering_conditions_dict = dataset_configs.get("metadata_filtering_conditions")

        if dataset_configs["retrieval_model"] == "single":
            return DatasetEntity(
                dataset_ids=dataset_ids,

@@ -82,18 +86,23 @@ class DatasetConfigManager:
                    retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of(
                        dataset_configs["retrieval_model"]
                    ),
                    metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"),
                    metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config"))
                    if dataset_configs.get("metadata_model_config")
                    metadata_filtering_mode=cast(
                        Literal["disabled", "automatic", "manual"],
                        dataset_configs.get("metadata_filtering_mode", "disabled"),
                    ),
                    metadata_model_config=ModelConfig(**metadata_model_config_dict)
                    if isinstance(metadata_model_config_dict, dict)
                    else None,
                    metadata_filtering_conditions=MetadataFilteringCondition(
                        **dataset_configs.get("metadata_filtering_conditions", {})
                    )
                    if dataset_configs.get("metadata_filtering_conditions")
                    metadata_filtering_conditions=MetadataFilteringCondition(**metadata_filtering_conditions_dict)
                    if isinstance(metadata_filtering_conditions_dict, dict)
                    else None,
                ),
            )
        else:
            score_threshold_val = dataset_configs.get("score_threshold")
            reranking_model_val = dataset_configs.get("reranking_model")
            weights_val = dataset_configs.get("weights")

            return DatasetEntity(
                dataset_ids=dataset_ids,
                retrieve_config=DatasetRetrieveConfigEntity(

@@ -101,22 +110,23 @@ class DatasetConfigManager:
                    retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of(
                        dataset_configs["retrieval_model"]
                    ),
                    top_k=dataset_configs.get("top_k", 4),
                    score_threshold=dataset_configs.get("score_threshold")
                    if dataset_configs.get("score_threshold_enabled", False)
                    top_k=int(dataset_configs.get("top_k", 4)),
                    score_threshold=float(score_threshold_val)
                    if dataset_configs.get("score_threshold_enabled", False) and score_threshold_val is not None
                    else None,
                    reranking_model=dataset_configs.get("reranking_model"),
                    weights=dataset_configs.get("weights"),
                    reranking_enabled=dataset_configs.get("reranking_enabled", True),
                    reranking_model=reranking_model_val if isinstance(reranking_model_val, dict) else None,
                    weights=weights_val if isinstance(weights_val, dict) else None,
                    reranking_enabled=bool(dataset_configs.get("reranking_enabled", True)),
                    rerank_mode=dataset_configs.get("reranking_mode", "reranking_model"),
                    metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"),
                    metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config"))
                    if dataset_configs.get("metadata_model_config")
                    metadata_filtering_mode=cast(
                        Literal["disabled", "automatic", "manual"],
                        dataset_configs.get("metadata_filtering_mode", "disabled"),
                    ),
                    metadata_model_config=ModelConfig(**metadata_model_config_dict)
                    if isinstance(metadata_model_config_dict, dict)
                    else None,
                    metadata_filtering_conditions=MetadataFilteringCondition(
                        **dataset_configs.get("metadata_filtering_conditions", {})
                    )
                    if dataset_configs.get("metadata_filtering_conditions")
                    metadata_filtering_conditions=MetadataFilteringCondition(**metadata_filtering_conditions_dict)
                    if isinstance(metadata_filtering_conditions_dict, dict)
                    else None,
                ),
            )
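The pattern applied throughout the hunks above — read each optional sub-object out of the untyped config dict, guard it with isinstance before unpacking, and narrow free-form strings with cast(Literal[...]) — can be sketched on its own. The helper names below are illustrative only and are not part of this commit:

from typing import Any, Literal, cast

def parse_filtering_mode(dataset_configs: dict[str, Any]) -> Literal["disabled", "automatic", "manual"]:
    # .get() on an untyped dict returns Any; cast() narrows the value for the
    # type checker without changing runtime behaviour.
    return cast(
        Literal["disabled", "automatic", "manual"],
        dataset_configs.get("metadata_filtering_mode", "disabled"),
    )

def parse_model_config(dataset_configs: dict[str, Any]) -> dict[str, Any] | None:
    # Only hand the sub-dict on if it really is a dict; anything else becomes
    # None instead of blowing up later on ** unpacking.
    value = dataset_configs.get("metadata_model_config")
    return value if isinstance(value, dict) else None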
@@ -134,18 +144,17 @@ class DatasetConfigManager:
        config = cls.extract_dataset_config_for_legacy_compatibility(tenant_id, app_mode, config)

        # dataset_configs
        if not config.get("dataset_configs"):
            config["dataset_configs"] = {"retrieval_model": "single"}
        if "dataset_configs" not in config or not config.get("dataset_configs"):
            config["dataset_configs"] = {}
        config["dataset_configs"]["retrieval_model"] = config["dataset_configs"].get("retrieval_model", "single")

        if not isinstance(config["dataset_configs"], dict):
            raise ValueError("dataset_configs must be of object type")

        if not config["dataset_configs"].get("datasets"):
        if "datasets" not in config["dataset_configs"] or not config["dataset_configs"].get("datasets"):
            config["dataset_configs"]["datasets"] = {"strategy": "router", "datasets": []}

        need_manual_query_datasets = config.get("dataset_configs") and config["dataset_configs"].get(
            "datasets", {}
        ).get("datasets")
        need_manual_query_datasets = config.get("dataset_configs", {}).get("datasets", {}).get("datasets")

        if need_manual_query_datasets and app_mode == AppMode.COMPLETION:
            # Only check when mode is completion
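The same normalization can also be sketched with dict.setdefault. Note the difference: setdefault only fills a missing key, so unlike the explicit checks in the diff it would leave a present-but-falsy value (for example an empty dict) in place. The function name is hypothetical:

def normalize_dataset_configs(config: dict) -> dict:
    # Sketch only: setdefault fills absent keys but does not replace existing
    # falsy values, so this is not an exact drop-in for the checks above.
    dataset_configs = config.setdefault("dataset_configs", {})
    if not isinstance(dataset_configs, dict):
        raise ValueError("dataset_configs must be of object type")
    dataset_configs.setdefault("retrieval_model", "single")
    dataset_configs.setdefault("datasets", {"strategy": "router", "datasets": []})
    return config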
@@ -166,8 +175,8 @@ class DatasetConfigManager:
        :param config: app model config args
        """
        # Extract dataset config for legacy compatibility
        if not config.get("agent_mode"):
            config["agent_mode"] = {"enabled": False, "tools": []}
        if "agent_mode" not in config or not config.get("agent_mode"):
            config["agent_mode"] = {}

        if not isinstance(config["agent_mode"], dict):
            raise ValueError("agent_mode must be of object type")

@@ -180,19 +189,22 @@ class DatasetConfigManager:
            raise ValueError("enabled in agent_mode must be of boolean type")

        # tools
        if not config["agent_mode"].get("tools"):
        if "tools" not in config["agent_mode"] or not config["agent_mode"].get("tools"):
            config["agent_mode"]["tools"] = []

        if not isinstance(config["agent_mode"]["tools"], list):
            raise ValueError("tools in agent_mode must be a list of objects")

        # strategy
        if not config["agent_mode"].get("strategy"):
        if "strategy" not in config["agent_mode"] or not config["agent_mode"].get("strategy"):
            config["agent_mode"]["strategy"] = PlanningStrategy.ROUTER.value

        has_datasets = False
        if config["agent_mode"]["strategy"] in {PlanningStrategy.ROUTER.value, PlanningStrategy.REACT_ROUTER.value}:
            for tool in config["agent_mode"]["tools"]:
        if config.get("agent_mode", {}).get("strategy") in {
            PlanningStrategy.ROUTER.value,
            PlanningStrategy.REACT_ROUTER.value,
        }:
            for tool in config.get("agent_mode", {}).get("tools", []):
                key = list(tool.keys())[0]
                if key == "dataset":
                    # old style, use tool name as key

@@ -217,7 +229,7 @@ class DatasetConfigManager:

                    has_datasets = True

        need_manual_query_datasets = has_datasets and config["agent_mode"]["enabled"]
        need_manual_query_datasets = has_datasets and config.get("agent_mode", {}).get("enabled")

        if need_manual_query_datasets and app_mode == AppMode.COMPLETION:
            # Only check when mode is completion
@@ -107,7 +107,6 @@ class MessageCycleManager:
            if dify_config.DEBUG:
                logger.exception("generate conversation name failed, conversation_id: %s", conversation_id)

        db.session.merge(conversation)
        db.session.commit()
        db.session.close()

@@ -178,10 +178,10 @@ dev = [
# Required for storage clients
############################################################
storage = [
    "azure-storage-blob==12.13.0",
    "azure-storage-blob==12.26.0",
    "bce-python-sdk~=0.9.23",
    "cos-python-sdk-v5==1.9.38",
    "esdk-obs-python==3.24.6.1",
    "esdk-obs-python==3.25.8",
    "google-cloud-storage==2.16.0",
    "opendal~=0.46.0",
    "oss2==2.18.5",
@@ -4,8 +4,7 @@
        "tests/",
        ".venv",
        "migrations/",
        "core/rag",
        "core/app/app_config/easy_ui_based_app/dataset"
        "core/rag"
    ],
    "typeCheckingMode": "strict",
    "allowedUntypedLibraries": [
@@ -346,14 +346,10 @@ class BuiltinToolManageService:
        provider_controller = ToolManager.get_builtin_provider(default_provider.provider, tenant_id)

        credentials: list[ToolProviderCredentialApiEntity] = []
        encrypters = {}
        for provider in providers:
            credential_type = provider.credential_type
            if credential_type not in encrypters:
                encrypters[credential_type] = BuiltinToolManageService.create_tool_encrypter(
                    tenant_id, provider, provider.provider, provider_controller
                )[0]
            encrypter = encrypters[credential_type]
            encrypter, _ = BuiltinToolManageService.create_tool_encrypter(
                tenant_id, provider, provider.provider, provider_controller
            )
            decrypt_credential = encrypter.mask_tool_credentials(encrypter.decrypt(provider.credentials))
            credential_entity = ToolTransformService.convert_builtin_provider_to_credential_entity(
                provider=provider,
@@ -29,23 +29,10 @@ def priority_rag_pipeline_run_task(
    tenant_id: str,
):
    """
    Async Run rag pipeline
    :param rag_pipeline_invoke_entities: Rag pipeline invoke entities
        rag_pipeline_invoke_entities include:
        :param pipeline_id: Pipeline ID
        :param user_id: User ID
        :param tenant_id: Tenant ID
        :param workflow_id: Workflow ID
        :param invoke_from: Invoke source (debugger, published, etc.)
        :param streaming: Whether to stream results
        :param datasource_type: Type of datasource
        :param datasource_info: Datasource information dict
        :param batch: Batch identifier
        :param document_id: Document ID (optional)
        :param start_node_id: Starting node ID
        :param inputs: Input parameters dict
        :param workflow_execution_id: Workflow execution ID
        :param workflow_thread_pool_id: Thread pool ID for workflow execution
    Async Run rag pipeline task using high priority queue.

    :param rag_pipeline_invoke_entities_file_id: File ID containing serialized RAG pipeline invoke entities
    :param tenant_id: Tenant ID for the pipeline execution
    """
    # run with threading, thread pool size is 10

@@ -30,23 +30,10 @@ def rag_pipeline_run_task(
    tenant_id: str,
):
    """
    Async Run rag pipeline
    :param rag_pipeline_invoke_entities: Rag pipeline invoke entities
        rag_pipeline_invoke_entities include:
        :param pipeline_id: Pipeline ID
        :param user_id: User ID
        :param tenant_id: Tenant ID
        :param workflow_id: Workflow ID
        :param invoke_from: Invoke source (debugger, published, etc.)
        :param streaming: Whether to stream results
        :param datasource_type: Type of datasource
        :param datasource_info: Datasource information dict
        :param batch: Batch identifier
        :param document_id: Document ID (optional)
        :param start_node_id: Starting node ID
        :param inputs: Input parameters dict
        :param workflow_execution_id: Workflow execution ID
        :param workflow_thread_pool_id: Thread pool ID for workflow execution
    Async Run rag pipeline task using regular priority queue.

    :param rag_pipeline_invoke_entities_file_id: File ID containing serialized RAG pipeline invoke entities
    :param tenant_id: Tenant ID for the pipeline execution
    """
    # run with threading, thread pool size is 10

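Both docstrings now describe a task that receives a file ID pointing at the serialized invoke entities, and the retained comment notes a thread pool of size 10. The generic fan-out shape that comment alludes to can be sketched as follows; this is not the task's actual implementation, only the standard ThreadPoolExecutor pattern:

from collections.abc import Callable, Iterable
from concurrent.futures import ThreadPoolExecutor

def run_in_pool(items: Iterable[object], handler: Callable[[object], None], pool_size: int = 10) -> None:
    # Submit one handler call per item and surface the first failure, if any.
    with ThreadPoolExecutor(max_workers=pool_size) as pool:
        futures = [pool.submit(handler, item) for item in items]
        for future in futures:
            future.result()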
@@ -0,0 +1,282 @@
from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from libs.email_i18n import EmailType
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from tasks.mail_change_mail_task import send_change_mail_completed_notification_task, send_change_mail_task


class TestMailChangeMailTask:
    """Integration tests for mail_change_mail_task using testcontainers."""

    @pytest.fixture
    def mock_external_service_dependencies(self):
        """Mock setup for external service dependencies."""
        with (
            patch("tasks.mail_change_mail_task.mail") as mock_mail,
            patch("tasks.mail_change_mail_task.get_email_i18n_service") as mock_get_email_i18n_service,
        ):
            # Setup mock mail service
            mock_mail.is_inited.return_value = True

            # Setup mock email i18n service
            mock_email_service = MagicMock()
            mock_get_email_i18n_service.return_value = mock_email_service

            yield {
                "mail": mock_mail,
                "email_i18n_service": mock_email_service,
                "get_email_i18n_service": mock_get_email_i18n_service,
            }

    def _create_test_account(self, db_session_with_containers):
        """
        Helper method to create a test account for testing.

        Args:
            db_session_with_containers: Database session from testcontainers infrastructure

        Returns:
            Account: Created account instance
        """
        fake = Faker()

        # Create account
        account = Account(
            email=fake.email(),
            name=fake.name(),
            interface_language="en-US",
            status="active",
        )
        db_session_with_containers.add(account)
        db_session_with_containers.commit()

        # Create tenant
        tenant = Tenant(
            name=fake.company(),
            status="normal",
        )
        db_session_with_containers.add(tenant)
        db_session_with_containers.commit()

        # Create tenant-account join
        join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.OWNER.value,
            current=True,
        )
        db_session_with_containers.add(join)
        db_session_with_containers.commit()

        return account

    def test_send_change_mail_task_success_old_email_phase(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful change email task execution for old_email phase.

        This test verifies:
        - Proper mail service initialization check
        - Correct email service method call with old_email phase
        - Successful task completion
        """
        # Arrange: Create test data
        account = self._create_test_account(db_session_with_containers)
        test_language = "en-US"
        test_email = account.email
        test_code = "123456"
        test_phase = "old_email"

        # Act: Execute the task
        send_change_mail_task(test_language, test_email, test_code, test_phase)

        # Assert: Verify the expected outcomes
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_called_once()
        mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with(
            language_code=test_language,
            to=test_email,
            code=test_code,
            phase=test_phase,
        )

    def test_send_change_mail_task_success_new_email_phase(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful change email task execution for new_email phase.

        This test verifies:
        - Proper mail service initialization check
        - Correct email service method call with new_email phase
        - Successful task completion
        """
        # Arrange: Create test data
        account = self._create_test_account(db_session_with_containers)
        test_language = "zh-Hans"
        test_email = "new@example.com"
        test_code = "789012"
        test_phase = "new_email"

        # Act: Execute the task
        send_change_mail_task(test_language, test_email, test_code, test_phase)

        # Assert: Verify the expected outcomes
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_called_once()
        mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with(
            language_code=test_language,
            to=test_email,
            code=test_code,
            phase=test_phase,
        )

    def test_send_change_mail_task_mail_not_initialized(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test change email task when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No email service calls when mail is not available
        """
        # Arrange: Setup mail service as not initialized
        mock_external_service_dependencies["mail"].is_inited.return_value = False
        test_language = "en-US"
        test_email = "test@example.com"
        test_code = "123456"
        test_phase = "old_email"

        # Act: Execute the task
        send_change_mail_task(test_language, test_email, test_code, test_phase)

        # Assert: Verify no email service calls
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_not_called()
        mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_not_called()

    def test_send_change_mail_task_email_service_exception(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test change email task when email service raises an exception.

        This test verifies:
        - Exception is properly caught and logged
        - Task completes without raising exception
        """
        # Arrange: Setup email service to raise exception
        mock_external_service_dependencies["email_i18n_service"].send_change_email.side_effect = Exception(
            "Email service failed"
        )
        test_language = "en-US"
        test_email = "test@example.com"
        test_code = "123456"
        test_phase = "old_email"

        # Act: Execute the task (should not raise exception)
        send_change_mail_task(test_language, test_email, test_code, test_phase)

        # Assert: Verify email service was called despite exception
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_called_once()
        mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with(
            language_code=test_language,
            to=test_email,
            code=test_code,
            phase=test_phase,
        )

    def test_send_change_mail_completed_notification_task_success(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test successful change email completed notification task execution.

        This test verifies:
        - Proper mail service initialization check
        - Correct email service method call with CHANGE_EMAIL_COMPLETED type
        - Template context is properly constructed
        - Successful task completion
        """
        # Arrange: Create test data
        account = self._create_test_account(db_session_with_containers)
        test_language = "en-US"
        test_email = account.email

        # Act: Execute the task
        send_change_mail_completed_notification_task(test_language, test_email)

        # Assert: Verify the expected outcomes
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_called_once()
        mock_external_service_dependencies["email_i18n_service"].send_email.assert_called_once_with(
            email_type=EmailType.CHANGE_EMAIL_COMPLETED,
            language_code=test_language,
            to=test_email,
            template_context={
                "to": test_email,
                "email": test_email,
            },
        )

    def test_send_change_mail_completed_notification_task_mail_not_initialized(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test change email completed notification task when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No email service calls when mail is not available
        """
        # Arrange: Setup mail service as not initialized
        mock_external_service_dependencies["mail"].is_inited.return_value = False
        test_language = "en-US"
        test_email = "test@example.com"

        # Act: Execute the task
        send_change_mail_completed_notification_task(test_language, test_email)

        # Assert: Verify no email service calls
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_not_called()
        mock_external_service_dependencies["email_i18n_service"].send_email.assert_not_called()

    def test_send_change_mail_completed_notification_task_email_service_exception(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test change email completed notification task when email service raises an exception.

        This test verifies:
        - Exception is properly caught and logged
        - Task completes without raising exception
        """
        # Arrange: Setup email service to raise exception
        mock_external_service_dependencies["email_i18n_service"].send_email.side_effect = Exception(
            "Email service failed"
        )
        test_language = "en-US"
        test_email = "test@example.com"

        # Act: Execute the task (should not raise exception)
        send_change_mail_completed_notification_task(test_language, test_email)

        # Assert: Verify email service was called despite exception
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()
        mock_external_service_dependencies["get_email_i18n_service"].assert_called_once()
        mock_external_service_dependencies["email_i18n_service"].send_email.assert_called_once_with(
            email_type=EmailType.CHANGE_EMAIL_COMPLETED,
            language_code=test_language,
            to=test_email,
            template_context={
                "to": test_email,
                "email": test_email,
            },
        )
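These tests pin down only the call pattern of send_change_mail_task: check mail.is_inited(), resolve the i18n service, call send_change_email, and swallow any failure. A minimal sketch with that shape follows; it is not the real implementation in tasks/mail_change_mail_task.py, and the import paths are assumed from the patch targets used in the fixture:

import logging

from extensions.ext_mail import mail  # import path assumed from the patch target
from libs.email_i18n import get_email_i18n_service  # import path assumed

logger = logging.getLogger(__name__)

def send_change_mail_sketch(language: str, to: str, code: str, phase: str) -> None:
    # Mirrors only the behaviour asserted above; the production task may differ.
    if not mail.is_inited():
        return
    try:
        email_service = get_email_i18n_service()
        email_service.send_change_email(language_code=language, to=to, code=code, phase=phase)
    except Exception:
        # The exception test expects failures to be logged, not re-raised.
        logger.exception("Failed to send change email to %s", to)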
@@ -0,0 +1,261 @@
from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from tasks.mail_inner_task import send_inner_email_task


class TestMailInnerTask:
    """Integration tests for send_inner_email_task using testcontainers."""

    @pytest.fixture
    def mock_external_service_dependencies(self):
        """Mock setup for external service dependencies."""
        with (
            patch("tasks.mail_inner_task.mail") as mock_mail,
            patch("tasks.mail_inner_task.get_email_i18n_service") as mock_get_email_i18n_service,
            patch("tasks.mail_inner_task._render_template_with_strategy") as mock_render_template,
        ):
            # Setup mock mail service
            mock_mail.is_inited.return_value = True

            # Setup mock email i18n service
            mock_email_service = MagicMock()
            mock_get_email_i18n_service.return_value = mock_email_service

            # Setup mock template rendering
            mock_render_template.return_value = "<html>Test email content</html>"

            yield {
                "mail": mock_mail,
                "email_service": mock_email_service,
                "render_template": mock_render_template,
            }

    def _create_test_email_data(self, fake: Faker) -> dict:
        """
        Helper method to create test email data for testing.

        Args:
            fake: Faker instance for generating test data

        Returns:
            dict: Test email data including recipients, subject, body, and substitutions
        """
        return {
            "to": [fake.email() for _ in range(3)],
            "subject": fake.sentence(nb_words=4),
            "body": "Hello {{name}}, this is a test email from {{company}}.",
            "substitutions": {
                "name": fake.name(),
                "company": fake.company(),
                "date": fake.date(),
            },
        }

    def test_send_inner_email_success(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test successful email sending with valid data.

        This test verifies:
        - Proper email service initialization check
        - Template rendering with substitutions
        - Email service integration
        - Multiple recipient handling
        """
        # Arrange: Create test data
        fake = Faker()
        email_data = self._create_test_email_data(fake)

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify the expected outcomes
        # Verify mail service was checked for initialization
        mock_external_service_dependencies["mail"].is_inited.assert_called_once()

        # Verify template rendering was called with correct parameters
        mock_external_service_dependencies["render_template"].assert_called_once_with(
            email_data["body"], email_data["substitutions"]
        )

        # Verify email service was called once with the full recipient list
        mock_email_service = mock_external_service_dependencies["email_service"]
        mock_email_service.send_raw_email.assert_called_once_with(
            to=email_data["to"],
            subject=email_data["subject"],
            html_content="<html>Test email content</html>",
        )

    def test_send_inner_email_single_recipient(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test email sending with single recipient.

        This test verifies:
        - Single recipient handling
        - Template rendering
        - Email service integration
        """
        # Arrange: Create test data with single recipient
        fake = Faker()
        email_data = {
            "to": [fake.email()],
            "subject": fake.sentence(nb_words=3),
            "body": "Welcome {{user_name}}!",
            "substitutions": {
                "user_name": fake.name(),
            },
        }

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify the expected outcomes
        mock_email_service = mock_external_service_dependencies["email_service"]
        mock_email_service.send_raw_email.assert_called_once_with(
            to=email_data["to"],
            subject=email_data["subject"],
            html_content="<html>Test email content</html>",
        )

    def test_send_inner_email_empty_substitutions(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test email sending with empty substitutions.

        This test verifies:
        - Template rendering with empty substitutions
        - Email service integration
        - Handling of minimal template context
        """
        # Arrange: Create test data with empty substitutions
        fake = Faker()
        email_data = {
            "to": [fake.email()],
            "subject": fake.sentence(nb_words=3),
            "body": "This is a simple email without variables.",
            "substitutions": {},
        }

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify the expected outcomes
        mock_external_service_dependencies["render_template"].assert_called_once_with(email_data["body"], {})

        mock_email_service = mock_external_service_dependencies["email_service"]
        mock_email_service.send_raw_email.assert_called_once_with(
            to=email_data["to"],
            subject=email_data["subject"],
            html_content="<html>Test email content</html>",
        )

    def test_send_inner_email_mail_not_initialized(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test email sending when mail service is not initialized.

        This test verifies:
        - Early return when mail service is not initialized
        - No template rendering occurs
        - No email service calls
        - No exceptions raised
        """
        # Arrange: Setup mail service as not initialized
        mock_external_service_dependencies["mail"].is_inited.return_value = False

        fake = Faker()
        email_data = self._create_test_email_data(fake)

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify no processing occurred
        mock_external_service_dependencies["render_template"].assert_not_called()
        mock_external_service_dependencies["email_service"].send_raw_email.assert_not_called()

    def test_send_inner_email_template_rendering_error(
        self, db_session_with_containers, mock_external_service_dependencies
    ):
        """
        Test email sending when template rendering fails.

        This test verifies:
        - Exception handling during template rendering
        - No email service calls when template fails
        """
        # Arrange: Setup template rendering to raise an exception
        mock_external_service_dependencies["render_template"].side_effect = Exception("Template rendering failed")

        fake = Faker()
        email_data = self._create_test_email_data(fake)

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify template rendering was attempted
        mock_external_service_dependencies["render_template"].assert_called_once()

        # Verify no email service calls due to exception
        mock_external_service_dependencies["email_service"].send_raw_email.assert_not_called()

    def test_send_inner_email_service_error(self, db_session_with_containers, mock_external_service_dependencies):
        """
        Test email sending when email service fails.

        This test verifies:
        - Exception handling during email sending
        - Graceful error handling
        """
        # Arrange: Setup email service to raise an exception
        mock_external_service_dependencies["email_service"].send_raw_email.side_effect = Exception(
            "Email service failed"
        )

        fake = Faker()
        email_data = self._create_test_email_data(fake)

        # Act: Execute the task
        send_inner_email_task(
            to=email_data["to"],
            subject=email_data["subject"],
            body=email_data["body"],
            substitutions=email_data["substitutions"],
        )

        # Assert: Verify template rendering occurred
        mock_external_service_dependencies["render_template"].assert_called_once()

        # Verify email service was called (and failed)
        mock_email_service = mock_external_service_dependencies["email_service"]
        mock_email_service.send_raw_email.assert_called_once_with(
            to=email_data["to"],
            subject=email_data["subject"],
            html_content="<html>Test email content</html>",
        )
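The inner-mail tests patch _render_template_with_strategy out entirely, so nothing in this diff shows how {{name}}-style placeholders are actually rendered. Purely for illustration, a naive substitution of that style could look like the following; it is an assumption about the template syntax, not the helper's real behaviour:

import re

def render_simple_template(body: str, substitutions: dict[str, str]) -> str:
    # Naive {{variable}} replacement; unknown placeholders are left untouched.
    def replace(match: re.Match) -> str:
        key = match.group(1).strip()
        return str(substitutions.get(key, match.group(0)))
    return re.sub(r"\{\{\s*([\w.]+)\s*\}\}", replace, body)

# e.g. render_simple_template("Hello {{name}}", {"name": "Dify"}) == "Hello Dify"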
api/uv.lock (35 changed lines)
@@ -445,16 +445,17 @@ wheels = [

[[package]]
name = "azure-storage-blob"
version = "12.13.0"
version = "12.26.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "azure-core" },
    { name = "cryptography" },
    { name = "msrest" },
    { name = "isodate" },
    { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/93/b13bf390e940a79a399981f75ac8d2e05a70112a95ebb7b41e9b752d2921/azure-storage-blob-12.13.0.zip", hash = "sha256:53f0d4cd32970ac9ff9b9753f83dd2fb3f9ac30e1d01e71638c436c509bfd884", size = 684838, upload-time = "2022-07-07T22:35:44.543Z" }
sdist = { url = "https://files.pythonhosted.org/packages/96/95/3e3414491ce45025a1cde107b6ae72bf72049e6021597c201cd6a3029b9a/azure_storage_blob-12.26.0.tar.gz", hash = "sha256:5dd7d7824224f7de00bfeb032753601c982655173061e242f13be6e26d78d71f", size = 583332, upload-time = "2025-07-16T21:34:07.644Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/0e/2a/b8246df35af68d64fb7292c93dbbde63cd25036f2f669a9d9ae59e518c76/azure_storage_blob-12.13.0-py3-none-any.whl", hash = "sha256:280a6ab032845bab9627582bee78a50497ca2f14772929b5c5ee8b4605af0cb3", size = 377309, upload-time = "2022-07-07T22:35:41.905Z" },
    { url = "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", hash = "sha256:8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", size = 412907, upload-time = "2025-07-16T21:34:09.367Z" },
]

[[package]]

@@ -1638,10 +1639,10 @@ dev = [
    { name = "types-ujson", specifier = ">=5.10.0" },
]
storage = [
    { name = "azure-storage-blob", specifier = "==12.13.0" },
    { name = "azure-storage-blob", specifier = "==12.26.0" },
    { name = "bce-python-sdk", specifier = "~=0.9.23" },
    { name = "cos-python-sdk-v5", specifier = "==1.9.38" },
    { name = "esdk-obs-python", specifier = "==3.24.6.1" },
    { name = "esdk-obs-python", specifier = "==3.25.8" },
    { name = "google-cloud-storage", specifier = "==2.16.0" },
    { name = "opendal", specifier = "~=0.46.0" },
    { name = "oss2", specifier = "==2.18.5" },

@@ -1792,12 +1793,14 @@ wheels = [

[[package]]
name = "esdk-obs-python"
version = "3.24.6.1"
version = "3.25.8"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "crcmod" },
    { name = "pycryptodome" },
    { name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f7/af/d83276f9e288bd6a62f44d67ae1eafd401028ba1b2b643ae4014b51da5bd/esdk-obs-python-3.24.6.1.tar.gz", hash = "sha256:c45fed143e99d9256c8560c1d78f651eae0d2e809d16e962f8b286b773c33bf0", size = 85798, upload-time = "2024-07-26T13:13:22.467Z" }
sdist = { url = "https://files.pythonhosted.org/packages/40/99/52362d6e081a642d6de78f6ab53baa5e3f82f2386c48954e18ee7b4ab22b/esdk-obs-python-3.25.8.tar.gz", hash = "sha256:aeded00b27ecd5a25ffaec38a2cc9416b51923d48db96c663f1a735f859b5273", size = 96302, upload-time = "2025-09-01T11:35:20.432Z" }

[[package]]
name = "et-xmlfile"

@@ -3382,22 +3385,6 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" },
]

[[package]]
name = "msrest"
version = "0.7.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "azure-core" },
    { name = "certifi" },
    { name = "isodate" },
    { name = "requests" },
    { name = "requests-oauthlib" },
]
sdist = { url = "https://files.pythonhosted.org/packages/68/77/8397c8fb8fc257d8ea0fa66f8068e073278c65f05acb17dcb22a02bfdc42/msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9", size = 175332, upload-time = "2022-06-13T22:41:25.111Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/15/cf/f2966a2638144491f8696c27320d5219f48a072715075d168b31d3237720/msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32", size = 85384, upload-time = "2022-06-13T22:41:22.42Z" },
]

[[package]]
name = "multidict"
version = "6.6.4"
@@ -8,12 +8,14 @@ import {
import ActionButton from '@/app/components/base/action-button'
import CopyIcon from '@/app/components/base/copy-icon'
import SVGBtn from '@/app/components/base/svg'
import Flowchart from '@/app/components/base/mermaid'
import { Theme } from '@/types/app'
import useTheme from '@/hooks/use-theme'
import SVGRenderer from '../svg-gallery' // Assumes svg-gallery.tsx is in /base directory
import MarkdownMusic from '@/app/components/base/markdown-blocks/music'
import ErrorBoundary from '@/app/components/base/markdown/error-boundary'
import dynamic from 'next/dynamic'

const Flowchart = dynamic(() => import('@/app/components/base/mermaid'), { ssr: false })

// Available language https://github.com/react-syntax-highlighter/react-syntax-highlighter/blob/master/AVAILABLE_LANGUAGES_HLJS.MD
const capitalizationLanguageNameMap: Record<string, string> = {
@@ -1,25 +1,11 @@
import ReactMarkdown from 'react-markdown'
import dynamic from 'next/dynamic'
import 'katex/dist/katex.min.css'
import RemarkMath from 'remark-math'
import RemarkBreaks from 'remark-breaks'
import RehypeKatex from 'rehype-katex'
import RemarkGfm from 'remark-gfm'
import RehypeRaw from 'rehype-raw'
import { flow } from 'lodash-es'
import cn from '@/utils/classnames'
import { customUrlTransform, preprocessLaTeX, preprocessThinkTag } from './markdown-utils'
import {
  AudioBlock,
  CodeBlock,
  Img,
  Link,
  MarkdownButton,
  MarkdownForm,
  Paragraph,
  ScriptBlock,
  ThinkBlock,
  VideoBlock,
} from '@/app/components/base/markdown-blocks'
import { preprocessLaTeX, preprocessThinkTag } from './markdown-utils'
import type { ReactMarkdownWrapperProps } from './react-markdown-wrapper'

const ReactMarkdown = dynamic(() => import('./react-markdown-wrapper').then(mod => mod.ReactMarkdownWrapper), { ssr: false })

/**
 * @fileoverview Main Markdown rendering component.

@@ -31,9 +17,7 @@ import {
export type MarkdownProps = {
  content: string
  className?: string
  customDisallowedElements?: string[]
  customComponents?: Record<string, React.ComponentType<any>>
}
} & Pick<ReactMarkdownWrapperProps, 'customComponents' | 'customDisallowedElements'>

export const Markdown = (props: MarkdownProps) => {
  const { customComponents = {} } = props

@@ -44,53 +28,7 @@ export const Markdown = (props: MarkdownProps) => {

  return (
    <div className={cn('markdown-body', '!text-text-primary', props.className)}>
      <ReactMarkdown
        remarkPlugins={[
          RemarkGfm,
          [RemarkMath, { singleDollarTextMath: false }],
          RemarkBreaks,
        ]}
        rehypePlugins={[
          RehypeKatex,
          RehypeRaw as any,
          // The Rehype plug-in is used to remove the ref attribute of an element
          () => {
            return (tree: any) => {
              const iterate = (node: any) => {
                if (node.type === 'element' && node.properties?.ref)
                  delete node.properties.ref

                if (node.type === 'element' && !/^[a-z][a-z0-9]*$/i.test(node.tagName)) {
                  node.type = 'text'
                  node.value = `<${node.tagName}`
                }

                if (node.children)
                  node.children.forEach(iterate)
              }
              tree.children.forEach(iterate)
            }
          },
        ]}
        urlTransform={customUrlTransform}
        disallowedElements={['iframe', 'head', 'html', 'meta', 'link', 'style', 'body', ...(props.customDisallowedElements || [])]}
        components={{
          code: CodeBlock,
          img: Img,
          video: VideoBlock,
          audio: AudioBlock,
          a: Link,
          p: Paragraph,
          button: MarkdownButton,
          form: MarkdownForm,
          script: ScriptBlock as any,
          details: ThinkBlock,
          ...customComponents,
        }}
      >
        {/* Markdown detect has problem. */}
        {latexContent}
      </ReactMarkdown>
      <ReactMarkdown latexContent={latexContent} customComponents={customComponents} customDisallowedElements={props.customDisallowedElements} />
    </div>
  )
}
@@ -0,0 +1,82 @@
import ReactMarkdown from 'react-markdown'
import RemarkMath from 'remark-math'
import RemarkBreaks from 'remark-breaks'
import RehypeKatex from 'rehype-katex'
import RemarkGfm from 'remark-gfm'
import RehypeRaw from 'rehype-raw'
import AudioBlock from '@/app/components/base/markdown-blocks/audio-block'
import Img from '@/app/components/base/markdown-blocks/img'
import Link from '@/app/components/base/markdown-blocks/link'
import MarkdownButton from '@/app/components/base/markdown-blocks/button'
import MarkdownForm from '@/app/components/base/markdown-blocks/form'
import Paragraph from '@/app/components/base/markdown-blocks/paragraph'
import ScriptBlock from '@/app/components/base/markdown-blocks/script-block'
import ThinkBlock from '@/app/components/base/markdown-blocks/think-block'
import VideoBlock from '@/app/components/base/markdown-blocks/video-block'
import { customUrlTransform } from './markdown-utils'

import type { FC } from 'react'

import dynamic from 'next/dynamic'

const CodeBlock = dynamic(() => import('@/app/components/base/markdown-blocks/code-block'), { ssr: false })

export type ReactMarkdownWrapperProps = {
  latexContent: any
  customDisallowedElements?: string[]
  customComponents?: Record<string, React.ComponentType<any>>
}

export const ReactMarkdownWrapper: FC<ReactMarkdownWrapperProps> = (props) => {
  const { customComponents, latexContent } = props

  return (
    <ReactMarkdown
      remarkPlugins={[
        RemarkGfm,
        [RemarkMath, { singleDollarTextMath: false }],
        RemarkBreaks,
      ]}
      rehypePlugins={[
        RehypeKatex,
        RehypeRaw as any,
        // The Rehype plug-in is used to remove the ref attribute of an element
        () => {
          return (tree: any) => {
            const iterate = (node: any) => {
              if (node.type === 'element' && node.properties?.ref)
                delete node.properties.ref

              if (node.type === 'element' && !/^[a-z][a-z0-9]*$/i.test(node.tagName)) {
                node.type = 'text'
                node.value = `<${node.tagName}`
              }

              if (node.children)
                node.children.forEach(iterate)
            }
            tree.children.forEach(iterate)
          }
        },
      ]}
      urlTransform={customUrlTransform}
      disallowedElements={['iframe', 'head', 'html', 'meta', 'link', 'style', 'body', ...(props.customDisallowedElements || [])]}
      components={{
        code: CodeBlock,
        img: Img,
        video: VideoBlock,
        audio: AudioBlock,
        a: Link,
        p: Paragraph,
        button: MarkdownButton,
        form: MarkdownForm,
        script: ScriptBlock as any,
        details: ThinkBlock,
        ...customComponents,
      }}
    >
      {/* Markdown detect has problem. */}
      {latexContent}
    </ReactMarkdown>
  )
}
@@ -57,7 +57,34 @@ const CustomizedPagination: FC<Props> = ({
    if (isNaN(Number.parseInt(value)))
      return setInputValue('')
    setInputValue(Number.parseInt(value))
    handlePaging(value)
  }

  const handleInputConfirm = () => {
    if (inputValue !== '' && String(inputValue) !== String(current + 1)) {
      handlePaging(String(inputValue))
      return
    }

    if (inputValue === '')
      setInputValue(current + 1)

    setShowInput(false)
  }

  const handleInputKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
    if (e.key === 'Enter') {
      e.preventDefault()
      handleInputConfirm()
    }
    else if (e.key === 'Escape') {
      e.preventDefault()
      setInputValue(current + 1)
      setShowInput(false)
    }
  }

  const handleInputBlur = () => {
    handleInputConfirm()
  }

  return (

@@ -105,7 +132,8 @@ const CustomizedPagination: FC<Props> = ({
            autoFocus
            value={inputValue}
            onChange={handleInputChange}
            onBlur={() => setShowInput(false)}
            onKeyDown={handleInputKeyDown}
            onBlur={handleInputBlur}
          />
        )}
        <Pagination.NextButton
@@ -7,7 +7,6 @@ import DocumentFileIcon from '@/app/components/datasets/common/document-file-ico
import cn from '@/utils/classnames'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { ToastContext } from '@/app/components/base/toast'
import SimplePieChart from '@/app/components/base/simple-pie-chart'
import { upload } from '@/service/base'
import I18n from '@/context/i18n'
import { LanguagesSupported } from '@/i18n-config/language'

@@ -17,6 +16,9 @@ import useTheme from '@/hooks/use-theme'
import { useFileUploadConfig } from '@/service/use-common'
import { useDataSourceStore, useDataSourceStoreWithSelector } from '../store'
import produce from 'immer'
import dynamic from 'next/dynamic'

const SimplePieChart = dynamic(() => import('@/app/components/base/simple-pie-chart'), { ssr: false })

const FILES_NUMBER_LIMIT = 20

@@ -9,7 +9,7 @@ import {
} from '@/app/components/workflow/types'
import {
  useWorkflowInit,
} from './hooks'
} from './hooks/use-workflow-init'
import { useAppTriggers } from '@/service/use-tools'
import { useTriggerStatusStore } from '@/app/components/workflow/store/trigger-status'
import { useStore as useAppStore } from '@/app/components/app/store'
@@ -16,23 +16,25 @@ import type { WorkflowHistoryEventMeta } from '../workflow-history-store'
 * - InputChange events in Node Panels do not trigger state changes.
 * - Resizing UI elements does not trigger state changes.
 */
export enum WorkflowHistoryEvent {
  NodeTitleChange = 'NodeTitleChange',
  NodeDescriptionChange = 'NodeDescriptionChange',
  NodeDragStop = 'NodeDragStop',
  NodeChange = 'NodeChange',
  NodeConnect = 'NodeConnect',
  NodePaste = 'NodePaste',
  NodeDelete = 'NodeDelete',
  EdgeDelete = 'EdgeDelete',
  EdgeDeleteByDeleteBranch = 'EdgeDeleteByDeleteBranch',
  NodeAdd = 'NodeAdd',
  NodeResize = 'NodeResize',
  NoteAdd = 'NoteAdd',
  NoteChange = 'NoteChange',
  NoteDelete = 'NoteDelete',
  LayoutOrganize = 'LayoutOrganize',
}
export const WorkflowHistoryEvent = {
  NodeTitleChange: 'NodeTitleChange',
  NodeDescriptionChange: 'NodeDescriptionChange',
  NodeDragStop: 'NodeDragStop',
  NodeChange: 'NodeChange',
  NodeConnect: 'NodeConnect',
  NodePaste: 'NodePaste',
  NodeDelete: 'NodeDelete',
  EdgeDelete: 'EdgeDelete',
  EdgeDeleteByDeleteBranch: 'EdgeDeleteByDeleteBranch',
  NodeAdd: 'NodeAdd',
  NodeResize: 'NodeResize',
  NoteAdd: 'NoteAdd',
  NoteChange: 'NoteChange',
  NoteDelete: 'NoteDelete',
  LayoutOrganize: 'LayoutOrganize',
} as const

export type WorkflowHistoryEventT = keyof typeof WorkflowHistoryEvent

export const useWorkflowHistory = () => {
  const store = useStoreApi()

@@ -65,7 +67,7 @@ export const useWorkflowHistory = () => {
  // Some events may be triggered multiple times in a short period of time.
  // We debounce the history state update to avoid creating multiple history states
  // with minimal changes.
  const saveStateToHistoryRef = useRef(debounce((event: WorkflowHistoryEvent, meta?: WorkflowHistoryEventMeta) => {
  const saveStateToHistoryRef = useRef(debounce((event: WorkflowHistoryEventT, meta?: WorkflowHistoryEventMeta) => {
    workflowHistoryStore.setState({
      workflowHistoryEvent: event,
      workflowHistoryEventMeta: meta,

@@ -74,7 +76,7 @@ export const useWorkflowHistory = () => {
    })
  }, 500))

  const saveStateToHistory = useCallback((event: WorkflowHistoryEvent, meta?: WorkflowHistoryEventMeta) => {
  const saveStateToHistory = useCallback((event: WorkflowHistoryEventT, meta?: WorkflowHistoryEventMeta) => {
    switch (event) {
      case WorkflowHistoryEvent.NoteChange:
        // Hint: Note change does not trigger when note text changes,

@@ -105,7 +107,7 @@ export const useWorkflowHistory = () => {
    }
  }, [])

  const getHistoryLabel = useCallback((event: WorkflowHistoryEvent) => {
  const getHistoryLabel = useCallback((event: WorkflowHistoryEventT) => {
    switch (event) {
      case WorkflowHistoryEvent.NodeTitleChange:
        return t('workflow.changeHistory.nodeTitleChange')
@@ -3,7 +3,7 @@ import { type StoreApi, create } from 'zustand'
import { type TemporalState, temporal } from 'zundo'
import isDeepEqual from 'fast-deep-equal'
import type { Edge, Node } from './types'
import type { WorkflowHistoryEvent } from './hooks'
import type { WorkflowHistoryEventT } from './hooks'
import { noop } from 'lodash-es'

export const WorkflowHistoryStoreContext = createContext<WorkflowHistoryStoreContextType>({ store: null, shortcutsEnabled: true, setShortcutsEnabled: noop })

@@ -98,7 +98,7 @@ function createStore({
export type WorkflowHistoryStore = {
  nodes: Node[]
  edges: Edge[]
  workflowHistoryEvent: WorkflowHistoryEvent | undefined
  workflowHistoryEvent: WorkflowHistoryEventT | undefined
  workflowHistoryEventMeta?: WorkflowHistoryEventMeta
}

@@ -108,7 +108,7 @@
    "react": "19.1.1",
    "react-18-input-autosize": "^3.0.0",
    "react-dom": "19.1.1",
    "react-easy-crop": "^5.1.0",
    "react-easy-crop": "^5.5.3",
    "react-hook-form": "^7.53.1",
    "react-hotkeys-hook": "^4.6.1",
    "react-i18next": "^15.1.0",

@@ -179,7 +179,7 @@
    "@types/sortablejs": "^1.15.1",
    "@types/uuid": "^10.0.0",
    "autoprefixer": "^10.4.20",
    "babel-loader": "^9.2.1",
    "babel-loader": "^10.0.0",
    "bing-translate-api": "^4.0.2",
    "code-inspector-plugin": "1.2.9",
    "cross-env": "^7.0.3",
@@ -245,8 +245,8 @@ importers:
        specifier: 19.1.1
        version: 19.1.1(react@19.1.1)
      react-easy-crop:
        specifier: ^5.1.0
        version: 5.5.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1)
        specifier: ^5.5.3
        version: 5.5.3(react-dom@19.1.1(react@19.1.1))(react@19.1.1)
      react-hook-form:
        specifier: ^7.53.1
        version: 7.60.0(react@19.1.1)

@@ -453,8 +453,8 @@ importers:
        specifier: ^10.4.20
        version: 10.4.21(postcss@8.5.6)
      babel-loader:
        specifier: ^9.2.1
        version: 9.2.1(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
        specifier: ^10.0.0
        version: 10.0.0(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
      bing-translate-api:
        specifier: ^4.0.2
        version: 4.1.0

@@ -3939,6 +3939,13 @@ packages:
    peerDependencies:
      '@babel/core': ^7.8.0

  babel-loader@10.0.0:
    resolution: {integrity: sha512-z8jt+EdS61AMw22nSfoNJAZ0vrtmhPRVi6ghL3rCeRZI8cdNYFiV5xeV3HbE7rlZZNmGH8BVccwWt8/ED0QOHA==}
    engines: {node: ^18.20.0 || ^20.10.0 || >=22.0.0}
    peerDependencies:
      '@babel/core': ^7.12.0
      webpack: '>=5.61.0'

  babel-loader@8.4.1:
    resolution: {integrity: sha512-nXzRChX+Z1GoE6yWavBQg6jDslyFF3SDjl2paADuoQtQW10JqShJt62R6eJQ5m/pjJFDT8xgKIWSP85OY8eXeA==}
    engines: {node: '>= 8.9'}

@@ -7311,8 +7318,8 @@ packages:
      react: '>= 16.3.0'
      react-dom: '>= 16.3.0'

  react-easy-crop@5.5.0:
    resolution: {integrity: sha512-OZzU+yXMhe69vLkDex+5QxcfT94FdcgVCyW2dBUw35ZoC3Is42TUxUy04w8nH1mfMKaizVdC3rh/wUfNW1mK4w==}
  react-easy-crop@5.5.3:
    resolution: {integrity: sha512-iKwFTnAsq+IVuyF6N0Q3zjRx9DG1NMySkwWxVfM/xAOeHYH1vhvM+V2kFiq5HOIQGWouITjfltCx54mbDpMpmA==}
    peerDependencies:
      react: '>=16.4.0'
      react-dom: '>=16.4.0'

@@ -12843,6 +12850,12 @@ snapshots:
    transitivePeerDependencies:
      - supports-color

  babel-loader@10.0.0(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)):
    dependencies:
      '@babel/core': 7.28.3
      find-up: 5.0.0
      webpack: 5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)

  babel-loader@8.4.1(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)):
    dependencies:
      '@babel/core': 7.28.3

@@ -17087,7 +17100,7 @@ snapshots:
      react: 19.1.1
      react-dom: 19.1.1(react@19.1.1)

  react-easy-crop@5.5.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1):
  react-easy-crop@5.5.3(react-dom@19.1.1(react@19.1.1))(react@19.1.1):
    dependencies:
      normalize-wheel: 1.0.1
      react: 19.1.1