From bbbb6e04cb0117d707bb3abb2bf3a9be47f317a6 Mon Sep 17 00:00:00 2001 From: yihong Date: Sun, 5 Oct 2025 12:43:40 +0800 Subject: [PATCH 01/31] fix: delete useless db session commit (#26572) Signed-off-by: yihong0618 --- api/services/workflow/workflow_converter.py | 1 - 1 file changed, 1 deletion(-) diff --git a/api/services/workflow/workflow_converter.py b/api/services/workflow/workflow_converter.py index dccd891981..ce7d16b3bd 100644 --- a/api/services/workflow/workflow_converter.py +++ b/api/services/workflow/workflow_converter.py @@ -79,7 +79,6 @@ class WorkflowConverter: new_app.updated_by = account.id db.session.add(new_app) db.session.flush() - db.session.commit() workflow.app_id = new_app.id db.session.commit() From 00fb468f2ed657baa1e8f109746428062a67135a Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Sun, 5 Oct 2025 12:44:40 +0800 Subject: [PATCH 02/31] Feature add test containers mail email code login task (#26580) Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../conftest.py | 41 +- .../tasks/test_mail_email_code_login_task.py | 598 ++++++++++++++++++ 2 files changed, 630 insertions(+), 9 deletions(-) create mode 100644 api/tests/test_containers_integration_tests/tasks/test_mail_email_code_login_task.py diff --git a/api/tests/test_containers_integration_tests/conftest.py b/api/tests/test_containers_integration_tests/conftest.py index 243c8d1d62..180ee1c963 100644 --- a/api/tests/test_containers_integration_tests/conftest.py +++ b/api/tests/test_containers_integration_tests/conftest.py @@ -18,6 +18,7 @@ from flask.testing import FlaskClient from sqlalchemy import Engine, text from sqlalchemy.orm import Session from testcontainers.core.container import DockerContainer +from testcontainers.core.network import Network from testcontainers.core.waiting_utils import wait_for_logs from testcontainers.postgres import PostgresContainer from testcontainers.redis import RedisContainer @@ -41,6 +42,7 @@ class DifyTestContainers: def __init__(self): """Initialize container management with default configurations.""" + self.network: Network | None = None self.postgres: PostgresContainer | None = None self.redis: RedisContainer | None = None self.dify_sandbox: DockerContainer | None = None @@ -62,12 +64,18 @@ class DifyTestContainers: logger.info("Starting test containers for Dify integration tests...") + # Create Docker network for container communication + logger.info("Creating Docker network for container communication...") + self.network = Network() + self.network.create() + logger.info("Docker network created successfully with name: %s", self.network.name) + # Start PostgreSQL container for main application database # PostgreSQL is used for storing user data, workflows, and application state logger.info("Initializing PostgreSQL container...") self.postgres = PostgresContainer( image="postgres:14-alpine", - ) + ).with_network(self.network) self.postgres.start() db_host = self.postgres.get_container_host_ip() db_port = self.postgres.get_exposed_port(5432) @@ -137,7 +145,7 @@ class DifyTestContainers: # Start Redis container for caching and session management # Redis is used for storing session data, cache entries, and temporary data logger.info("Initializing Redis container...") - self.redis = RedisContainer(image="redis:6-alpine", port=6379) + 
self.redis = RedisContainer(image="redis:6-alpine", port=6379).with_network(self.network) self.redis.start() redis_host = self.redis.get_container_host_ip() redis_port = self.redis.get_exposed_port(6379) @@ -153,7 +161,7 @@ class DifyTestContainers: # Start Dify Sandbox container for code execution environment # Dify Sandbox provides a secure environment for executing user code logger.info("Initializing Dify Sandbox container...") - self.dify_sandbox = DockerContainer(image="langgenius/dify-sandbox:latest") + self.dify_sandbox = DockerContainer(image="langgenius/dify-sandbox:latest").with_network(self.network) self.dify_sandbox.with_exposed_ports(8194) self.dify_sandbox.env = { "API_KEY": "test_api_key", @@ -173,22 +181,28 @@ class DifyTestContainers: # Start Dify Plugin Daemon container for plugin management # Dify Plugin Daemon provides plugin lifecycle management and execution logger.info("Initializing Dify Plugin Daemon container...") - self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.3.0-local") + self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.3.0-local").with_network( + self.network + ) self.dify_plugin_daemon.with_exposed_ports(5002) + # Get container internal network addresses + postgres_container_name = self.postgres.get_wrapped_container().name + redis_container_name = self.redis.get_wrapped_container().name + self.dify_plugin_daemon.env = { - "DB_HOST": db_host, - "DB_PORT": str(db_port), + "DB_HOST": postgres_container_name, # Use container name for internal network communication + "DB_PORT": "5432", # Use internal port "DB_USERNAME": self.postgres.username, "DB_PASSWORD": self.postgres.password, "DB_DATABASE": "dify_plugin", - "REDIS_HOST": redis_host, - "REDIS_PORT": str(redis_port), + "REDIS_HOST": redis_container_name, # Use container name for internal network communication + "REDIS_PORT": "6379", # Use internal port "REDIS_PASSWORD": "", "SERVER_PORT": "5002", "SERVER_KEY": "test_plugin_daemon_key", "MAX_PLUGIN_PACKAGE_SIZE": "52428800", "PPROF_ENABLED": "false", - "DIFY_INNER_API_URL": f"http://{db_host}:5001", + "DIFY_INNER_API_URL": f"http://{postgres_container_name}:5001", "DIFY_INNER_API_KEY": "test_inner_api_key", "PLUGIN_REMOTE_INSTALLING_HOST": "0.0.0.0", "PLUGIN_REMOTE_INSTALLING_PORT": "5003", @@ -253,6 +267,15 @@ class DifyTestContainers: # Log error but don't fail the test cleanup logger.warning("Failed to stop container %s: %s", container, e) + # Stop and remove the network + if self.network: + try: + logger.info("Removing Docker network...") + self.network.remove() + logger.info("Successfully removed Docker network") + except Exception as e: + logger.warning("Failed to remove Docker network: %s", e) + self._containers_started = False logger.info("All test containers stopped and cleaned up successfully") diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_email_code_login_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_email_code_login_task.py new file mode 100644 index 0000000000..e6a804784a --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_email_code_login_task.py @@ -0,0 +1,598 @@ +""" +TestContainers-based integration tests for send_email_code_login_mail_task. + +This module provides comprehensive integration tests for the email code login mail task +using TestContainers infrastructure. 
The tests ensure that the task properly sends +email verification codes for login with internationalization support and handles +various error scenarios in a real database environment. + +All tests use the testcontainers infrastructure to ensure proper database isolation +and realistic testing scenarios with actual PostgreSQL and Redis instances. +""" + +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from libs.email_i18n import EmailType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_email_code_login import send_email_code_login_mail_task + + +class TestSendEmailCodeLoginMailTask: + """ + Comprehensive integration tests for send_email_code_login_mail_task using testcontainers. + + This test class covers all major functionality of the email code login mail task: + - Successful email sending with different languages + - Email service integration and template rendering + - Error handling for various failure scenarios + - Performance metrics and logging verification + - Edge cases and boundary conditions + + All tests use the testcontainers infrastructure to ensure proper database isolation + and realistic testing environment with actual database interactions. + """ + + @pytest.fixture(autouse=True) + def cleanup_database(self, db_session_with_containers): + """Clean up database before each test to ensure isolation.""" + from extensions.ext_redis import redis_client + + # Clear all test data + db_session_with_containers.query(TenantAccountJoin).delete() + db_session_with_containers.query(Tenant).delete() + db_session_with_containers.query(Account).delete() + db_session_with_containers.commit() + + # Clear Redis cache + redis_client.flushdb() + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_email_code_login.mail") as mock_mail, + patch("tasks.mail_email_code_login.get_email_i18n_service") as mock_email_service, + ): + # Setup default mock returns + mock_mail.is_inited.return_value = True + + # Mock email service + mock_email_service_instance = MagicMock() + mock_email_service_instance.send_email.return_value = None + mock_email_service.return_value = mock_email_service_instance + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "email_service_instance": mock_email_service_instance, + } + + def _create_test_account(self, db_session_with_containers, fake=None): + """ + Helper method to create a test account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + fake: Faker instance for generating test data + + Returns: + Account: Created account instance + """ + if fake is None: + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + + db_session_with_containers.add(account) + db_session_with_containers.commit() + + return account + + def _create_test_tenant_and_account(self, db_session_with_containers, fake=None): + """ + Helper method to create a test tenant and account for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + fake: Faker instance for generating test data + + Returns: + tuple: (Account, Tenant) created instances + """ + if fake is None: + fake = Faker() + + # Create account using the existing helper method + account = self._create_test_account(db_session_with_containers, fake) + + # Create tenant + tenant = Tenant( + name=fake.company(), + plan="basic", + status="active", + ) + + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + + # Create tenant-account relationship + tenant_account_join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER, + ) + + db_session_with_containers.add(tenant_account_join) + db_session_with_containers.commit() + + return account, tenant + + def test_send_email_code_login_mail_task_success_english( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful email code login mail sending in English. + + This test verifies that the task can successfully: + 1. Send email code login mail with English language + 2. Use proper email service integration + 3. Pass correct template context to email service + 4. Log performance metrics correctly + 5. Complete task execution without errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_mail = mock_external_service_dependencies["mail"] + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify mail service was checked for initialization + mock_mail.is_inited.assert_called_once() + + # Verify email service was called with correct parameters + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_email_code_login_mail_task_success_chinese( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful email code login mail sending in Chinese. + + This test verifies that the task can successfully: + 1. Send email code login mail with Chinese language + 2. Handle different language codes properly + 3. Use correct template context for Chinese emails + 4. Complete task execution without errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "789012" + test_language = "zh-Hans" + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called with Chinese language + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_email_code_login_mail_task_success_multiple_languages( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful email code login mail sending with multiple languages. 
+ + This test verifies that the task can successfully: + 1. Handle various language codes correctly + 2. Send emails with different language configurations + 3. Maintain proper template context for each language + 4. Complete multiple task executions without conflicts + """ + # Arrange: Setup test data + fake = Faker() + test_languages = ["en-US", "zh-Hans", "zh-CN", "ja-JP", "ko-KR"] + test_emails = [fake.email() for _ in test_languages] + test_codes = [fake.numerify("######") for _ in test_languages] + + # Act: Execute the task for each language + for i, language in enumerate(test_languages): + send_email_code_login_mail_task( + language=language, + to=test_emails[i], + code=test_codes[i], + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called for each language + assert mock_email_service_instance.send_email.call_count == len(test_languages) + + # Verify each call had correct parameters + for i, language in enumerate(test_languages): + call_args = mock_email_service_instance.send_email.call_args_list[i] + assert call_args[1]["email_type"] == EmailType.EMAIL_CODE_LOGIN + assert call_args[1]["language_code"] == language + assert call_args[1]["to"] == test_emails[i] + assert call_args[1]["template_context"]["code"] == test_codes[i] + + def test_send_email_code_login_mail_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task when mail service is not initialized. + + This test verifies that the task can properly: + 1. Check mail service initialization status + 2. Return early when mail is not initialized + 3. Not attempt to send email when service is unavailable + 4. Handle gracefully without errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Mock mail service as not initialized + mock_mail = mock_external_service_dependencies["mail"] + mock_mail.is_inited.return_value = False + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify mail service was checked for initialization + mock_mail.is_inited.assert_called_once() + + # Verify email service was not called + mock_email_service_instance.send_email.assert_not_called() + + def test_send_email_code_login_mail_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task when email service raises an exception. + + This test verifies that the task can properly: + 1. Handle email service exceptions gracefully + 2. Log appropriate error messages + 3. Continue execution without crashing + 4. 
Maintain proper error handling + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Mock email service to raise an exception + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.send_email.side_effect = Exception("Email service unavailable") + + # Act: Execute the task - it should handle the exception gracefully + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_mail = mock_external_service_dependencies["mail"] + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify mail service was checked for initialization + mock_mail.is_inited.assert_called_once() + + # Verify email service was called (and failed) + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_email_code_login_mail_task_invalid_parameters( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with invalid parameters. + + This test verifies that the task can properly: + 1. Handle empty or None email addresses + 2. Process empty or None verification codes + 3. Handle invalid language codes + 4. Maintain proper error handling for invalid inputs + """ + # Arrange: Setup test data + fake = Faker() + test_language = "en-US" + + # Test cases for invalid parameters + invalid_test_cases = [ + {"email": "", "code": "123456", "description": "empty email"}, + {"email": None, "code": "123456", "description": "None email"}, + {"email": fake.email(), "code": "", "description": "empty code"}, + {"email": fake.email(), "code": None, "description": "None code"}, + {"email": "invalid-email", "code": "123456", "description": "invalid email format"}, + ] + + for test_case in invalid_test_cases: + # Reset mocks for each test case + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.reset_mock() + + # Act: Execute the task with invalid parameters + send_email_code_login_mail_task( + language=test_language, + to=test_case["email"], + code=test_case["code"], + ) + + # Assert: Verify that email service was still called + # The task should pass parameters to email service as-is + # and let the email service handle validation + mock_email_service_instance.send_email.assert_called_once() + + def test_send_email_code_login_mail_task_edge_cases( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with edge cases and boundary conditions. + + This test verifies that the task can properly: + 1. Handle very long email addresses + 2. Process very long verification codes + 3. Handle special characters in parameters + 4. 
Process extreme language codes + """ + # Arrange: Setup test data + fake = Faker() + test_language = "en-US" + + # Edge case test data + edge_cases = [ + { + "email": "a" * 100 + "@example.com", # Very long email + "code": "1" * 20, # Very long code + "description": "very long email and code", + }, + { + "email": "test+tag@example.com", # Email with special characters + "code": "123-456", # Code with special characters + "description": "special characters", + }, + { + "email": "test@sub.domain.example.com", # Complex domain + "code": "000000", # All zeros + "description": "complex domain and all zeros code", + }, + { + "email": "test@example.co.uk", # International domain + "code": "999999", # All nines + "description": "international domain and all nines code", + }, + ] + + for test_case in edge_cases: + # Reset mocks for each test case + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.reset_mock() + + # Act: Execute the task with edge case data + send_email_code_login_mail_task( + language=test_language, + to=test_case["email"], + code=test_case["code"], + ) + + # Assert: Verify that email service was called with edge case data + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_case["email"], + template_context={ + "to": test_case["email"], + "code": test_case["code"], + }, + ) + + def test_send_email_code_login_mail_task_database_integration( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with database integration. + + This test verifies that the task can properly: + 1. Work with real database connections + 2. Handle database session management + 3. Maintain proper database state + 4. Complete without database-related errors + """ + # Arrange: Setup test data with database + fake = Faker() + account, tenant = self._create_test_tenant_and_account(db_session_with_containers, fake) + + test_email = account.email + test_code = "123456" + test_language = "en-US" + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called with database account email + mock_email_service_instance.send_email.assert_called_once_with( + email_type=EmailType.EMAIL_CODE_LOGIN, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + # Verify database state is maintained + db_session_with_containers.refresh(account) + assert account.email == test_email + assert account.status == "active" + + def test_send_email_code_login_mail_task_redis_integration( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email code login mail task with Redis integration. + + This test verifies that the task can properly: + 1. Work with Redis cache connections + 2. Handle Redis operations without errors + 3. Maintain proper cache state + 4. 
Complete without Redis-related errors + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Setup Redis cache data + from extensions.ext_redis import redis_client + + cache_key = f"email_code_login_test_{test_email}" + redis_client.set(cache_key, "test_value", ex=300) + + # Act: Execute the task + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify expected outcomes + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + + # Verify email service was called + mock_email_service_instance.send_email.assert_called_once() + + # Verify Redis cache is still accessible + assert redis_client.exists(cache_key) == 1 + assert redis_client.get(cache_key) == b"test_value" + + # Clean up Redis cache + redis_client.delete(cache_key) + + def test_send_email_code_login_mail_task_error_handling_comprehensive( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test comprehensive error handling for email code login mail task. + + This test verifies that the task can properly: + 1. Handle various types of exceptions + 2. Log appropriate error messages + 3. Continue execution despite errors + 4. Maintain proper error reporting + """ + # Arrange: Setup test data + fake = Faker() + test_email = fake.email() + test_code = "123456" + test_language = "en-US" + + # Test different exception types + exception_types = [ + ("ValueError", ValueError("Invalid email format")), + ("RuntimeError", RuntimeError("Service unavailable")), + ("ConnectionError", ConnectionError("Network error")), + ("TimeoutError", TimeoutError("Request timeout")), + ("Exception", Exception("Generic error")), + ] + + for error_name, exception in exception_types: + # Reset mocks for each test case + mock_email_service_instance = mock_external_service_dependencies["email_service_instance"] + mock_email_service_instance.reset_mock() + mock_email_service_instance.send_email.side_effect = exception + + # Mock logging to capture error messages + with patch("tasks.mail_email_code_login.logger") as mock_logger: + # Act: Execute the task - it should handle the exception gracefully + send_email_code_login_mail_task( + language=test_language, + to=test_email, + code=test_code, + ) + + # Assert: Verify error handling + # Verify email service was called (and failed) + mock_email_service_instance.send_email.assert_called_once() + + # Verify error was logged + error_calls = [ + call + for call in mock_logger.exception.call_args_list + if f"Send email code login mail to {test_email} failed" in str(call) + ] + # Check if any exception call was made (the exact message format may vary) + assert mock_logger.exception.call_count >= 1, f"Error should be logged for {error_name}" + + # Reset side effect for next iteration + mock_email_service_instance.send_email.side_effect = None From b1d189324ab22c95758cf1073b494e9e479167bc Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Sun, 5 Oct 2025 12:47:17 +0800 Subject: [PATCH 03/31] Feature add test containers mail account deletion task 1858 (#26555) Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../tasks/test_clean_dataset_task.py | 127 --------- .../test_enable_segments_to_index_task.py | 55 ---- .../tasks/test_mail_account_deletion_task.py 
| 242 ++++++++++++++++++ 3 files changed, 242 insertions(+), 182 deletions(-) create mode 100644 api/tests/test_containers_integration_tests/tasks/test_mail_account_deletion_task.py diff --git a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py index e0c2da63b9..99061d215f 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py @@ -784,133 +784,6 @@ class TestCleanDatasetTask: print(f"Total cleanup time: {cleanup_duration:.3f} seconds") print(f"Average time per document: {cleanup_duration / len(documents):.3f} seconds") - def test_clean_dataset_task_concurrent_cleanup_scenarios( - self, db_session_with_containers, mock_external_service_dependencies - ): - """ - Test dataset cleanup with concurrent cleanup scenarios and race conditions. - - This test verifies that the task can properly: - 1. Handle multiple cleanup operations on the same dataset - 2. Prevent data corruption during concurrent access - 3. Maintain data consistency across multiple cleanup attempts - 4. Handle race conditions gracefully - 5. Ensure idempotent cleanup operations - """ - # Create test data - account, tenant = self._create_test_account_and_tenant(db_session_with_containers) - dataset = self._create_test_dataset(db_session_with_containers, account, tenant) - document = self._create_test_document(db_session_with_containers, account, tenant, dataset) - segment = self._create_test_segment(db_session_with_containers, account, tenant, dataset, document) - upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant) - - # Update document with file reference - import json - - document.data_source_info = json.dumps({"upload_file_id": upload_file.id}) - from extensions.ext_database import db - - db.session.commit() - - # Save IDs for verification - dataset_id = dataset.id - tenant_id = tenant.id - upload_file_id = upload_file.id - - # Mock storage to simulate slow operations - mock_storage = mock_external_service_dependencies["storage"] - original_delete = mock_storage.delete - - def slow_delete(key): - import time - - time.sleep(0.1) # Simulate slow storage operation - return original_delete(key) - - mock_storage.delete.side_effect = slow_delete - - # Execute multiple cleanup operations concurrently - import threading - - cleanup_results = [] - cleanup_errors = [] - - def run_cleanup(): - try: - clean_dataset_task( - dataset_id=dataset_id, - tenant_id=tenant_id, - indexing_technique="high_quality", - index_struct='{"type": "paragraph"}', - collection_binding_id=str(uuid.uuid4()), - doc_form="paragraph_index", - ) - cleanup_results.append("success") - except Exception as e: - cleanup_errors.append(str(e)) - - # Start multiple cleanup threads - threads = [] - for i in range(3): - thread = threading.Thread(target=run_cleanup) - threads.append(thread) - thread.start() - - # Wait for all threads to complete - for thread in threads: - thread.join() - - # Verify results - # Check that all documents were deleted (only once) - remaining_documents = db.session.query(Document).filter_by(dataset_id=dataset_id).all() - assert len(remaining_documents) == 0 - - # Check that all segments were deleted (only once) - remaining_segments = db.session.query(DocumentSegment).filter_by(dataset_id=dataset_id).all() - assert len(remaining_segments) == 0 - - # Check that upload file was deleted (only 
once) - # Note: In concurrent scenarios, the first thread deletes documents and segments, - # subsequent threads may not find the related data to clean up upload files - # This demonstrates the idempotent nature of the cleanup process - remaining_files = db.session.query(UploadFile).filter_by(id=upload_file_id).all() - # The upload file should be deleted by the first successful cleanup operation - # However, in concurrent scenarios, this may not always happen due to race conditions - # This test demonstrates the idempotent nature of the cleanup process - if len(remaining_files) > 0: - print(f"Warning: Upload file {upload_file_id} was not deleted in concurrent scenario") - print("This is expected behavior demonstrating the idempotent nature of cleanup") - # We don't assert here as the behavior depends on timing and race conditions - - # Verify that storage.delete was called (may be called multiple times in concurrent scenarios) - # In concurrent scenarios, storage operations may be called multiple times due to race conditions - assert mock_storage.delete.call_count > 0 - - # Verify that index processor was called (may be called multiple times in concurrent scenarios) - mock_index_processor = mock_external_service_dependencies["index_processor"] - assert mock_index_processor.clean.call_count > 0 - - # Check cleanup results - assert len(cleanup_results) == 3, "All cleanup operations should complete" - assert len(cleanup_errors) == 0, "No cleanup errors should occur" - - # Verify idempotency by running cleanup again on the same dataset - # This should not perform any additional operations since data is already cleaned - clean_dataset_task( - dataset_id=dataset_id, - tenant_id=tenant_id, - indexing_technique="high_quality", - index_struct='{"type": "paragraph"}', - collection_binding_id=str(uuid.uuid4()), - doc_form="paragraph_index", - ) - - # Verify that no additional storage operations were performed - # Note: In concurrent scenarios, the exact count may vary due to race conditions - print(f"Final storage delete calls: {mock_storage.delete.call_count}") - print(f"Final index processor calls: {mock_index_processor.clean.call_count}") - print("Note: Multiple calls in concurrent scenarios are expected due to race conditions") - def test_clean_dataset_task_storage_exception_handling( self, db_session_with_containers, mock_external_service_dependencies ): diff --git a/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py index 0c03828ec5..38056496e7 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_enable_segments_to_index_task.py @@ -148,61 +148,6 @@ class TestEnableSegmentsToIndexTask: db.session.commit() return segments - def test_enable_segments_to_index_success(self, db_session_with_containers, mock_external_service_dependencies): - """ - Test successful segments indexing with paragraph index type. 
- - This test verifies: - - Proper dataset and document retrieval from database - - Correct segment processing and document creation - - Index processor integration - - Database state updates - - Redis cache key deletion - """ - # Arrange: Create test data - dataset, document = self._create_test_dataset_and_document( - db_session_with_containers, mock_external_service_dependencies - ) - segments = self._create_test_segments(db_session_with_containers, document, dataset) - - # Set up Redis cache keys to simulate indexing in progress - segment_ids = [segment.id for segment in segments] - for segment in segments: - indexing_cache_key = f"segment_{segment.id}_indexing" - redis_client.set(indexing_cache_key, "processing", ex=300) # 5 minutes expiry - - # Verify cache keys exist - for segment in segments: - indexing_cache_key = f"segment_{segment.id}_indexing" - assert redis_client.exists(indexing_cache_key) == 1 - - # Act: Execute the task - enable_segments_to_index_task(segment_ids, dataset.id, document.id) - - # Assert: Verify the expected outcomes - # Verify index processor was called correctly - mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX) - mock_external_service_dependencies["index_processor"].load.assert_called_once() - - # Verify the load method was called with correct parameters - call_args = mock_external_service_dependencies["index_processor"].load.call_args - assert call_args is not None - documents = call_args[0][1] # Second argument should be documents list - assert len(documents) == 3 - - # Verify document structure - for i, doc in enumerate(documents): - assert doc.page_content == segments[i].content - assert doc.metadata["doc_id"] == segments[i].index_node_id - assert doc.metadata["doc_hash"] == segments[i].index_node_hash - assert doc.metadata["document_id"] == document.id - assert doc.metadata["dataset_id"] == dataset.id - - # Verify Redis cache keys were deleted - for segment in segments: - indexing_cache_key = f"segment_{segment.id}_indexing" - assert redis_client.exists(indexing_cache_key) == 0 - def test_enable_segments_to_index_with_different_index_type( self, db_session_with_containers, mock_external_service_dependencies ): diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_account_deletion_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_account_deletion_task.py new file mode 100644 index 0000000000..2f38246787 --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_account_deletion_task.py @@ -0,0 +1,242 @@ +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from extensions.ext_database import db +from libs.email_i18n import EmailType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_account_deletion_task import send_account_deletion_verification_code, send_deletion_success_task + + +class TestMailAccountDeletionTask: + """Integration tests for mail account deletion tasks using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_account_deletion_task.mail") as mock_mail, + patch("tasks.mail_account_deletion_task.get_email_i18n_service") as mock_get_email_service, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email service + mock_email_service = MagicMock() + 
mock_get_email_service.return_value = mock_email_service + + yield { + "mail": mock_mail, + "get_email_service": mock_get_email_service, + "email_service": mock_email_service, + } + + def _create_test_account(self, db_session_with_containers): + """ + Helper method to create a test account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + + Returns: + Account: Created account instance + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + db.session.add(account) + db.session.commit() + + # Create tenant + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db.session.add(tenant) + db.session.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db.session.add(join) + db.session.commit() + + return account + + def test_send_deletion_success_task_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful account deletion success email sending. + + This test verifies: + - Proper email service initialization check + - Correct email service method calls + - Template context is properly formatted + - Email type is correctly specified + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_email = account.email + test_language = "en-US" + + # Act: Execute the task + send_deletion_success_task(test_email, test_language) + + # Assert: Verify the expected outcomes + # Verify mail service was checked + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + + # Verify email service was retrieved + mock_external_service_dependencies["get_email_service"].assert_called_once() + + # Verify email was sent with correct parameters + mock_external_service_dependencies["email_service"].send_email.assert_called_once_with( + email_type=EmailType.ACCOUNT_DELETION_SUCCESS, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "email": test_email, + }, + ) + + def test_send_deletion_success_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test account deletion success email when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Setup mail service to return not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + account = self._create_test_account(db_session_with_containers) + test_email = account.email + + # Act: Execute the task + send_deletion_success_task(test_email) + + # Assert: Verify no email service calls were made + mock_external_service_dependencies["get_email_service"].assert_not_called() + mock_external_service_dependencies["email_service"].send_email.assert_not_called() + + def test_send_deletion_success_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test account deletion success email when email service raises exception. 
+ + This test verifies: + - Exception is properly caught and logged + - Task completes without raising exception + - Error logging is recorded + """ + # Arrange: Setup email service to raise exception + mock_external_service_dependencies["email_service"].send_email.side_effect = Exception("Email service failed") + account = self._create_test_account(db_session_with_containers) + test_email = account.email + + # Act: Execute the task (should not raise exception) + send_deletion_success_task(test_email) + + # Assert: Verify email service was called but exception was handled + mock_external_service_dependencies["email_service"].send_email.assert_called_once() + + def test_send_account_deletion_verification_code_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful account deletion verification code email sending. + + This test verifies: + - Proper email service initialization check + - Correct email service method calls + - Template context includes verification code + - Email type is correctly specified + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_email = account.email + test_code = "123456" + test_language = "en-US" + + # Act: Execute the task + send_account_deletion_verification_code(test_email, test_code, test_language) + + # Assert: Verify the expected outcomes + # Verify mail service was checked + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + + # Verify email service was retrieved + mock_external_service_dependencies["get_email_service"].assert_called_once() + + # Verify email was sent with correct parameters + mock_external_service_dependencies["email_service"].send_email.assert_called_once_with( + email_type=EmailType.ACCOUNT_DELETION_VERIFICATION, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "code": test_code, + }, + ) + + def test_send_account_deletion_verification_code_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test account deletion verification code email when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Setup mail service to return not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + account = self._create_test_account(db_session_with_containers) + test_email = account.email + test_code = "123456" + + # Act: Execute the task + send_account_deletion_verification_code(test_email, test_code) + + # Assert: Verify no email service calls were made + mock_external_service_dependencies["get_email_service"].assert_not_called() + mock_external_service_dependencies["email_service"].send_email.assert_not_called() + + def test_send_account_deletion_verification_code_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test account deletion verification code email when email service raises exception. 
+ + This test verifies: + - Exception is properly caught and logged + - Task completes without raising exception + - Error logging is recorded + """ + # Arrange: Setup email service to raise exception + mock_external_service_dependencies["email_service"].send_email.side_effect = Exception("Email service failed") + account = self._create_test_account(db_session_with_containers) + test_email = account.email + test_code = "123456" + + # Act: Execute the task (should not raise exception) + send_account_deletion_verification_code(test_email, test_code) + + # Assert: Verify email service was called but exception was handled + mock_external_service_dependencies["email_service"].send_email.assert_called_once() From 7b7d33223926cc0cfefc8d0ea32e10aebf262e54 Mon Sep 17 00:00:00 2001 From: lyzno1 <92089059+lyzno1@users.noreply.github.com> Date: Sun, 5 Oct 2025 12:47:32 +0800 Subject: [PATCH 04/31] refactor(docs): Reorganize documentation with standard language codes (#26534) --- README.md | 24 +++++----- README/README_AR.md => docs/ar-SA/README.md | 40 +++++++---------- README/README_BN.md => docs/bn-BD/README.md | 35 ++++++++------- .../de-DE/CONTRIBUTING.md | 2 +- README/README_DE.md => docs/de-DE/README.md | 37 ++++++++-------- .../es-ES/CONTRIBUTING.md | 2 +- README/README_ES.md => docs/es-ES/README.md | 44 ++++++++----------- .../fr-FR/CONTRIBUTING.md | 2 +- README/README_FR.md => docs/fr-FR/README.md | 44 ++++++++----------- .../ja-JP/CONTRIBUTING.md | 2 +- README/README_JA.md => docs/ja-JP/README.md | 36 ++++++++------- .../ko-KR/CONTRIBUTING.md | 2 +- README/README_KR.md => docs/ko-KR/README.md | 36 ++++++++------- .../pt-BR/CONTRIBUTING.md | 2 +- README/README_PT.md => docs/pt-BR/README.md | 38 ++++++++-------- README/README_SI.md => docs/sl-SI/README.md | 32 +++++++------- README/README_KL.md => docs/tlh/README.md | 37 ++++++++-------- .../tr-TR/CONTRIBUTING.md | 2 +- README/README_TR.md => docs/tr-TR/README.md | 36 ++++++++------- .../vi-VN/CONTRIBUTING.md | 2 +- README/README_VI.md => docs/vi-VN/README.md | 36 ++++++++------- .../zh-CN/CONTRIBUTING.md | 2 +- README/README_CN.md => docs/zh-CN/README.md | 36 ++++++++------- .../zh-TW/CONTRIBUTING.md | 2 +- README/README_TW.md => docs/zh-TW/README.md | 36 +++++++-------- 25 files changed, 282 insertions(+), 285 deletions(-) rename README/README_AR.md => docs/ar-SA/README.md (81%) rename README/README_BN.md => docs/bn-BD/README.md (85%) rename CONTRIBUTING/CONTRIBUTING_DE.md => docs/de-DE/CONTRIBUTING.md (96%) rename README/README_DE.md => docs/de-DE/README.md (80%) rename CONTRIBUTING/CONTRIBUTING_ES.md => docs/es-ES/CONTRIBUTING.md (96%) rename README/README_ES.md => docs/es-ES/README.md (79%) rename CONTRIBUTING/CONTRIBUTING_FR.md => docs/fr-FR/CONTRIBUTING.md (96%) rename README/README_FR.md => docs/fr-FR/README.md (79%) rename CONTRIBUTING/CONTRIBUTING_JA.md => docs/ja-JP/CONTRIBUTING.md (96%) rename README/README_JA.md => docs/ja-JP/README.md (79%) rename CONTRIBUTING/CONTRIBUTING_KR.md => docs/ko-KR/CONTRIBUTING.md (96%) rename README/README_KR.md => docs/ko-KR/README.md (79%) rename CONTRIBUTING/CONTRIBUTING_PT.md => docs/pt-BR/CONTRIBUTING.md (96%) rename README/README_PT.md => docs/pt-BR/README.md (78%) rename README/README_SI.md => docs/sl-SI/README.md (83%) rename README/README_KL.md => docs/tlh/README.md (79%) rename CONTRIBUTING/CONTRIBUTING_TR.md => docs/tr-TR/CONTRIBUTING.md (96%) rename README/README_TR.md => docs/tr-TR/README.md (79%) rename CONTRIBUTING/CONTRIBUTING_VI.md => docs/vi-VN/CONTRIBUTING.md (96%) rename 
README/README_VI.md => docs/vi-VN/README.md (80%)
 rename CONTRIBUTING/CONTRIBUTING_CN.md => docs/zh-CN/CONTRIBUTING.md (96%)
 rename README/README_CN.md => docs/zh-CN/README.md (79%)
 rename CONTRIBUTING/CONTRIBUTING_TW.md => docs/zh-TW/CONTRIBUTING.md (96%)
 rename README/README_TW.md => docs/zh-TW/README.md (80%)

diff --git a/README.md b/README.md
index 8159057f55..aadced582d 100644
--- a/README.md
+++ b/README.md
@@ -40,18 +40,18 @@
 README in English
-繁體中文文件
-简体中文版自述文件
-日本語のREADME
-README en Español
-README en Français
-README tlhIngan Hol
-README in Korean
-README بالعربية
-Türkçe README
-README Tiếng Việt
-README in Deutsch
-README in বাংলা
+繁體中文文件
+简体中文文件
+日本語のREADME
+README en Español
+README en Français
+README tlhIngan Hol
+README in Korean
+README بالعربية
+Türkçe README
+README Tiếng Việt
+README in Deutsch
+README in বাংলা
 Dify is an open-source platform for developing LLM applications. Its intuitive interface combines agentic AI workflows, RAG pipelines, agent capabilities, model management, observability features, and more—allowing you to quickly move from prototype to production.

diff --git a/README/README_AR.md b/docs/ar-SA/README.md
similarity index 81%
rename from README/README_AR.md
rename to docs/ar-SA/README.md
index df29db73da..afa494c5d3 100644
--- a/README/README_AR.md
+++ b/docs/ar-SA/README.md
@@ -1,4 +1,4 @@
-![cover-v5-optimized](../images/GitHub_README_if.png)
+![cover-v5-optimized](../../images/GitHub_README_if.png)
Dify Cloud ·
@@ -35,17 +35,19 @@
-README in English
-简体中文版自述文件
-日本語のREADME
-README en Español
-README en Français
-README tlhIngan Hol
-README in Korean
-README بالعربية
-Türkçe README
-README Tiếng Việt
-README in বাংলা
+README in English
+繁體中文文件
+简体中文文件
+日本語のREADME
+README en Español
+README en Français
+README tlhIngan Hol
+README in Korean
+README بالعربية
+Türkçe README
+README Tiếng Việt
+README in Deutsch
+README in বাংলা
@@ -97,7 +99,7 @@
-أسهل طريقة لبدء تشغيل خادم Dify هي تشغيل ملف [docker-compose.yml](docker/docker-compose.yaml) الخاص بنا. قبل تشغيل أمر التثبيت، تأكد من تثبيت [Docker](https://docs.docker.com/get-docker/) و [Docker Compose](https://docs.docker.com/compose/install/) على جهازك: +أسهل طريقة لبدء تشغيل خادم Dify هي تشغيل ملف [docker-compose.yml](../../docker/docker-compose.yaml) الخاص بنا. قبل تشغيل أمر التثبيت، تأكد من تثبيت [Docker](https://docs.docker.com/get-docker/) و [Docker Compose](https://docs.docker.com/compose/install/) على جهازك: ```bash cd docker @@ -111,7 +113,7 @@ docker compose up -d ## الخطوات التالية -إذا كنت بحاجة إلى تخصيص الإعدادات، فيرجى الرجوع إلى التعليقات في ملف [.env.example](docker/.env.example) وتحديث القيم المقابلة في ملف `.env`. بالإضافة إلى ذلك، قد تحتاج إلى إجراء تعديلات على ملف `docker-compose.yaml` نفسه، مثل تغيير إصدارات الصور أو تعيينات المنافذ أو نقاط تحميل وحدات التخزين، بناءً على بيئة النشر ومتطلباتك الخاصة. بعد إجراء أي تغييرات، يرجى إعادة تشغيل `docker-compose up -d`. يمكنك العثور على قائمة كاملة بمتغيرات البيئة المتاحة [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments). +إذا كنت بحاجة إلى تخصيص الإعدادات، فيرجى الرجوع إلى التعليقات في ملف [.env.example](../../docker/.env.example) وتحديث القيم المقابلة في ملف `.env`. بالإضافة إلى ذلك، قد تحتاج إلى إجراء تعديلات على ملف `docker-compose.yaml` نفسه، مثل تغيير إصدارات الصور أو تعيينات المنافذ أو نقاط تحميل وحدات التخزين، بناءً على بيئة النشر ومتطلباتك الخاصة. بعد إجراء أي تغييرات، يرجى إعادة تشغيل `docker-compose up -d`. يمكنك العثور على قائمة كاملة بمتغيرات البيئة المتاحة [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments). يوجد مجتمع خاص بـ [Helm Charts](https://helm.sh/) وملفات YAML التي تسمح بتنفيذ Dify على Kubernetes للنظام من الإيجابيات العلوية. @@ -185,12 +187,4 @@ docker compose up -d ## الرخصة -هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](../LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. - -## الكشف عن الأمان - -لحماية خصوصيتك، يرجى تجنب نشر مشكلات الأمان على GitHub. بدلاً من ذلك، أرسل أسئلتك إلى وسنقدم لك إجابة أكثر تفصيلاً. - -## الرخصة - -هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](../LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. +هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](../../LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية. diff --git a/README/README_BN.md b/docs/bn-BD/README.md similarity index 85% rename from README/README_BN.md rename to docs/bn-BD/README.md index b0a64a6cfe..318853a8de 100644 --- a/README/README_BN.md +++ b/docs/bn-BD/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
📌 ডিফাই ওয়ার্কফ্লো ফাইল আপলোড পরিচিতি: গুগল নোটবুক-এলএম পডকাস্ট পুনর্নির্মাণ
@@ -39,18 +39,19 @@
-README in English
-简体中文版自述文件
-日本語のREADME
-README en Español
-README en Français
-README tlhIngan Hol
-README in Korean
-README بالعربية
-Türkçe README
-README Tiếng Việt
-README in Deutsch
-README in বাংলা
+README in English
+繁體中文文件
+简体中文文件
+日本語のREADME
+README en Español
+README en Français
+README tlhIngan Hol
+README in Korean
+README بالعربية
+Türkçe README
+README Tiếng Việt
+README in Deutsch
+README in বাংলা
ডিফাই একটি ওপেন-সোর্স LLM অ্যাপ ডেভেলপমেন্ট প্ল্যাটফর্ম। এটি ইন্টুইটিভ ইন্টারফেস, এজেন্টিক AI ওয়ার্কফ্লো, RAG পাইপলাইন, এজেন্ট ক্যাপাবিলিটি, মডেল ম্যানেজমেন্ট, মনিটরিং সুবিধা এবং আরও অনেক কিছু একত্রিত করে, যা দ্রুত প্রোটোটাইপ থেকে প্রোডাকশন পর্যন্ত নিয়ে যেতে সহায়তা করে। @@ -64,7 +65,7 @@
-ডিফাই সার্ভার চালু করার সবচেয়ে সহজ উপায় [docker compose](docker/docker-compose.yaml) মাধ্যমে। নিম্নলিখিত কমান্ডগুলো ব্যবহার করে ডিফাই চালানোর আগে, নিশ্চিত করুন যে আপনার মেশিনে [Docker](https://docs.docker.com/get-docker/) এবং [Docker Compose](https://docs.docker.com/compose/install/) ইনস্টল করা আছে : +ডিফাই সার্ভার চালু করার সবচেয়ে সহজ উপায় [docker compose](../../docker/docker-compose.yaml) মাধ্যমে। নিম্নলিখিত কমান্ডগুলো ব্যবহার করে ডিফাই চালানোর আগে, নিশ্চিত করুন যে আপনার মেশিনে [Docker](https://docs.docker.com/get-docker/) এবং [Docker Compose](https://docs.docker.com/compose/install/) ইনস্টল করা আছে : ```bash cd dify @@ -128,7 +129,7 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## Advanced Setup -যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা। +যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](../../docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা। যেকোনো পরিবর্তন করার পর, অনুগ্রহ করে `docker-compose up -d` পুনরায় চালান। ভেরিয়েবলের সম্পূর্ণ তালিকা [এখানে] (https://docs.dify.ai/getting-started/install-self-hosted/environments) খুঁজে পেতে পারেন। যদি আপনি একটি হাইলি এভেইলেবল সেটআপ কনফিগার করতে চান, তাহলে কমিউনিটি [Helm Charts](https://helm.sh/) এবং YAML ফাইল রয়েছে যা Dify কে Kubernetes-এ ডিপ্লয় করার প্রক্রিয়া বর্ণনা করে। @@ -175,7 +176,7 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## Contributing -যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা] দেখুন (https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)। +যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন। একই সাথে, সোশ্যাল মিডিয়া এবং ইভেন্ট এবং কনফারেন্সে এটি শেয়ার করে Dify কে সমর্থন করুন। > আমরা ম্যান্ডারিন বা ইংরেজি ছাড়া অন্য ভাষায় Dify অনুবাদ করতে সাহায্য করার জন্য অবদানকারীদের খুঁজছি। আপনি যদি সাহায্য করতে আগ্রহী হন, তাহলে আরও তথ্যের জন্য [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) দেখুন এবং আমাদের [ডিসকর্ড কমিউনিটি সার্ভার](https://discord.gg/8Tpq4AcN9c) এর `গ্লোবাল-ইউজারস` চ্যানেলে আমাদের একটি মন্তব্য করুন। @@ -203,4 +204,4 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## লাইসেন্স -এই রিপোজিটরিটি [ডিফাই ওপেন সোর্স লাইসেন্স](../LICENSE) এর অধিনে , যা মূলত অ্যাপাচি ২.০, তবে কিছু অতিরিক্ত বিধিনিষেধ রয়েছে। +এই রিপোজিটরিটি [ডিফাই ওপেন সোর্স লাইসেন্স](../../LICENSE) এর অধিনে , যা মূলত অ্যাপাচি ২.০, তবে কিছু অতিরিক্ত বিধিনিষেধ রয়েছে। diff --git a/CONTRIBUTING/CONTRIBUTING_DE.md b/docs/de-DE/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_DE.md rename to docs/de-DE/CONTRIBUTING.md index c9e52c4fd7..db12006b30 100644 --- a/CONTRIBUTING/CONTRIBUTING_DE.md +++ b/docs/de-DE/CONTRIBUTING.md @@ -6,7 +6,7 @@ Wir müssen wendig sein und schnell liefern, aber wir möchten auch sicherstelle Dieser Leitfaden ist, wie Dify selbst, in ständiger Entwicklung. 
Wir sind dankbar für Ihr Verständnis, falls er manchmal hinter dem eigentlichen Projekt zurückbleibt, und begrüßen jedes Feedback zur Verbesserung. -Bitte nehmen Sie sich einen Moment Zeit, um unsere [Lizenz- und Mitwirkungsvereinbarung](../LICENSE) zu lesen. Die Community hält sich außerdem an den [Verhaltenskodex](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Bitte nehmen Sie sich einen Moment Zeit, um unsere [Lizenz- und Mitwirkungsvereinbarung](../../LICENSE) zu lesen. Die Community hält sich außerdem an den [Verhaltenskodex](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Bevor Sie loslegen diff --git a/README/README_DE.md b/docs/de-DE/README.md similarity index 80% rename from README/README_DE.md rename to docs/de-DE/README.md index d1a5837ab4..8907d914d3 100644 --- a/README/README_DE.md +++ b/docs/de-DE/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
📌 Einführung in Dify Workflow File Upload: Google NotebookLM Podcast nachbilden
@@ -39,18 +39,19 @@
-README in English
-简体中文版自述文件
-日本語のREADME
-README en Español
-README en Français
-README tlhIngan Hol
-README in Korean
-README بالعربية
-Türkçe README
-README Tiếng Việt
-README in Deutsch
-README in বাংলা
+README in English
+繁體中文文件
+简体中文文件
+日本語のREADME
+README en Español
+README en Français
+README tlhIngan Hol
+README in Korean
+README بالعربية
+Türkçe README
+README Tiếng Việt
+README in Deutsch
+README in বাংলা

Dify ist eine Open-Source-Plattform zur Entwicklung von LLM-Anwendungen. Ihre intuitive Benutzeroberfläche vereint agentenbasierte KI-Workflows, RAG-Pipelines, Agentenfunktionen, Modellverwaltung, Überwachungsfunktionen und mehr, sodass Sie schnell von einem Prototyp in die Produktion übergehen können. @@ -64,7 +65,7 @@ Dify ist eine Open-Source-Plattform zur Entwicklung von LLM-Anwendungen. Ihre in
-Der einfachste Weg, den Dify-Server zu starten, ist über [docker compose](docker/docker-compose.yaml). Stellen Sie vor dem Ausführen von Dify mit den folgenden Befehlen sicher, dass [Docker](https://docs.docker.com/get-docker/) und [Docker Compose](https://docs.docker.com/compose/install/) auf Ihrem System installiert sind: +Der einfachste Weg, den Dify-Server zu starten, ist über [docker compose](../../docker/docker-compose.yaml). Stellen Sie vor dem Ausführen von Dify mit den folgenden Befehlen sicher, dass [Docker](https://docs.docker.com/get-docker/) und [Docker Compose](https://docs.docker.com/compose/install/) auf Ihrem System installiert sind: ```bash cd dify @@ -127,7 +128,7 @@ Star Dify auf GitHub und lassen Sie sich sofort über neue Releases benachrichti ## Erweiterte Einstellungen -Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](../../docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments). Falls Sie eine hochverfügbare Konfiguration einrichten möchten, gibt es von der Community bereitgestellte [Helm Charts](https://helm.sh/) und YAML-Dateien, die es ermöglichen, Dify auf Kubernetes bereitzustellen. @@ -173,14 +174,14 @@ Stellen Sie Dify mit einem Klick in AKS bereit, indem Sie [Azure Devops Pipeline ## Contributing -Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_DE.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. +Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](./CONTRIBUTING.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. > Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen – außer Mandarin oder Englisch. Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c). 
## Gemeinschaft & Kontakt - [GitHub Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen. -- [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Am besten geeignet für: den Austausch von Anwendungen und den Austausch mit der Community. - [X(Twitter)](https://twitter.com/dify_ai). Am besten geeignet für: den Austausch von Anwendungen und den Austausch mit der Community. @@ -200,4 +201,4 @@ Um Ihre Privatsphäre zu schützen, vermeiden Sie es bitte, Sicherheitsprobleme ## Lizenz -Dieses Repository steht unter der [Dify Open Source License](../LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist. +Dieses Repository steht unter der [Dify Open Source License](../../LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist. diff --git a/CONTRIBUTING/CONTRIBUTING_ES.md b/docs/es-ES/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_ES.md rename to docs/es-ES/CONTRIBUTING.md index 764c678fb2..6cd80651c4 100644 --- a/CONTRIBUTING/CONTRIBUTING_ES.md +++ b/docs/es-ES/CONTRIBUTING.md @@ -6,7 +6,7 @@ Necesitamos ser ágiles y enviar rápidamente dado donde estamos, pero también Esta guía, como Dify mismo, es un trabajo en constante progreso. Agradecemos mucho tu comprensión si a veces se queda atrás del proyecto real, y damos la bienvenida a cualquier comentario para que podamos mejorar. -En términos de licencia, por favor tómate un minuto para leer nuestro breve [Acuerdo de Licencia y Colaborador](../LICENSE). La comunidad también se adhiere al [código de conducta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +En términos de licencia, por favor tómate un minuto para leer nuestro breve [Acuerdo de Licencia y Colaborador](../../LICENSE). La comunidad también se adhiere al [código de conducta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Antes de empezar diff --git a/README/README_ES.md b/docs/es-ES/README.md similarity index 79% rename from README/README_ES.md rename to docs/es-ES/README.md index 60f0a06868..b005691fea 100644 --- a/README/README_ES.md +++ b/docs/es-ES/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -108,7 +110,7 @@ Dale estrella a Dify en GitHub y serás notificado instantáneamente de las nuev
-La forma más fácil de iniciar el servidor de Dify es ejecutar nuestro archivo [docker-compose.yml](docker/docker-compose.yaml). Antes de ejecutar el comando de instalación, asegúrate de que [Docker](https://docs.docker.com/get-docker/) y [Docker Compose](https://docs.docker.com/compose/install/) estén instalados en tu máquina: +La forma más fácil de iniciar el servidor de Dify es ejecutar nuestro archivo [docker-compose.yml](../../docker/docker-compose.yaml). Antes de ejecutar el comando de instalación, asegúrate de que [Docker](https://docs.docker.com/get-docker/) y [Docker Compose](https://docs.docker.com/compose/install/) estén instalados en tu máquina: ```bash cd docker @@ -122,7 +124,7 @@ Después de ejecutarlo, puedes acceder al panel de control de Dify en tu navegad ## Próximos pasos -Si necesita personalizar la configuración, consulte los comentarios en nuestro archivo [.env.example](docker/.env.example) y actualice los valores correspondientes en su archivo `.env`. Además, es posible que deba realizar ajustes en el propio archivo `docker-compose.yaml`, como cambiar las versiones de las imágenes, las asignaciones de puertos o los montajes de volúmenes, según su entorno de implementación y requisitos específicos. Después de realizar cualquier cambio, vuelva a ejecutar `docker-compose up -d`. Puede encontrar la lista completa de variables de entorno disponibles [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Si necesita personalizar la configuración, consulte los comentarios en nuestro archivo [.env.example](../../docker/.env.example) y actualice los valores correspondientes en su archivo `.env`. Además, es posible que deba realizar ajustes en el propio archivo `docker-compose.yaml`, como cambiar las versiones de las imágenes, las asignaciones de puertos o los montajes de volúmenes, según su entorno de implementación y requisitos específicos. Después de realizar cualquier cambio, vuelva a ejecutar `docker-compose up -d`. Puede encontrar la lista completa de variables de entorno disponibles [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments). @@ -170,7 +172,7 @@ Implementa Dify en AKS con un clic usando [Azure Devops Pipeline Helm Chart by @ ## Contribuir -Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_ES.md). +Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](./CONTRIBUTING.md). Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en eventos y conferencias. > Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c). @@ -184,7 +186,7 @@ Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en ## Comunidad y Contacto - [Discusión en GitHub](https://github.com/langgenius/dify/discussions). Lo mejor para: compartir comentarios y hacer preguntas.
-- [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. - [X(Twitter)](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. @@ -198,12 +200,4 @@ Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En ## Licencia -Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](../LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. - -## Divulgación de Seguridad - -Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En su lugar, envía tus preguntas a security@dify.ai y te proporcionaremos una respuesta más detallada. - -## Licencia - -Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](../LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. +Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](../../LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. diff --git a/CONTRIBUTING/CONTRIBUTING_FR.md b/docs/fr-FR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_FR.md rename to docs/fr-FR/CONTRIBUTING.md index 8df491a0a0..74e44ca734 100644 --- a/CONTRIBUTING/CONTRIBUTING_FR.md +++ b/docs/fr-FR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Nous devons être agiles et livrer rapidement compte tenu de notre position, mai Ce guide, comme Dify lui-même, est un travail en constante évolution. Nous apprécions grandement votre compréhension si parfois il est en retard par rapport au projet réel, et nous accueillons tout commentaire pour nous aider à nous améliorer. -En termes de licence, veuillez prendre une minute pour lire notre bref [Accord de Licence et de Contributeur](../LICENSE). La communauté adhère également au [code de conduite](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +En termes de licence, veuillez prendre une minute pour lire notre bref [Accord de Licence et de Contributeur](../../LICENSE). La communauté adhère également au [code de conduite](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Avant de vous lancer diff --git a/README/README_FR.md b/docs/fr-FR/README.md similarity index 79% rename from README/README_FR.md rename to docs/fr-FR/README.md index a782bd16f8..3aca9a9672 100644 --- a/README/README_FR.md +++ b/docs/fr-FR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -108,7 +110,7 @@ Mettez une étoile à Dify sur GitHub et soyez instantanément informé des nouv
-La manière la plus simple de démarrer le serveur Dify est d'exécuter notre fichier [docker-compose.yml](docker/docker-compose.yaml). Avant d'exécuter la commande d'installation, assurez-vous que [Docker](https://docs.docker.com/get-docker/) et [Docker Compose](https://docs.docker.com/compose/install/) sont installés sur votre machine: +La manière la plus simple de démarrer le serveur Dify est d'exécuter notre fichier [docker-compose.yml](../../docker/docker-compose.yaml). Avant d'exécuter la commande d'installation, assurez-vous que [Docker](https://docs.docker.com/get-docker/) et [Docker Compose](https://docs.docker.com/compose/install/) sont installés sur votre machine: ```bash cd docker @@ -122,7 +124,7 @@ Après l'exécution, vous pouvez accéder au tableau de bord Dify dans votre nav ## Prochaines étapes -Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [.env.example](docker/.env.example) et mettre à jour les valeurs correspondantes dans votre fichier `.env`. De plus, vous devrez peut-être apporter des modifications au fichier `docker-compose.yaml` lui-même, comme changer les versions d'image, les mappages de ports ou les montages de volumes, en fonction de votre environnement de déploiement et de vos exigences spécifiques. Après avoir effectué des modifications, veuillez réexécuter `docker-compose up -d`. Vous pouvez trouver la liste complète des variables d'environnement disponibles [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [.env.example](../../docker/.env.example) et mettre à jour les valeurs correspondantes dans votre fichier `.env`. De plus, vous devrez peut-être apporter des modifications au fichier `docker-compose.yaml` lui-même, comme changer les versions d'image, les mappages de ports ou les montages de volumes, en fonction de votre environnement de déploiement et de vos exigences spécifiques. Après avoir effectué des modifications, veuillez réexécuter `docker-compose up -d`. Vous pouvez trouver la liste complète des variables d'environnement disponibles [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments). Si vous souhaitez configurer une configuration haute disponibilité, la communauté fournit des [Helm Charts](https://helm.sh/) et des fichiers YAML, à travers lesquels vous pouvez déployer Dify sur Kubernetes. @@ -168,7 +170,7 @@ Déployez Dify sur AKS en un clic en utilisant [Azure Devops Pipeline Helm Chart ## Contribuer -Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_FR.md). +Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](./CONTRIBUTING.md). Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur les réseaux sociaux et lors d'événements et de conférences. > Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c). 
@@ -182,7 +184,7 @@ Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur le ## Communauté & Contact - [Discussion GitHub](https://github.com/langgenius/dify/discussions). Meilleur pour: partager des commentaires et poser des questions. -- [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté. - [X(Twitter)](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté. @@ -196,12 +198,4 @@ Pour protéger votre vie privée, veuillez éviter de publier des problèmes de ## Licence -Ce référentiel est disponible sous la [Licence open source Dify](../LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. - -## Divulgation de sécurité - -Pour protéger votre vie privée, veuillez éviter de publier des problèmes de sécurité sur GitHub. Au lieu de cela, envoyez vos questions à security@dify.ai et nous vous fournirons une réponse plus détaillée. - -## Licence - -Ce référentiel est disponible sous la [Licence open source Dify](../LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. +Ce référentiel est disponible sous la [Licence open source Dify](../../LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. diff --git a/CONTRIBUTING/CONTRIBUTING_JA.md b/docs/ja-JP/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_JA.md rename to docs/ja-JP/CONTRIBUTING.md index dd3d6cbfc5..4ee7d8c963 100644 --- a/CONTRIBUTING/CONTRIBUTING_JA.md +++ b/docs/ja-JP/CONTRIBUTING.md @@ -6,7 +6,7 @@ Difyに貢献しようとお考えですか?素晴らしいですね。私た このガイドは、Dify自体と同様に、常に進化し続けています。実際のプロジェクトの進行状況と多少のずれが生じる場合もございますが、ご理解いただけますと幸いです。改善のためのフィードバックも歓迎いたします。 -ライセンスについては、[ライセンスと貢献者同意書](../LICENSE)をご一読ください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)に従っています。 +ライセンスについては、[ライセンスと貢献者同意書](../../LICENSE)をご一読ください。また、コミュニティは[行動規範](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)に従っています。 ## 始める前に diff --git a/README/README_JA.md b/docs/ja-JP/README.md similarity index 79% rename from README/README_JA.md rename to docs/ja-JP/README.md index 23cd0e692b..66831285d6 100644 --- a/README/README_JA.md +++ b/docs/ja-JP/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -109,7 +111,7 @@ GitHub上でDifyにスターを付けることで、Difyに関する新しいニ
-Difyサーバーを起動する最も簡単な方法は、[docker-compose.yml](docker/docker-compose.yaml)ファイルを実行することです。インストールコマンドを実行する前に、マシンに[Docker](https://docs.docker.com/get-docker/)と[Docker Compose](https://docs.docker.com/compose/install/)がインストールされていることを確認してください。 +Difyサーバーを起動する最も簡単な方法は、[docker-compose.yml](../../docker/docker-compose.yaml)ファイルを実行することです。インストールコマンドを実行する前に、マシンに[Docker](https://docs.docker.com/get-docker/)と[Docker Compose](https://docs.docker.com/compose/install/)がインストールされていることを確認してください。 ```bash cd docker @@ -123,7 +125,7 @@ docker compose up -d ## 次のステップ -設定をカスタマイズする必要がある場合は、[.env.example](docker/.env.example) ファイルのコメントを参照し、`.env` ファイルの対応する値を更新してください。さらに、デプロイ環境や要件に応じて、`docker-compose.yaml` ファイル自体を調整する必要がある場合があります。たとえば、イメージのバージョン、ポートのマッピング、ボリュームのマウントなどを変更します。変更を加えた後は、`docker-compose up -d` を再実行してください。利用可能な環境変数の全一覧は、[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)で確認できます。 +設定をカスタマイズする必要がある場合は、[.env.example](../../docker/.env.example) ファイルのコメントを参照し、`.env` ファイルの対応する値を更新してください。さらに、デプロイ環境や要件に応じて、`docker-compose.yaml` ファイル自体を調整する必要がある場合があります。たとえば、イメージのバージョン、ポートのマッピング、ボリュームのマウントなどを変更します。変更を加えた後は、`docker-compose up -d` を再実行してください。利用可能な環境変数の全一覧は、[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)で確認できます。 高可用性設定を設定する必要がある場合、コミュニティは[Helm Charts](https://helm.sh/)とYAMLファイルにより、DifyをKubernetesにデプロイすることができます。 @@ -169,7 +171,7 @@ docker compose up -d ## 貢献 -コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_JA.md)を参照してください。 +コードに貢献したい方は、[Contribution Guide](./CONTRIBUTING.md)を参照してください。 同時に、DifyをSNSやイベント、カンファレンスで共有してサポートしていただけると幸いです。 > Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。 @@ -183,10 +185,10 @@ docker compose up -d ## コミュニティ & お問い合わせ - [GitHub Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。 -- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](../CONTRIBUTING/CONTRIBUTING_JA.md)を参照してください +- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](./CONTRIBUTING.md)を参照してください - [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。 - [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。 ## ライセンス -このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](../LICENSE)の下で利用可能です。 +このリポジトリは、Dify Open Source License にいくつかの追加制限を加えた[Difyオープンソースライセンス](../../LICENSE)の下で利用可能です。 diff --git a/CONTRIBUTING/CONTRIBUTING_KR.md b/docs/ko-KR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_KR.md rename to docs/ko-KR/CONTRIBUTING.md index f94d5bfbc9..9c171c3561 100644 --- a/CONTRIBUTING/CONTRIBUTING_KR.md +++ b/docs/ko-KR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Dify에 기여하려고 하시는군요 - 정말 멋집니다, 당신이 무엇 이 가이드는 Dify 자체와 마찬가지로 끊임없이 진행 중인 작업입니다. 때로는 실제 프로젝트보다 뒤처질 수 있다는 점을 이해해 주시면 감사하겠으며, 개선을 위한 피드백은 언제든지 환영합니다. -라이센스 측면에서, 간략한 [라이센스 및 기여자 동의서](../LICENSE)를 읽어보는 시간을 가져주세요. 커뮤니티는 또한 [행동 강령](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)을 준수합니다. +라이센스 측면에서, 간략한 [라이센스 및 기여자 동의서](../../LICENSE)를 읽어보는 시간을 가져주세요. 커뮤니티는 또한 [행동 강령](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)을 준수합니다. 
## 시작하기 전에 diff --git a/README/README_KR.md b/docs/ko-KR/README.md similarity index 79% rename from README/README_KR.md rename to docs/ko-KR/README.md index e1a2a82677..ec67bc90ed 100644 --- a/README/README_KR.md +++ b/docs/ko-KR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify 클라우드 · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify는 오픈 소스 LLM 앱 개발 플랫폼입니다. 직관적인 인터페이스를 통해 AI 워크플로우, RAG 파이프라인, 에이전트 기능, 모델 관리, 관찰 기능 등을 결합하여 프로토타입에서 프로덕션까지 빠르게 전환할 수 있습니다. 주요 기능 목록은 다음과 같습니다:

@@ -102,7 +104,7 @@ GitHub에서 Dify에 별표를 찍어 새로운 릴리스를 즉시 알림 받
-Dify 서버를 시작하는 가장 쉬운 방법은 [docker-compose.yml](docker/docker-compose.yaml) 파일을 실행하는 것입니다. 설치 명령을 실행하기 전에 [Docker](https://docs.docker.com/get-docker/) 및 [Docker Compose](https://docs.docker.com/compose/install/)가 머신에 설치되어 있는지 확인하세요. +Dify 서버를 시작하는 가장 쉬운 방법은 [docker-compose.yml](../../docker/docker-compose.yaml) 파일을 실행하는 것입니다. 설치 명령을 실행하기 전에 [Docker](https://docs.docker.com/get-docker/) 및 [Docker Compose](https://docs.docker.com/compose/install/)가 머신에 설치되어 있는지 확인하세요. ```bash cd docker @@ -116,7 +118,7 @@ docker compose up -d ## 다음 단계 -구성을 사용자 정의해야 하는 경우 [.env.example](docker/.env.example) 파일의 주석을 참조하고 `.env` 파일에서 해당 값을 업데이트하십시오. 또한 특정 배포 환경 및 요구 사항에 따라 `docker-compose.yaml` 파일 자체를 조정해야 할 수도 있습니다. 예를 들어 이미지 버전, 포트 매핑 또는 볼륨 마운트를 변경합니다. 변경 한 후 `docker-compose up -d`를 다시 실행하십시오. 사용 가능한 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 찾을 수 있습니다. +구성을 사용자 정의해야 하는 경우 [.env.example](../../docker/.env.example) 파일의 주석을 참조하고 `.env` 파일에서 해당 값을 업데이트하십시오. 또한 특정 배포 환경 및 요구 사항에 따라 `docker-compose.yaml` 파일 자체를 조정해야 할 수도 있습니다. 예를 들어 이미지 버전, 포트 매핑 또는 볼륨 마운트를 변경합니다. 변경 한 후 `docker-compose up -d`를 다시 실행하십시오. 사용 가능한 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 찾을 수 있습니다. Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했다는 커뮤니티가 제공하는 [Helm Charts](https://helm.sh/)와 YAML 파일이 존재합니다. @@ -162,7 +164,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 기여 -코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_KR.md)를 참조하세요. +코드에 기여하고 싶은 분들은 [기여 가이드](./CONTRIBUTING.md)를 참조하세요. 동시에 Dify를 소셜 미디어와 행사 및 컨퍼런스에 공유하여 지원하는 것을 고려해 주시기 바랍니다. > 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요. @@ -176,7 +178,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 커뮤니티 & 연락처 - [GitHub 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다. -- [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요. +- [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](./CONTRIBUTING.md)를 참조하세요. - [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. - [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. @@ -190,4 +192,4 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 라이선스 -이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](../LICENSE)에 따라 사용할 수 있습니다. +이 저장소는 기본적으로 몇 가지 추가 제한 사항이 있는 Apache 2.0인 [Dify 오픈 소스 라이선스](../../LICENSE)에 따라 사용할 수 있습니다. diff --git a/CONTRIBUTING/CONTRIBUTING_PT.md b/docs/pt-BR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_PT.md rename to docs/pt-BR/CONTRIBUTING.md index 2aec1e2196..737b2ddce2 100644 --- a/CONTRIBUTING/CONTRIBUTING_PT.md +++ b/docs/pt-BR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Precisamos ser ágeis e entregar rapidamente considerando onde estamos, mas tamb Este guia, como o próprio Dify, é um trabalho em constante evolução. Agradecemos muito a sua compreensão se às vezes ele ficar atrasado em relação ao projeto real, e damos as boas-vindas a qualquer feedback para que possamos melhorar. -Em termos de licenciamento, por favor, dedique um minuto para ler nosso breve [Acordo de Licença e Contribuidor](../LICENSE). 
A comunidade também adere ao [código de conduta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Em termos de licenciamento, por favor, dedique um minuto para ler nosso breve [Acordo de Licença e Contribuidor](../../LICENSE). A comunidade também adere ao [código de conduta](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Antes de começar diff --git a/README/README_PT.md b/docs/pt-BR/README.md similarity index 78% rename from README/README_PT.md rename to docs/pt-BR/README.md index 91132aade4..78383a3c76 100644 --- a/README/README_PT.md +++ b/docs/pt-BR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

📌 Introduzindo o Dify Workflow com Upload de Arquivo: Recrie o Podcast Google NotebookLM @@ -39,18 +39,20 @@

- README em Inglês - 简体中文版自述文件 - 日本語のREADME - README em Espanhol - README em Francês - README tlhIngan Hol - README em Coreano - README em Árabe - README em Turco - README em Vietnamita - README em Português - BR - README in বাংলা + README em Inglês + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README em Espanhol + README em Francês + README tlhIngan Hol + README em Coreano + README em Árabe + README em Turco + README em Vietnamita + README em Português - BR + README in Deutsch + README in বাংলা

Dify é uma plataforma de desenvolvimento de aplicativos LLM de código aberto. Sua interface intuitiva combina workflow de IA, pipeline RAG, capacidades de agente, gerenciamento de modelos, recursos de observabilidade e muito mais, permitindo que você vá rapidamente do protótipo à produção. Aqui está uma lista das principais funcionalidades: @@ -108,7 +110,7 @@ Dê uma estrela no Dify no GitHub e seja notificado imediatamente sobre novos la
-A maneira mais fácil de iniciar o servidor Dify é executar nosso arquivo [docker-compose.yml](docker/docker-compose.yaml). Antes de rodar o comando de instalação, certifique-se de que o [Docker](https://docs.docker.com/get-docker/) e o [Docker Compose](https://docs.docker.com/compose/install/) estão instalados na sua máquina: +A maneira mais fácil de iniciar o servidor Dify é executar nosso arquivo [docker-compose.yml](../../docker/docker-compose.yaml). Antes de rodar o comando de instalação, certifique-se de que o [Docker](https://docs.docker.com/get-docker/) e o [Docker Compose](https://docs.docker.com/compose/install/) estão instalados na sua máquina: ```bash cd docker @@ -122,7 +124,7 @@ Após a execução, você pode acessar o painel do Dify no navegador em [http:// ## Próximos passos -Se precisar personalizar a configuração, consulte os comentários no nosso arquivo [.env.example](docker/.env.example) e atualize os valores correspondentes no seu arquivo `.env`. Além disso, talvez seja necessário fazer ajustes no próprio arquivo `docker-compose.yaml`, como alterar versões de imagem, mapeamentos de portas ou montagens de volumes, com base no seu ambiente de implantação específico e nas suas necessidades. Após fazer quaisquer alterações, execute novamente `docker-compose up -d`. Você pode encontrar a lista completa de variáveis de ambiente disponíveis [aqui](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Se precisar personalizar a configuração, consulte os comentários no nosso arquivo [.env.example](../../docker/.env.example) e atualize os valores correspondentes no seu arquivo `.env`. Além disso, talvez seja necessário fazer ajustes no próprio arquivo `docker-compose.yaml`, como alterar versões de imagem, mapeamentos de portas ou montagens de volumes, com base no seu ambiente de implantação específico e nas suas necessidades. Após fazer quaisquer alterações, execute novamente `docker-compose up -d`. Você pode encontrar a lista completa de variáveis de ambiente disponíveis [aqui](https://docs.dify.ai/getting-started/install-self-hosted/environments). Se deseja configurar uma instalação de alta disponibilidade, há [Helm Charts](https://helm.sh/) e arquivos YAML contribuídos pela comunidade que permitem a implantação do Dify no Kubernetes. @@ -168,7 +170,7 @@ Implante o Dify no AKS com um clique usando [Azure Devops Pipeline Helm Chart by ## Contribuindo -Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_PT.md). +Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](./CONTRIBUTING.md). Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em eventos e conferências. > Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c). @@ -182,7 +184,7 @@ Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em ## Comunidade e contato - [Discussões no GitHub](https://github.com/langgenius/dify/discussions). Melhor para: compartilhar feedback e fazer perguntas. -- [Problemas no GitHub](https://github.com/langgenius/dify/issues). 
Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Problemas no GitHub](https://github.com/langgenius/dify/issues). Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](./CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Melhor para: compartilhar suas aplicações e interagir com a comunidade. - [X(Twitter)](https://twitter.com/dify_ai). Melhor para: compartilhar suas aplicações e interagir com a comunidade. @@ -196,4 +198,4 @@ Para proteger sua privacidade, evite postar problemas de segurança no GitHub. E ## Licença -Este repositório está disponível sob a [Licença de Código Aberto Dify](../LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais. +Este repositório está disponível sob a [Licença de Código Aberto Dify](../../LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais. diff --git a/README/README_SI.md b/docs/sl-SI/README.md similarity index 83% rename from README/README_SI.md rename to docs/sl-SI/README.md index 8cd78c065c..65aedb7703 100644 --- a/README/README_SI.md +++ b/docs/sl-SI/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

📌 Predstavljamo nalaganje datotek Dify Workflow: znova ustvarite Google NotebookLM Podcast @@ -36,18 +36,20 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README Slovenščina - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README Slovenščina + README in Deutsch + README in বাংলা

Dify je odprtokodna platforma za razvoj aplikacij LLM. Njegov intuitivni vmesnik združuje agentski potek dela z umetno inteligenco, cevovod RAG, zmogljivosti agentov, upravljanje modelov, funkcije opazovanja in več, kar vam omogoča hiter prehod od prototipa do proizvodnje. @@ -169,7 +171,7 @@ Z enim klikom namestite Dify v AKS z uporabo [Azure Devops Pipeline Helm Chart b ## Prispevam -Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah. +Za tiste, ki bi radi prispevali kodo, si oglejte naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah. > Iščemo sodelavce za pomoč pri prevajanju Difyja v jezike, ki niso mandarinščina ali angleščina. Če želite pomagati, si oglejte i18n README za več informacij in nam pustite komentar v global-userskanalu našega strežnika skupnosti Discord . @@ -196,4 +198,4 @@ Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj ## Licenca -To skladišče je na voljo pod [odprtokodno licenco Dify](../LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami. +To skladišče je na voljo pod [odprtokodno licenco Dify](../../LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami. diff --git a/README/README_KL.md b/docs/tlh/README.md similarity index 79% rename from README/README_KL.md rename to docs/tlh/README.md index cae02f56fe..b1e3016efd 100644 --- a/README/README_KL.md +++ b/docs/tlh/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

# @@ -108,7 +110,7 @@ Star Dify on GitHub and be instantly notified of new releases.
-The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine: +The easiest way to start the Dify server is to run our [docker-compose.yml](../../docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine: ```bash cd docker @@ -122,7 +124,7 @@ After running, you can access the Dify dashboard in your browser at [http://loca ## Next steps -If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments). +If you need to customize the configuration, please refer to the comments in our [.env.example](../../docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments). If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes. @@ -181,10 +183,7 @@ At the same time, please consider supporting Dify by sharing it on social media ## Community & Contact -- \[GitHub Discussion\](https://github.com/langgenius/dify/discussions - -). Best for: sharing feedback and asking questions. - +- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions. - [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community. - [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community. @@ -199,4 +198,4 @@ To protect your privacy, please avoid posting security issues on GitHub. Instead ## License -This repository is available under the [Dify Open Source License](../LICENSE), which is essentially Apache 2.0 with a few additional restrictions. +This repository is available under the [Dify Open Source License](../../LICENSE), which is essentially Apache 2.0 with a few additional restrictions. 
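Every rename in this series moves a file one directory deeper (README/*.md and CONTRIBUTING/*.md become docs/&lt;locale&gt;/*.md), which is why each relative target gains one `../`: `docker/docker-compose.yaml` becomes `../../docker/docker-compose.yaml`, `../LICENSE` becomes `../../LICENSE`, and so on. A quick way to confirm no target was missed after a move like this is to resolve every relative markdown link from the file's new location and check that it exists on disk. The sketch below is a hypothetical helper, not part of these patches:

```typescript
// check-relative-links.ts: a hypothetical helper, not part of these patches.
// Resolves each relative markdown link from the file's own directory and
// reports targets that no longer exist after a move such as README/ -> docs/<locale>/.
import { existsSync, readFileSync } from 'node:fs'
import { dirname, resolve } from 'node:path'

// Good enough for README-style [text](target) links.
const LINK_RE = /\[[^\]]*\]\(([^)\s]+)\)/g

function brokenLinks(markdownPath: string): string[] {
  const source = readFileSync(markdownPath, 'utf8')
  const broken: string[] = []
  for (const match of source.matchAll(LINK_RE)) {
    const target = match[1]
    // Only relative paths can rot on a move; skip URLs, mail links, and anchors.
    if (/^[a-z][a-z0-9+.-]*:/i.test(target) || target.startsWith('#'))
      continue
    const withoutAnchor = target.split('#')[0]
    if (withoutAnchor && !existsSync(resolve(dirname(markdownPath), withoutAnchor)))
      broken.push(target)
  }
  return broken
}

// Usage: npx tsx check-relative-links.ts docs/de-DE/README.md docs/es-ES/README.md
for (const file of process.argv.slice(2))
  console.log(file, brokenLinks(file))
```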
diff --git a/CONTRIBUTING/CONTRIBUTING_TR.md b/docs/tr-TR/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_TR.md rename to docs/tr-TR/CONTRIBUTING.md index 1932a3ab34..59227d31a9 100644 --- a/CONTRIBUTING/CONTRIBUTING_TR.md +++ b/docs/tr-TR/CONTRIBUTING.md @@ -6,7 +6,7 @@ Bulunduğumuz noktada çevik olmamız ve hızlı hareket etmemiz gerekiyor, anca Bu rehber, Dify'ın kendisi gibi, sürekli gelişen bir çalışmadır. Bazen gerçek projenin gerisinde kalırsa anlayışınız için çok minnettarız ve gelişmemize yardımcı olacak her türlü geri bildirimi memnuniyetle karşılıyoruz. -Lisanslama konusunda, lütfen kısa [Lisans ve Katkıda Bulunan Anlaşmamızı](../LICENSE) okumak için bir dakikanızı ayırın. Topluluk ayrıca [davranış kurallarına](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md) da uyar. +Lisanslama konusunda, lütfen kısa [Lisans ve Katkıda Bulunan Anlaşmamızı](../../LICENSE) okumak için bir dakikanızı ayırın. Topluluk ayrıca [davranış kurallarına](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md) da uyar. ## Başlamadan Önce diff --git a/README/README_TR.md b/docs/tr-TR/README.md similarity index 79% rename from README/README_TR.md rename to docs/tr-TR/README.md index 9836c6be61..a044da1f4e 100644 --- a/README/README_TR.md +++ b/docs/tr-TR/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Bulut · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify, açık kaynaklı bir LLM uygulama geliştirme platformudur. Sezgisel arayüzü, AI iş akışı, RAG pipeline'ı, ajan yetenekleri, model yönetimi, gözlemlenebilirlik özellikleri ve daha fazlasını birleştirerek, prototipten üretime hızlıca geçmenizi sağlar. İşte temel özelliklerin bir listesi: @@ -102,7 +104,7 @@ GitHub'da Dify'a yıldız verin ve yeni sürümlerden anında haberdar olun. > - RAM >= 4GB
-Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun: +Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](../../docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun: ```bash cd docker @@ -116,7 +118,7 @@ docker compose up -d ## Sonraki adımlar -Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz. +Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](../../docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz. Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify'ın Kubernetes üzerine dağıtılmasına olanak tanıyan topluluk katkılı [Helm Charts](https://helm.sh/) ve YAML dosyaları mevcuttur. @@ -161,7 +163,7 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter ## Katkıda Bulunma -Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_TR.md) bakabilirsiniz. +Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](./CONTRIBUTING.md) bakabilirsiniz. Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün. > Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın. @@ -175,7 +177,7 @@ Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda p ## Topluluk & iletişim - [GitHub Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için. -- [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. 
[Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın. +- [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](./CONTRIBUTING.md) bakın. - [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. - [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. @@ -189,4 +191,4 @@ Gizliliğinizi korumak için, lütfen güvenlik sorunlarını GitHub'da paylaşm ## Lisans -Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](../LICENSE) altında kullanıma sunulmuştur. +Bu depo, temel olarak Apache 2.0 lisansı ve birkaç ek kısıtlama içeren [Dify Açık Kaynak Lisansı](../../LICENSE) altında kullanıma sunulmuştur. diff --git a/CONTRIBUTING/CONTRIBUTING_VI.md b/docs/vi-VN/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_VI.md rename to docs/vi-VN/CONTRIBUTING.md index b9844c4869..fa1d875f83 100644 --- a/CONTRIBUTING/CONTRIBUTING_VI.md +++ b/docs/vi-VN/CONTRIBUTING.md @@ -6,7 +6,7 @@ Chúng tôi cần phải nhanh nhẹn và triển khai nhanh chóng, nhưng cũn Hướng dẫn này, giống như Dify, đang được phát triển liên tục. Chúng tôi rất cảm kích sự thông cảm của bạn nếu đôi khi nó chưa theo kịp dự án thực tế, và hoan nghênh mọi phản hồi để cải thiện. -Về giấy phép, vui lòng dành chút thời gian đọc [Thỏa thuận Cấp phép và Người đóng góp](../LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân theo [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). +Về giấy phép, vui lòng dành chút thời gian đọc [Thỏa thuận Cấp phép và Người đóng góp](../../LICENSE) ngắn gọn của chúng tôi. Cộng đồng cũng tuân theo [quy tắc ứng xử](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md). ## Trước khi bắt đầu diff --git a/README/README_VI.md b/docs/vi-VN/README.md similarity index 80% rename from README/README_VI.md rename to docs/vi-VN/README.md index 22d74eb31d..847641da12 100644 --- a/README/README_VI.md +++ b/docs/vi-VN/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)

Dify Cloud · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা

Dify là một nền tảng phát triển ứng dụng LLM mã nguồn mở. Giao diện trực quan kết hợp quy trình làm việc AI, mô hình RAG, khả năng tác nhân, quản lý mô hình, tính năng quan sát và hơn thế nữa, cho phép bạn nhanh chóng chuyển từ nguyên mẫu sang sản phẩm. Đây là danh sách các tính năng cốt lõi: @@ -103,7 +105,7 @@ Yêu thích Dify trên GitHub và được thông báo ngay lập tức về cá
-Cách dễ nhất để khởi động máy chủ Dify là chạy tệp [docker-compose.yml](docker/docker-compose.yaml) của chúng tôi. Trước khi chạy lệnh cài đặt, hãy đảm bảo rằng [Docker](https://docs.docker.com/get-docker/) và [Docker Compose](https://docs.docker.com/compose/install/) đã được cài đặt trên máy của bạn: +Cách dễ nhất để khởi động máy chủ Dify là chạy tệp [docker-compose.yml](../../docker/docker-compose.yaml) của chúng tôi. Trước khi chạy lệnh cài đặt, hãy đảm bảo rằng [Docker](https://docs.docker.com/get-docker/) và [Docker Compose](https://docs.docker.com/compose/install/) đã được cài đặt trên máy của bạn: ```bash cd docker @@ -117,7 +119,7 @@ Sau khi chạy, bạn có thể truy cập bảng điều khiển Dify trong tr ## Các bước tiếp theo -Nếu bạn cần tùy chỉnh cấu hình, vui lòng tham khảo các nhận xét trong tệp [.env.example](docker/.env.example) của chúng tôi và cập nhật các giá trị tương ứng trong tệp `.env` của bạn. Ngoài ra, bạn có thể cần điều chỉnh tệp `docker-compose.yaml`, chẳng hạn như thay đổi phiên bản hình ảnh, ánh xạ cổng hoặc gắn kết khối lượng, dựa trên môi trường triển khai cụ thể và yêu cầu của bạn. Sau khi thực hiện bất kỳ thay đổi nào, vui lòng chạy lại `docker-compose up -d`. Bạn có thể tìm thấy danh sách đầy đủ các biến môi trường có sẵn [tại đây](https://docs.dify.ai/getting-started/install-self-hosted/environments). +Nếu bạn cần tùy chỉnh cấu hình, vui lòng tham khảo các nhận xét trong tệp [.env.example](../../docker/.env.example) của chúng tôi và cập nhật các giá trị tương ứng trong tệp `.env` của bạn. Ngoài ra, bạn có thể cần điều chỉnh tệp `docker-compose.yaml`, chẳng hạn như thay đổi phiên bản hình ảnh, ánh xạ cổng hoặc gắn kết khối lượng, dựa trên môi trường triển khai cụ thể và yêu cầu của bạn. Sau khi thực hiện bất kỳ thay đổi nào, vui lòng chạy lại `docker-compose up -d`. Bạn có thể tìm thấy danh sách đầy đủ các biến môi trường có sẵn [tại đây](https://docs.dify.ai/getting-started/install-self-hosted/environments). Nếu bạn muốn cấu hình một cài đặt có độ sẵn sàng cao, có các [Helm Charts](https://helm.sh/) và tệp YAML do cộng đồng đóng góp cho phép Dify được triển khai trên Kubernetes. @@ -162,7 +164,7 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Đóng góp -Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_VI.md) của chúng tôi. +Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](./CONTRIBUTING.md) của chúng tôi. Đồng thời, vui lòng xem xét hỗ trợ Dify bằng cách chia sẻ nó trên mạng xã hội và tại các sự kiện và hội nghị. > Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi. @@ -176,7 +178,7 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Cộng đồng & liên hệ - [Thảo luận GitHub](https://github.com/langgenius/dify/discussions). Tốt nhất cho: chia sẻ phản hồi và đặt câu hỏi. -- [Vấn đề GitHub](https://github.com/langgenius/dify/issues). Tốt nhất cho: lỗi bạn gặp phải khi sử dụng Dify.AI và đề xuất tính năng. Xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi. +- [Vấn đề GitHub](https://github.com/langgenius/dify/issues). 
Tốt nhất cho: lỗi bạn gặp phải khi sử dụng Dify.AI và đề xuất tính năng. Xem [Hướng dẫn Đóng góp](./CONTRIBUTING.md) của chúng tôi. - [Discord](https://discord.gg/FngNHpbcY7). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng. - [X(Twitter)](https://twitter.com/dify_ai). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng. @@ -190,4 +192,4 @@ Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure De ## Giấy phép -Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](../LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung. +Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](../../LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung. diff --git a/CONTRIBUTING/CONTRIBUTING_CN.md b/docs/zh-CN/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_CN.md rename to docs/zh-CN/CONTRIBUTING.md index 8c52d8939c..5b71467804 100644 --- a/CONTRIBUTING/CONTRIBUTING_CN.md +++ b/docs/zh-CN/CONTRIBUTING.md @@ -6,7 +6,7 @@ 本指南和 Dify 一样在不断完善中。如果有任何滞后于项目实际情况的地方,恳请谅解,我们也欢迎任何改进建议。 -关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](../LICENSE)。同时也请遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 +关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](../../LICENSE)。同时也请遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 ## 开始之前 diff --git a/README/README_CN.md b/docs/zh-CN/README.md similarity index 79% rename from README/README_CN.md rename to docs/zh-CN/README.md index 9501992bd2..202b99a6b1 100644 --- a/README/README_CN.md +++ b/docs/zh-CN/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
Dify 云服务 · @@ -35,17 +35,19 @@

- README in English - 简体中文版自述文件 - 日本語のREADME - README en Español - README en Français - README tlhIngan Hol - README in Korean - README بالعربية - Türkçe README - README Tiếng Việt - README in বাংলা + README in English + 繁體中文文件 + 简体中文文件 + 日本語のREADME + README en Español + README en Français + README tlhIngan Hol + README in Korean + README بالعربية + Türkçe README + README Tiếng Việt + README in Deutsch + README in বাংলা
# @@ -111,7 +113,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI ### 快速启动 -启动 Dify 服务器的最简单方法是运行我们的 [docker-compose.yml](docker/docker-compose.yaml) 文件。在运行安装命令之前,请确保您的机器上安装了 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): +启动 Dify 服务器的最简单方法是运行我们的 [docker-compose.yml](../../docker/docker-compose.yaml) 文件。在运行安装命令之前,请确保您的机器上安装了 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): ```bash cd docker @@ -123,7 +125,7 @@ docker compose up -d ### 自定义配置 -如果您需要自定义配置,请参考 [.env.example](docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。 +如果您需要自定义配置,请参考 [.env.example](../../docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。 #### 使用 Helm Chart 或 Kubernetes 资源清单(YAML)部署 @@ -180,7 +182,7 @@ docker compose up -d ## Contributing -对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_CN.md)。 +对于那些想要贡献代码的人,请参阅我们的[贡献指南](./CONTRIBUTING.md)。 同时,请考虑通过社交媒体、活动和会议来支持 Dify 的分享。 > 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。 @@ -196,7 +198,7 @@ docker compose up -d 我们欢迎您为 Dify 做出贡献,以帮助改善 Dify。包括:提交代码、问题、新想法,或分享您基于 Dify 创建的有趣且有用的 AI 应用程序。同时,我们也欢迎您在不同的活动、会议和社交媒体上分享 Dify。 - [GitHub Discussion](https://github.com/langgenius/dify/discussions). 👉:分享您的应用程序并与社区交流。 -- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](../CONTRIBUTING.md)。 +- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](./CONTRIBUTING.md)。 - [电子邮件支持](mailto:hello@dify.ai?subject=%5BGitHub%5DQuestions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。 - [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。 - [X(Twitter)](https://twitter.com/dify_ai)。👉:分享您的应用程序并与社区交流。 @@ -208,4 +210,4 @@ docker compose up -d ## License -本仓库遵循 [Dify Open Source License](../LICENSE) 开源协议,该许可证本质上是 Apache 2.0,但有一些额外的限制。 +本仓库遵循 [Dify Open Source License](../../LICENSE) 开源协议,该许可证本质上是 Apache 2.0,但有一些额外的限制。 diff --git a/CONTRIBUTING/CONTRIBUTING_TW.md b/docs/zh-TW/CONTRIBUTING.md similarity index 96% rename from CONTRIBUTING/CONTRIBUTING_TW.md rename to docs/zh-TW/CONTRIBUTING.md index 7fba220a22..1d5f02efa1 100644 --- a/CONTRIBUTING/CONTRIBUTING_TW.md +++ b/docs/zh-TW/CONTRIBUTING.md @@ -6,7 +6,7 @@ 這份指南與 Dify 一樣,都在持續完善中。如果指南內容有落後於實際專案的情況,還請見諒,也歡迎提供改進建議。 -關於授權部分,請花點時間閱讀我們簡短的[授權和貢獻者協議](../LICENSE)。社群也需遵守[行為準則](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 +關於授權部分,請花點時間閱讀我們簡短的[授權和貢獻者協議](../../LICENSE)。社群也需遵守[行為準則](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。 ## 開始之前 diff --git a/README/README_TW.md b/docs/zh-TW/README.md similarity index 80% rename from README/README_TW.md rename to docs/zh-TW/README.md index b9c0b81246..526e8d9c8c 100644 --- a/README/README_TW.md +++ b/docs/zh-TW/README.md @@ -1,4 +1,4 @@ -![cover-v5-optimized](../images/GitHub_README_if.png) +![cover-v5-optimized](../../images/GitHub_README_if.png)
📌 介紹 Dify 工作流程檔案上傳功能:重現 Google NotebookLM Podcast

@@ -39,18 +39,18 @@

- README in English
- 繁體中文文件
- 简体中文版自述文件
- 日本語のREADME
- README en Español
- README en Français
- README tlhIngan Hol
- README in Korean
- README بالعربية
- Türkçe README
- README Tiếng Việt
- README in Deutsch
+ README in English
+ 繁體中文文件
+ 简体中文文件
+ 日本語のREADME
+ README en Español
+ README en Français
+ README tlhIngan Hol
+ README in Korean
+ README بالعربية
+ Türkçe README
+ README Tiếng Việt
+ README in Deutsch

Dify 是一個開源的 LLM 應用程式開發平台。其直觀的界面結合了智能代理工作流程、RAG 管道、代理功能、模型管理、可觀察性功能等,讓您能夠快速從原型進展到生產環境。

@@ -64,7 +64,7 @@ Dify 是一個開源的 LLM 應用程式開發平台。其直觀的界面結合
-啟動 Dify 伺服器最簡單的方式是透過 [docker compose](docker/docker-compose.yaml)。在使用以下命令運行 Dify 之前,請確保您的機器已安裝 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): +啟動 Dify 伺服器最簡單的方式是透過 [docker compose](../../docker/docker-compose.yaml)。在使用以下命令運行 Dify 之前,請確保您的機器已安裝 [Docker](https://docs.docker.com/get-docker/) 和 [Docker Compose](https://docs.docker.com/compose/install/): ```bash cd dify @@ -128,7 +128,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 進階設定 -如果您需要自定義配置,請參考我們的 [.env.example](docker/.env.example) 文件中的註釋,並在您的 `.env` 文件中更新相應的值。此外,根據您特定的部署環境和需求,您可能需要調整 `docker-compose.yaml` 文件本身,例如更改映像版本、端口映射或卷掛載。進行任何更改後,請重新運行 `docker-compose up -d`。您可以在[這裡](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用環境變數的完整列表。 +如果您需要自定義配置,請參考我們的 [.env.example](../../docker/.env.example) 文件中的註釋,並在您的 `.env` 文件中更新相應的值。此外,根據您特定的部署環境和需求,您可能需要調整 `docker-compose.yaml` 文件本身,例如更改映像版本、端口映射或卷掛載。進行任何更改後,請重新運行 `docker-compose up -d`。您可以在[這裡](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用環境變數的完整列表。 如果您想配置高可用性設置,社區貢獻的 [Helm Charts](https://helm.sh/) 和 Kubernetes 資源清單(YAML)允許在 Kubernetes 上部署 Dify。 @@ -173,7 +173,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 貢獻 -對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING/CONTRIBUTING_TW.md)。 +對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](./CONTRIBUTING.md)。 同時,也請考慮透過在社群媒體和各種活動與會議上分享 Dify 來支持我們。 > 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。 @@ -181,7 +181,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 社群與聯絡方式 - [GitHub Discussion](https://github.com/langgenius/dify/discussions):最適合分享反饋和提問。 -- [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。 +- [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](./CONTRIBUTING.md)。 - [Discord](https://discord.gg/FngNHpbcY7):最適合分享您的應用程式並與社群互動。 - [X(Twitter)](https://twitter.com/dify_ai):最適合分享您的應用程式並與社群互動。 @@ -201,4 +201,4 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify ## 授權條款 -本代碼庫採用 [Dify 開源授權](../LICENSE),這基本上是 Apache 2.0 授權加上一些額外限制條款。 +本代碼庫採用 [Dify 開源授權](../../LICENSE),這基本上是 Apache 2.0 授權加上一些額外限制條款。 From 22f64d60bbe69be584935cad32509244c1e62905 Mon Sep 17 00:00:00 2001 From: GuanMu Date: Sun, 5 Oct 2025 12:49:41 +0800 Subject: [PATCH 05/31] chore: update Dockerfile to use Python 3.12-bookworm and refactor layout logic to utilize ELK for improved node layout (#26522) --- .../hooks/use-workflow-interactions.ts | 133 +++-- .../components/workflow/utils/dagre-layout.ts | 246 -------- web/app/components/workflow/utils/index.ts | 2 +- web/app/components/workflow/utils/layout.ts | 529 ++++++++++++++++++ 4 files changed, 593 insertions(+), 317 deletions(-) delete mode 100644 web/app/components/workflow/utils/dagre-layout.ts create mode 100644 web/app/components/workflow/utils/layout.ts diff --git a/web/app/components/workflow/hooks/use-workflow-interactions.ts b/web/app/components/workflow/hooks/use-workflow-interactions.ts index c508eea0ba..f63250dd42 100644 --- a/web/app/components/workflow/hooks/use-workflow-interactions.ts +++ b/web/app/components/workflow/hooks/use-workflow-interactions.ts @@ -10,7 +10,7 @@ import { NODE_LAYOUT_VERTICAL_PADDING, WORKFLOW_DATA_UPDATE, } from '../constants' -import type { Node, WorkflowDataUpdater } 
from '../types' +import type { WorkflowDataUpdater } from '../types' import { BlockEnum, ControlMode } from '../types' import { getLayoutByDagre, @@ -18,6 +18,7 @@ import { initialEdges, initialNodes, } from '../utils' +import type { LayoutResult } from '../utils' import { useNodesReadOnly, useSelectionInteractions, @@ -102,10 +103,17 @@ export const useWorkflowOrganize = () => { && node.type === CUSTOM_NODE, ) - const childLayoutsMap: Record = {} - loopAndIterationNodes.forEach((node) => { - childLayoutsMap[node.id] = getLayoutForChildNodes(node.id, nodes, edges) - }) + const childLayoutEntries = await Promise.all( + loopAndIterationNodes.map(async node => [ + node.id, + await getLayoutForChildNodes(node.id, nodes, edges), + ] as const), + ) + const childLayoutsMap = childLayoutEntries.reduce((acc, [nodeId, layout]) => { + if (layout) + acc[nodeId] = layout + return acc + }, {} as Record) const containerSizeChanges: Record = {} @@ -113,37 +121,20 @@ export const useWorkflowOrganize = () => { const childLayout = childLayoutsMap[parentNode.id] if (!childLayout) return - let minX = Infinity - let minY = Infinity - let maxX = -Infinity - let maxY = -Infinity - let hasChildren = false + const { + bounds, + nodes: layoutNodes, + } = childLayout - const childNodes = nodes.filter(node => node.parentId === parentNode.id) + if (!layoutNodes.size) + return - childNodes.forEach((node) => { - if (childLayout.node(node.id)) { - hasChildren = true - const childNodeWithPosition = childLayout.node(node.id) + const requiredWidth = (bounds.maxX - bounds.minX) + NODE_LAYOUT_HORIZONTAL_PADDING * 2 + const requiredHeight = (bounds.maxY - bounds.minY) + NODE_LAYOUT_VERTICAL_PADDING * 2 - const nodeX = childNodeWithPosition.x - node.width! / 2 - const nodeY = childNodeWithPosition.y - node.height! / 2 - - minX = Math.min(minX, nodeX) - minY = Math.min(minY, nodeY) - maxX = Math.max(maxX, nodeX + node.width!) - maxY = Math.max(maxY, nodeY + node.height!) - } - }) - - if (hasChildren) { - const requiredWidth = maxX - minX + NODE_LAYOUT_HORIZONTAL_PADDING * 2 - const requiredHeight = maxY - minY + NODE_LAYOUT_VERTICAL_PADDING * 2 - - containerSizeChanges[parentNode.id] = { - width: Math.max(parentNode.width || 0, requiredWidth), - height: Math.max(parentNode.height || 0, requiredHeight), - } + containerSizeChanges[parentNode.id] = { + width: Math.max(parentNode.width || 0, requiredWidth), + height: Math.max(parentNode.height || 0, requiredHeight), } }) @@ -166,63 +157,65 @@ export const useWorkflowOrganize = () => { }) }) - const layout = getLayoutByDagre(nodesWithUpdatedSizes, edges) + const layout = await getLayoutByDagre(nodesWithUpdatedSizes, edges) - const rankMap = {} as Record - nodesWithUpdatedSizes.forEach((node) => { - if (!node.parentId && node.type === CUSTOM_NODE) { - const rank = layout.node(node.id).rank! - - if (!rankMap[rank]) { - rankMap[rank] = node - } - else { - if (rankMap[rank].position.y > node.position.y) - rankMap[rank] = node + // Build layer map for vertical alignment - nodes in the same layer should align + const layerMap = new Map() + layout.nodes.forEach((layoutInfo) => { + if (layoutInfo.layer !== undefined) { + const existing = layerMap.get(layoutInfo.layer) + const newLayerInfo = { + minY: existing ? Math.min(existing.minY, layoutInfo.y) : layoutInfo.y, + maxHeight: existing ? 
Math.max(existing.maxHeight, layoutInfo.height) : layoutInfo.height, } + layerMap.set(layoutInfo.layer, newLayerInfo) } }) const newNodes = produce(nodesWithUpdatedSizes, (draft) => { draft.forEach((node) => { if (!node.parentId && node.type === CUSTOM_NODE) { - const nodeWithPosition = layout.node(node.id) + const layoutInfo = layout.nodes.get(node.id) + if (!layoutInfo) + return + + // Calculate vertical position with layer alignment + let yPosition = layoutInfo.y + if (layoutInfo.layer !== undefined) { + const layerInfo = layerMap.get(layoutInfo.layer) + if (layerInfo) { + // Align to the center of the tallest node in this layer + const layerCenterY = layerInfo.minY + layerInfo.maxHeight / 2 + yPosition = layerCenterY - layoutInfo.height / 2 + } + } node.position = { - x: nodeWithPosition.x - node.width! / 2, - y: nodeWithPosition.y - node.height! / 2 + rankMap[nodeWithPosition.rank!].height! / 2, + x: layoutInfo.x, + y: yPosition, } } }) loopAndIterationNodes.forEach((parentNode) => { const childLayout = childLayoutsMap[parentNode.id] - if (!childLayout) return + if (!childLayout) + return const childNodes = draft.filter(node => node.parentId === parentNode.id) + const { + bounds, + nodes: layoutNodes, + } = childLayout - let minX = Infinity - let minY = Infinity + childNodes.forEach((childNode) => { + const layoutInfo = layoutNodes.get(childNode.id) + if (!layoutInfo) + return - childNodes.forEach((node) => { - if (childLayout.node(node.id)) { - const childNodeWithPosition = childLayout.node(node.id) - const nodeX = childNodeWithPosition.x - node.width! / 2 - const nodeY = childNodeWithPosition.y - node.height! / 2 - - minX = Math.min(minX, nodeX) - minY = Math.min(minY, nodeY) - } - }) - - childNodes.forEach((node) => { - if (childLayout.node(node.id)) { - const childNodeWithPosition = childLayout.node(node.id) - - node.position = { - x: NODE_LAYOUT_HORIZONTAL_PADDING + (childNodeWithPosition.x - node.width! / 2 - minX), - y: NODE_LAYOUT_VERTICAL_PADDING + (childNodeWithPosition.y - node.height! / 2 - minY), - } + childNode.position = { + x: NODE_LAYOUT_HORIZONTAL_PADDING + (layoutInfo.x - bounds.minX), + y: NODE_LAYOUT_VERTICAL_PADDING + (layoutInfo.y - bounds.minY), } }) }) diff --git a/web/app/components/workflow/utils/dagre-layout.ts b/web/app/components/workflow/utils/dagre-layout.ts deleted file mode 100644 index 5eafe77586..0000000000 --- a/web/app/components/workflow/utils/dagre-layout.ts +++ /dev/null @@ -1,246 +0,0 @@ -import dagre from '@dagrejs/dagre' -import { - cloneDeep, -} from 'lodash-es' -import type { - Edge, - Node, -} from '../types' -import { - BlockEnum, -} from '../types' -import { - CUSTOM_NODE, - NODE_LAYOUT_HORIZONTAL_PADDING, - NODE_LAYOUT_MIN_DISTANCE, - NODE_LAYOUT_VERTICAL_PADDING, -} from '../constants' -import { CUSTOM_ITERATION_START_NODE } from '../nodes/iteration-start/constants' -import { CUSTOM_LOOP_START_NODE } from '../nodes/loop-start/constants' - -export const getLayoutByDagre = (originNodes: Node[], originEdges: Edge[]) => { - const dagreGraph = new dagre.graphlib.Graph({ compound: true }) - dagreGraph.setDefaultEdgeLabel(() => ({})) - - const nodes = cloneDeep(originNodes).filter(node => !node.parentId && node.type === CUSTOM_NODE) - const edges = cloneDeep(originEdges).filter(edge => (!edge.data?.isInIteration && !edge.data?.isInLoop)) - -// The default dagre layout algorithm often fails to correctly order the branches -// of an If/Else node, leading to crossed edges. -// -// To solve this, we employ a "virtual container" strategy: -// 1. 
A virtual, compound parent node (the "container") is created for each If/Else node's branches. -// 2. Each direct child of the If/Else node is preceded by a virtual dummy node. These dummies are placed inside the container. -// 3. A rigid, sequential chain of invisible edges is created between these dummy nodes (e.g., dummy_IF -> dummy_ELIF -> dummy_ELSE). -// -// This forces dagre to treat the ordered branches as an unbreakable, atomic group, -// ensuring their layout respects the intended logical sequence. - const ifElseNodes = nodes.filter(node => node.data.type === BlockEnum.IfElse) - let virtualLogicApplied = false - - ifElseNodes.forEach((ifElseNode) => { - const childEdges = edges.filter(e => e.source === ifElseNode.id) - if (childEdges.length <= 1) - return - - virtualLogicApplied = true - const sortedChildEdges = childEdges.sort((edgeA, edgeB) => { - const handleA = edgeA.sourceHandle - const handleB = edgeB.sourceHandle - - if (handleA && handleB) { - const cases = (ifElseNode.data as any).cases || [] - const isAElse = handleA === 'false' - const isBElse = handleB === 'false' - - if (isAElse) return 1 - if (isBElse) return -1 - - const indexA = cases.findIndex((c: any) => c.case_id === handleA) - const indexB = cases.findIndex((c: any) => c.case_id === handleB) - - if (indexA !== -1 && indexB !== -1) - return indexA - indexB - } - return 0 - }) - - const parentDummyId = `dummy-parent-${ifElseNode.id}` - dagreGraph.setNode(parentDummyId, { width: 1, height: 1 }) - - const dummyNodes: string[] = [] - sortedChildEdges.forEach((edge) => { - const dummyNodeId = `dummy-${edge.source}-${edge.target}` - dummyNodes.push(dummyNodeId) - dagreGraph.setNode(dummyNodeId, { width: 1, height: 1 }) - dagreGraph.setParent(dummyNodeId, parentDummyId) - - const edgeIndex = edges.findIndex(e => e.id === edge.id) - if (edgeIndex > -1) - edges.splice(edgeIndex, 1) - - edges.push({ id: `e-${edge.source}-${dummyNodeId}`, source: edge.source, target: dummyNodeId, sourceHandle: edge.sourceHandle } as Edge) - edges.push({ id: `e-${dummyNodeId}-${edge.target}`, source: dummyNodeId, target: edge.target, targetHandle: edge.targetHandle } as Edge) - }) - - for (let i = 0; i < dummyNodes.length - 1; i++) { - const sourceDummy = dummyNodes[i] - const targetDummy = dummyNodes[i + 1] - edges.push({ id: `e-dummy-${sourceDummy}-${targetDummy}`, source: sourceDummy, target: targetDummy } as Edge) - } - }) - - dagreGraph.setGraph({ - rankdir: 'LR', - align: 'UL', - nodesep: 40, - ranksep: virtualLogicApplied ? 
30 : 60, - ranker: 'tight-tree', - marginx: 30, - marginy: 200, - }) - - nodes.forEach((node) => { - dagreGraph.setNode(node.id, { - width: node.width!, - height: node.height!, - }) - }) - edges.forEach((edge) => { - dagreGraph.setEdge(edge.source, edge.target) - }) - dagre.layout(dagreGraph) - return dagreGraph -} - -export const getLayoutForChildNodes = (parentNodeId: string, originNodes: Node[], originEdges: Edge[]) => { - const dagreGraph = new dagre.graphlib.Graph() - dagreGraph.setDefaultEdgeLabel(() => ({})) - - const nodes = cloneDeep(originNodes).filter(node => node.parentId === parentNodeId) - const edges = cloneDeep(originEdges).filter(edge => - (edge.data?.isInIteration && edge.data?.iteration_id === parentNodeId) - || (edge.data?.isInLoop && edge.data?.loop_id === parentNodeId), - ) - - const startNode = nodes.find(node => - node.type === CUSTOM_ITERATION_START_NODE - || node.type === CUSTOM_LOOP_START_NODE - || node.data?.type === BlockEnum.LoopStart - || node.data?.type === BlockEnum.IterationStart, - ) - - if (!startNode) { - dagreGraph.setGraph({ - rankdir: 'LR', - align: 'UL', - nodesep: 40, - ranksep: 60, - marginx: NODE_LAYOUT_HORIZONTAL_PADDING, - marginy: NODE_LAYOUT_VERTICAL_PADDING, - }) - - nodes.forEach((node) => { - dagreGraph.setNode(node.id, { - width: node.width || 244, - height: node.height || 100, - }) - }) - - edges.forEach((edge) => { - dagreGraph.setEdge(edge.source, edge.target) - }) - - dagre.layout(dagreGraph) - return dagreGraph - } - - const startNodeOutEdges = edges.filter(edge => edge.source === startNode.id) - const firstConnectedNodes = startNodeOutEdges.map(edge => - nodes.find(node => node.id === edge.target), - ).filter(Boolean) as Node[] - - const nonStartNodes = nodes.filter(node => node.id !== startNode.id) - const nonStartEdges = edges.filter(edge => edge.source !== startNode.id && edge.target !== startNode.id) - - dagreGraph.setGraph({ - rankdir: 'LR', - align: 'UL', - nodesep: 40, - ranksep: 60, - marginx: NODE_LAYOUT_HORIZONTAL_PADDING / 2, - marginy: NODE_LAYOUT_VERTICAL_PADDING / 2, - }) - - nonStartNodes.forEach((node) => { - dagreGraph.setNode(node.id, { - width: node.width || 244, - height: node.height || 100, - }) - }) - - nonStartEdges.forEach((edge) => { - dagreGraph.setEdge(edge.source, edge.target) - }) - - dagre.layout(dagreGraph) - - const startNodeSize = { - width: startNode.width || 44, - height: startNode.height || 48, - } - - const startNodeX = NODE_LAYOUT_HORIZONTAL_PADDING / 1.5 - let startNodeY = 100 - - let minFirstLayerX = Infinity - let avgFirstLayerY = 0 - let firstLayerCount = 0 - - if (firstConnectedNodes.length > 0) { - firstConnectedNodes.forEach((node) => { - if (dagreGraph.node(node.id)) { - const nodePos = dagreGraph.node(node.id) - avgFirstLayerY += nodePos.y - firstLayerCount++ - minFirstLayerX = Math.min(minFirstLayerX, nodePos.x - nodePos.width / 2) - } - }) - - if (firstLayerCount > 0) { - avgFirstLayerY /= firstLayerCount - startNodeY = avgFirstLayerY - } - - const minRequiredX = startNodeX + startNodeSize.width + NODE_LAYOUT_MIN_DISTANCE - - if (minFirstLayerX < minRequiredX) { - const shiftX = minRequiredX - minFirstLayerX - - nonStartNodes.forEach((node) => { - if (dagreGraph.node(node.id)) { - const nodePos = dagreGraph.node(node.id) - dagreGraph.setNode(node.id, { - x: nodePos.x + shiftX, - y: nodePos.y, - width: nodePos.width, - height: nodePos.height, - }) - } - }) - } - } - - dagreGraph.setNode(startNode.id, { - x: startNodeX + startNodeSize.width / 2, - y: startNodeY, - width: 
startNodeSize.width, - height: startNodeSize.height, - }) - - startNodeOutEdges.forEach((edge) => { - dagreGraph.setEdge(edge.source, edge.target) - }) - - return dagreGraph -} diff --git a/web/app/components/workflow/utils/index.ts b/web/app/components/workflow/utils/index.ts index ab59f513bc..e9ae2d1ef0 100644 --- a/web/app/components/workflow/utils/index.ts +++ b/web/app/components/workflow/utils/index.ts @@ -1,7 +1,7 @@ export * from './node' export * from './edge' export * from './workflow-init' -export * from './dagre-layout' +export * from './layout' export * from './common' export * from './tool' export * from './workflow' diff --git a/web/app/components/workflow/utils/layout.ts b/web/app/components/workflow/utils/layout.ts new file mode 100644 index 0000000000..b3cf3b0d88 --- /dev/null +++ b/web/app/components/workflow/utils/layout.ts @@ -0,0 +1,529 @@ +import ELK from 'elkjs/lib/elk.bundled.js' +import type { ElkNode, LayoutOptions } from 'elkjs/lib/elk-api' +import { cloneDeep } from 'lodash-es' +import type { + Edge, + Node, +} from '../types' +import { + BlockEnum, +} from '../types' +import { + CUSTOM_NODE, + NODE_LAYOUT_HORIZONTAL_PADDING, + NODE_LAYOUT_VERTICAL_PADDING, +} from '../constants' +import { CUSTOM_ITERATION_START_NODE } from '../nodes/iteration-start/constants' +import { CUSTOM_LOOP_START_NODE } from '../nodes/loop-start/constants' +import type { CaseItem, IfElseNodeType } from '../nodes/if-else/types' + +// Although the file name refers to Dagre, the implementation now relies on ELK's layered algorithm. +// Keep the export signatures unchanged to minimise the blast radius while we migrate the layout stack. + +const elk = new ELK() + +const DEFAULT_NODE_WIDTH = 244 +const DEFAULT_NODE_HEIGHT = 100 + +const ROOT_LAYOUT_OPTIONS = { + 'elk.algorithm': 'layered', + 'elk.direction': 'RIGHT', + + // === Spacing - Maximum spacing to prevent any overlap === + 'elk.layered.spacing.nodeNodeBetweenLayers': '100', + 'elk.spacing.nodeNode': '80', + 'elk.spacing.edgeNode': '50', + 'elk.spacing.edgeEdge': '30', + 'elk.spacing.edgeLabel': '10', + 'elk.spacing.portPort': '20', + + // === Port Configuration === + 'elk.portConstraints': 'FIXED_ORDER', + 'elk.layered.considerModelOrder.strategy': 'PREFER_EDGES', + 'elk.port.side': 'SOUTH', + + // === Node Placement - Best quality === + 'elk.layered.nodePlacement.strategy': 'NETWORK_SIMPLEX', + 'elk.layered.nodePlacement.favorStraightEdges': 'true', + 'elk.layered.nodePlacement.linearSegments.deflectionDampening': '0.5', + 'elk.layered.nodePlacement.networkSimplex.nodeFlexibility': 'NODE_SIZE', + + // === Edge Routing - Maximum quality === + 'elk.edgeRouting': 'SPLINES', + 'elk.layered.edgeRouting.selfLoopPlacement': 'NORTH', + 'elk.layered.edgeRouting.sloppySplineRouting': 'false', + 'elk.layered.edgeRouting.splines.mode': 'CONSERVATIVE', + 'elk.layered.edgeRouting.splines.sloppy.layerSpacingFactor': '1.2', + + // === Crossing Minimization - Most aggressive === + 'elk.layered.crossingMinimization.strategy': 'LAYER_SWEEP', + 'elk.layered.crossingMinimization.greedySwitch.type': 'TWO_SIDED', + 'elk.layered.crossingMinimization.greedySwitchHierarchical.type': 'TWO_SIDED', + 'elk.layered.crossingMinimization.semiInteractive': 'true', + 'elk.layered.crossingMinimization.hierarchicalSweepiness': '0.9', + + // === Layering Strategy - Best quality === + 'elk.layered.layering.strategy': 'NETWORK_SIMPLEX', + 'elk.layered.layering.networkSimplex.nodeFlexibility': 'NODE_SIZE', + 'elk.layered.layering.layerConstraint': 'NONE', + 
'elk.layered.layering.minWidth.upperBoundOnWidth': '4', + + // === Cycle Breaking === + 'elk.layered.cycleBreaking.strategy': 'DEPTH_FIRST', + + // === Connected Components === + 'elk.separateConnectedComponents': 'true', + 'elk.spacing.componentComponent': '100', + + // === Node Size Constraints === + 'elk.nodeSize.constraints': 'NODE_LABELS', + 'elk.nodeSize.options': 'DEFAULT_MINIMUM_SIZE MINIMUM_SIZE_ACCOUNTS_FOR_PADDING', + + // === Edge Label Placement === + 'elk.edgeLabels.placement': 'CENTER', + 'elk.edgeLabels.inline': 'true', + + // === Compaction === + 'elk.layered.compaction.postCompaction.strategy': 'EDGE_LENGTH', + 'elk.layered.compaction.postCompaction.constraints': 'EDGE_LENGTH', + + // === High-Quality Mode === + 'elk.layered.thoroughness': '10', + 'elk.layered.wrapping.strategy': 'OFF', + 'elk.hierarchyHandling': 'INCLUDE_CHILDREN', + + // === Additional Optimizations === + 'elk.layered.feedbackEdges': 'true', + 'elk.layered.mergeEdges': 'false', + 'elk.layered.mergeHierarchyEdges': 'false', + 'elk.layered.allowNonFlowPortsToSwitchSides': 'false', + 'elk.layered.northOrSouthPort': 'false', + 'elk.partitioning.activate': 'false', + 'elk.junctionPoints': 'true', + + // === Content Alignment === + 'elk.contentAlignment': 'V_TOP H_LEFT', + 'elk.alignment': 'AUTOMATIC', +} + +const CHILD_LAYOUT_OPTIONS = { + 'elk.algorithm': 'layered', + 'elk.direction': 'RIGHT', + + // === Spacing - High quality for child nodes === + 'elk.layered.spacing.nodeNodeBetweenLayers': '80', + 'elk.spacing.nodeNode': '60', + 'elk.spacing.edgeNode': '40', + 'elk.spacing.edgeEdge': '25', + 'elk.spacing.edgeLabel': '8', + 'elk.spacing.portPort': '15', + + // === Node Placement - Best quality === + 'elk.layered.nodePlacement.strategy': 'NETWORK_SIMPLEX', + 'elk.layered.nodePlacement.favorStraightEdges': 'true', + 'elk.layered.nodePlacement.linearSegments.deflectionDampening': '0.5', + 'elk.layered.nodePlacement.networkSimplex.nodeFlexibility': 'NODE_SIZE', + + // === Edge Routing - Maximum quality === + 'elk.edgeRouting': 'SPLINES', + 'elk.layered.edgeRouting.sloppySplineRouting': 'false', + 'elk.layered.edgeRouting.splines.mode': 'CONSERVATIVE', + + // === Crossing Minimization - Aggressive === + 'elk.layered.crossingMinimization.strategy': 'LAYER_SWEEP', + 'elk.layered.crossingMinimization.greedySwitch.type': 'TWO_SIDED', + 'elk.layered.crossingMinimization.semiInteractive': 'true', + + // === Layering Strategy === + 'elk.layered.layering.strategy': 'NETWORK_SIMPLEX', + 'elk.layered.layering.networkSimplex.nodeFlexibility': 'NODE_SIZE', + + // === Cycle Breaking === + 'elk.layered.cycleBreaking.strategy': 'DEPTH_FIRST', + + // === Node Size === + 'elk.nodeSize.constraints': 'NODE_LABELS', + + // === Compaction === + 'elk.layered.compaction.postCompaction.strategy': 'EDGE_LENGTH', + + // === High-Quality Mode === + 'elk.layered.thoroughness': '10', + 'elk.hierarchyHandling': 'INCLUDE_CHILDREN', + + // === Additional Optimizations === + 'elk.layered.feedbackEdges': 'true', + 'elk.layered.mergeEdges': 'false', + 'elk.junctionPoints': 'true', +} + +type LayoutInfo = { + x: number + y: number + width: number + height: number + layer?: number +} + +type LayoutBounds = { + minX: number + minY: number + maxX: number + maxY: number +} + +export type LayoutResult = { + nodes: Map + bounds: LayoutBounds +} + +// ELK Port definition for native port support +type ElkPortShape = { + id: string + layoutOptions?: LayoutOptions +} + +type ElkNodeShape = { + id: string + width: number + height: number + ports?: 
ElkPortShape[] + layoutOptions?: LayoutOptions + children?: ElkNodeShape[] +} + +type ElkEdgeShape = { + id: string + sources: string[] + targets: string[] + sourcePort?: string + targetPort?: string +} + +const toElkNode = (node: Node): ElkNodeShape => ({ + id: node.id, + width: node.width ?? DEFAULT_NODE_WIDTH, + height: node.height ?? DEFAULT_NODE_HEIGHT, +}) + +let edgeCounter = 0 +const nextEdgeId = () => `elk-edge-${edgeCounter++}` + +const createEdge = ( + source: string, + target: string, + sourcePort?: string, + targetPort?: string, +): ElkEdgeShape => ({ + id: nextEdgeId(), + sources: [source], + targets: [target], + sourcePort, + targetPort, +}) + +const collectLayout = (graph: ElkNode, predicate: (id: string) => boolean): LayoutResult => { + const result = new Map() + let minX = Infinity + let minY = Infinity + let maxX = -Infinity + let maxY = -Infinity + + const visit = (node: ElkNode) => { + node.children?.forEach((child: ElkNode) => { + if (predicate(child.id)) { + const x = child.x ?? 0 + const y = child.y ?? 0 + const width = child.width ?? DEFAULT_NODE_WIDTH + const height = child.height ?? DEFAULT_NODE_HEIGHT + const layer = child?.layoutOptions?.['org.eclipse.elk.layered.layerIndex'] + + result.set(child.id, { + x, + y, + width, + height, + layer: layer ? Number.parseInt(layer) : undefined, + }) + + minX = Math.min(minX, x) + minY = Math.min(minY, y) + maxX = Math.max(maxX, x + width) + maxY = Math.max(maxY, y + height) + } + + if (child.children?.length) + visit(child) + }) + } + + visit(graph) + + if (!Number.isFinite(minX) || !Number.isFinite(minY)) { + minX = 0 + minY = 0 + maxX = 0 + maxY = 0 + } + + return { + nodes: result, + bounds: { + minX, + minY, + maxX, + maxY, + }, + } +} + +/** + * Build If/Else node with ELK native Ports instead of dummy nodes + * This is the recommended approach for handling multiple branches + */ +const buildIfElseWithPorts = ( + ifElseNode: Node, + edges: Edge[], +): { node: ElkNodeShape; portMap: Map } | null => { + const childEdges = edges.filter(edge => edge.source === ifElseNode.id) + + if (childEdges.length <= 1) + return null + + // Sort child edges according to case order + const sortedChildEdges = [...childEdges].sort((edgeA, edgeB) => { + const handleA = edgeA.sourceHandle + const handleB = edgeB.sourceHandle + + if (handleA && handleB) { + const cases = (ifElseNode.data as IfElseNodeType).cases || [] + const isAElse = handleA === 'false' + const isBElse = handleB === 'false' + + if (isAElse) + return 1 + if (isBElse) + return -1 + + const indexA = cases.findIndex((c: CaseItem) => c.case_id === handleA) + const indexB = cases.findIndex((c: CaseItem) => c.case_id === handleB) + + if (indexA !== -1 && indexB !== -1) + return indexA - indexB + } + + return 0 + }) + + // Create ELK ports for each branch + const ports: ElkPortShape[] = sortedChildEdges.map((edge, index) => ({ + id: `${ifElseNode.id}-port-${edge.sourceHandle || index}`, + layoutOptions: { + 'port.side': 'EAST', // Ports on the right side (matching 'RIGHT' direction) + 'port.index': String(index), + }, + })) + + // Build port mapping: sourceHandle -> portId + const portMap = new Map() + sortedChildEdges.forEach((edge, index) => { + const portId = `${ifElseNode.id}-port-${edge.sourceHandle || index}` + portMap.set(edge.id, portId) + }) + + return { + node: { + id: ifElseNode.id, + width: ifElseNode.width ?? DEFAULT_NODE_WIDTH, + height: ifElseNode.height ?? 
DEFAULT_NODE_HEIGHT, + ports, + layoutOptions: { + 'elk.portConstraints': 'FIXED_ORDER', + }, + }, + portMap, + } +} + +const normaliseBounds = (layout: LayoutResult): LayoutResult => { + const { + nodes, + bounds, + } = layout + + if (nodes.size === 0) + return layout + + const offsetX = bounds.minX + const offsetY = bounds.minY + + const adjustedNodes = new Map() + nodes.forEach((info, id) => { + adjustedNodes.set(id, { + ...info, + x: info.x - offsetX, + y: info.y - offsetY, + }) + }) + + return { + nodes: adjustedNodes, + bounds: { + minX: 0, + minY: 0, + maxX: bounds.maxX - offsetX, + maxY: bounds.maxY - offsetY, + }, + } +} + +export const getLayoutByDagre = async (originNodes: Node[], originEdges: Edge[]): Promise => { + edgeCounter = 0 + const nodes = cloneDeep(originNodes).filter(node => !node.parentId && node.type === CUSTOM_NODE) + const edges = cloneDeep(originEdges).filter(edge => (!edge.data?.isInIteration && !edge.data?.isInLoop)) + + const elkNodes: ElkNodeShape[] = [] + const elkEdges: ElkEdgeShape[] = [] + + // Track which edges have been processed for If/Else nodes with ports + const edgeToPortMap = new Map() + + // Build nodes with ports for If/Else nodes + nodes.forEach((node) => { + if (node.data.type === BlockEnum.IfElse) { + const portsResult = buildIfElseWithPorts(node, edges) + if (portsResult) { + // Use node with ports + elkNodes.push(portsResult.node) + // Store port mappings for edges + portsResult.portMap.forEach((portId, edgeId) => { + edgeToPortMap.set(edgeId, portId) + }) + } + else { + // No multiple branches, use normal node + elkNodes.push(toElkNode(node)) + } + } + else { + elkNodes.push(toElkNode(node)) + } + }) + + // Build edges with port connections + edges.forEach((edge) => { + const sourcePort = edgeToPortMap.get(edge.id) + elkEdges.push(createEdge(edge.source, edge.target, sourcePort)) + }) + + const graph = { + id: 'workflow-root', + layoutOptions: ROOT_LAYOUT_OPTIONS, + children: elkNodes, + edges: elkEdges, + } + + const layoutedGraph = await elk.layout(graph) + // No need to filter dummy nodes anymore, as we're using ports + const layout = collectLayout(layoutedGraph, () => true) + return normaliseBounds(layout) +} + +const normaliseChildLayout = ( + layout: LayoutResult, + nodes: Node[], +): LayoutResult => { + const result = new Map() + layout.nodes.forEach((info, id) => { + result.set(id, info) + }) + + // Ensure iteration / loop start nodes do not collapse into the children. 
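// (Editor's sketch of the intent here, not part of the patch: when the start
// node has drifted right of the padding, every child is shifted left by
//   shiftX = startLayout.x - NODE_LAYOUT_HORIZONTAL_PADDING / 1.5
// and the layout is then re-based vertically around the start node, so the
// children keep their relative geometry while the start node stays pinned at
// the container's left padding.)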
+ const startNode = nodes.find(node => + node.type === CUSTOM_ITERATION_START_NODE + || node.type === CUSTOM_LOOP_START_NODE + || node.data?.type === BlockEnum.LoopStart + || node.data?.type === BlockEnum.IterationStart, + ) + + if (startNode) { + const startLayout = result.get(startNode.id) + + if (startLayout) { + const desiredMinX = NODE_LAYOUT_HORIZONTAL_PADDING / 1.5 + if (startLayout.x > desiredMinX) { + const shiftX = startLayout.x - desiredMinX + result.forEach((value, key) => { + result.set(key, { + ...value, + x: value.x - shiftX, + }) + }) + } + + const desiredMinY = startLayout.y + const deltaY = NODE_LAYOUT_VERTICAL_PADDING / 2 + result.forEach((value, key) => { + result.set(key, { + ...value, + y: value.y - desiredMinY + deltaY, + }) + }) + } + } + + let minX = Infinity + let minY = Infinity + let maxX = -Infinity + let maxY = -Infinity + + result.forEach((value) => { + minX = Math.min(minX, value.x) + minY = Math.min(minY, value.y) + maxX = Math.max(maxX, value.x + value.width) + maxY = Math.max(maxY, value.y + value.height) + }) + + if (!Number.isFinite(minX) || !Number.isFinite(minY)) + return layout + + return normaliseBounds({ + nodes: result, + bounds: { + minX, + minY, + maxX, + maxY, + }, + }) +} + +export const getLayoutForChildNodes = async ( + parentNodeId: string, + originNodes: Node[], + originEdges: Edge[], +): Promise => { + edgeCounter = 0 + const nodes = cloneDeep(originNodes).filter(node => node.parentId === parentNodeId) + if (!nodes.length) + return null + + const edges = cloneDeep(originEdges).filter(edge => + (edge.data?.isInIteration && edge.data?.iteration_id === parentNodeId) + || (edge.data?.isInLoop && edge.data?.loop_id === parentNodeId), + ) + + const elkNodes: ElkNodeShape[] = nodes.map(toElkNode) + const elkEdges: ElkEdgeShape[] = edges.map(edge => createEdge(edge.source, edge.target)) + + const graph = { + id: parentNodeId, + layoutOptions: CHILD_LAYOUT_OPTIONS, + children: elkNodes, + edges: elkEdges, + } + + const layoutedGraph = await elk.layout(graph) + const layout = collectLayout(layoutedGraph, () => true) + return normaliseChildLayout(layout, nodes) +} From c20e0ad90d5b5e86db3fa435c71cb1f30f21f68e Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Sun, 5 Oct 2025 15:57:42 +0900 Subject: [PATCH 06/31] =?UTF-8?q?Removes=20the=20'extensions'=20directory?= =?UTF-8?q?=20from=20pyrightconfig.json=20and=20fixes=20=E2=80=A6=20(#2651?= =?UTF-8?q?2)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- api/extensions/ext_app_metrics.py | 8 +-- api/extensions/ext_database.py | 10 +-- api/extensions/ext_import_modules.py | 2 +- api/extensions/storage/aliyun_oss_storage.py | 4 +- api/extensions/storage/aws_s3_storage.py | 8 +-- api/extensions/storage/azure_blob_storage.py | 22 ++++++- .../clickzetta_volume_storage.py | 19 +++--- .../clickzetta_volume/volume_permissions.py | 9 ++- .../storage/google_cloud_storage.py | 6 ++ api/extensions/storage/huawei_obs_storage.py | 2 +- api/extensions/storage/oracle_oci_storage.py | 4 +- api/extensions/storage/supabase_storage.py | 6 +- .../storage/volcengine_tos_storage.py | 20 ++++++ api/pyrightconfig.json | 1 - .../storage/test_supabase_storage.py | 62 +++---------------- .../oss/volcengine_tos/test_volcengine_tos.py | 10 ++- 16 files changed, 106 insertions(+), 87 deletions(-) diff --git 
a/api/extensions/ext_app_metrics.py b/api/extensions/ext_app_metrics.py index 56a69a1862..4a6490b9f0 100644 --- a/api/extensions/ext_app_metrics.py +++ b/api/extensions/ext_app_metrics.py @@ -10,14 +10,14 @@ from dify_app import DifyApp def init_app(app: DifyApp): @app.after_request - def after_request(response): + def after_request(response): # pyright: ignore[reportUnusedFunction] """Add Version headers to the response.""" response.headers.add("X-Version", dify_config.project.version) response.headers.add("X-Env", dify_config.DEPLOY_ENV) return response @app.route("/health") - def health(): + def health(): # pyright: ignore[reportUnusedFunction] return Response( json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.project.version}), status=200, @@ -25,7 +25,7 @@ def init_app(app: DifyApp): ) @app.route("/threads") - def threads(): + def threads(): # pyright: ignore[reportUnusedFunction] num_threads = threading.active_count() threads = threading.enumerate() @@ -50,7 +50,7 @@ def init_app(app: DifyApp): } @app.route("/db-pool-stat") - def pool_stat(): + def pool_stat(): # pyright: ignore[reportUnusedFunction] from extensions.ext_database import db engine = db.engine diff --git a/api/extensions/ext_database.py b/api/extensions/ext_database.py index 067ce39e4f..c90b1d0a9f 100644 --- a/api/extensions/ext_database.py +++ b/api/extensions/ext_database.py @@ -10,7 +10,7 @@ from models.engine import db logger = logging.getLogger(__name__) # Global flag to avoid duplicate registration of event listener -_GEVENT_COMPATIBILITY_SETUP: bool = False +_gevent_compatibility_setup: bool = False def _safe_rollback(connection): @@ -26,14 +26,14 @@ def _safe_rollback(connection): def _setup_gevent_compatibility(): - global _GEVENT_COMPATIBILITY_SETUP # pylint: disable=global-statement + global _gevent_compatibility_setup # pylint: disable=global-statement # Avoid duplicate registration - if _GEVENT_COMPATIBILITY_SETUP: + if _gevent_compatibility_setup: return @event.listens_for(Pool, "reset") - def _safe_reset(dbapi_connection, connection_record, reset_state): # pylint: disable=unused-argument + def _safe_reset(dbapi_connection, connection_record, reset_state): # pyright: ignore[reportUnusedFunction] if reset_state.terminate_only: return @@ -47,7 +47,7 @@ def _setup_gevent_compatibility(): except (AttributeError, ImportError): _safe_rollback(dbapi_connection) - _GEVENT_COMPATIBILITY_SETUP = True + _gevent_compatibility_setup = True def init_app(app: DifyApp): diff --git a/api/extensions/ext_import_modules.py b/api/extensions/ext_import_modules.py index 9566f430b6..4eb363ff93 100644 --- a/api/extensions/ext_import_modules.py +++ b/api/extensions/ext_import_modules.py @@ -2,4 +2,4 @@ from dify_app import DifyApp def init_app(app: DifyApp): - from events import event_handlers # noqa: F401 + from events import event_handlers # noqa: F401 # pyright: ignore[reportUnusedImport] diff --git a/api/extensions/storage/aliyun_oss_storage.py b/api/extensions/storage/aliyun_oss_storage.py index 00bf5d4f93..5da4737138 100644 --- a/api/extensions/storage/aliyun_oss_storage.py +++ b/api/extensions/storage/aliyun_oss_storage.py @@ -33,7 +33,9 @@ class AliyunOssStorage(BaseStorage): def load_once(self, filename: str) -> bytes: obj = self.client.get_object(self.__wrapper_folder_filename(filename)) - data: bytes = obj.read() + data = obj.read() + if not isinstance(data, bytes): + return b"" return data def load_stream(self, filename: str) -> Generator: diff --git a/api/extensions/storage/aws_s3_storage.py 
b/api/extensions/storage/aws_s3_storage.py index e755ab089a..6ab2a95e3c 100644 --- a/api/extensions/storage/aws_s3_storage.py +++ b/api/extensions/storage/aws_s3_storage.py @@ -39,10 +39,10 @@ class AwsS3Storage(BaseStorage): self.client.head_bucket(Bucket=self.bucket_name) except ClientError as e: # if bucket not exists, create it - if e.response["Error"]["Code"] == "404": + if e.response.get("Error", {}).get("Code") == "404": self.client.create_bucket(Bucket=self.bucket_name) # if bucket is not accessible, pass, maybe the bucket is existing but not accessible - elif e.response["Error"]["Code"] == "403": + elif e.response.get("Error", {}).get("Code") == "403": pass else: # other error, raise exception @@ -55,7 +55,7 @@ class AwsS3Storage(BaseStorage): try: data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("File not found") else: raise @@ -66,7 +66,7 @@ class AwsS3Storage(BaseStorage): response = self.client.get_object(Bucket=self.bucket_name, Key=filename) yield from response["Body"].iter_chunks() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("file not found") elif "reached max retries" in str(ex): raise ValueError("please do not request the same file too frequently") diff --git a/api/extensions/storage/azure_blob_storage.py b/api/extensions/storage/azure_blob_storage.py index 9053aece89..4bccaf13c8 100644 --- a/api/extensions/storage/azure_blob_storage.py +++ b/api/extensions/storage/azure_blob_storage.py @@ -27,24 +27,38 @@ class AzureBlobStorage(BaseStorage): self.credential = None def save(self, filename, data): + if not self.bucket_name: + return + client = self._sync_client() blob_container = client.get_container_client(container=self.bucket_name) blob_container.upload_blob(filename, data) def load_once(self, filename: str) -> bytes: + if not self.bucket_name: + raise FileNotFoundError("Azure bucket name is not configured.") + client = self._sync_client() blob = client.get_container_client(container=self.bucket_name) blob = blob.get_blob_client(blob=filename) - data: bytes = blob.download_blob().readall() + data = blob.download_blob().readall() + if not isinstance(data, bytes): + raise TypeError(f"Expected bytes from blob.readall(), got {type(data).__name__}") return data def load_stream(self, filename: str) -> Generator: + if not self.bucket_name: + raise FileNotFoundError("Azure bucket name is not configured.") + client = self._sync_client() blob = client.get_blob_client(container=self.bucket_name, blob=filename) blob_data = blob.download_blob() yield from blob_data.chunks() def download(self, filename, target_filepath): + if not self.bucket_name: + return + client = self._sync_client() blob = client.get_blob_client(container=self.bucket_name, blob=filename) @@ -53,12 +67,18 @@ class AzureBlobStorage(BaseStorage): blob_data.readinto(my_blob) def exists(self, filename): + if not self.bucket_name: + return False + client = self._sync_client() blob = client.get_blob_client(container=self.bucket_name, blob=filename) return blob.exists() def delete(self, filename): + if not self.bucket_name: + return + client = self._sync_client() blob_container = client.get_container_client(container=self.bucket_name) diff --git 
a/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py b/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py index 2ffac9a92d..06c528ca41 100644 --- a/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py +++ b/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py @@ -430,7 +430,7 @@ class ClickZettaVolumeStorage(BaseStorage): rows = self._execute_sql(sql, fetch=True) - exists = len(rows) > 0 + exists = len(rows) > 0 if rows else False logger.debug("File %s exists check: %s", filename, exists) return exists except Exception as e: @@ -509,16 +509,17 @@ class ClickZettaVolumeStorage(BaseStorage): rows = self._execute_sql(sql, fetch=True) result = [] - for row in rows: - file_path = row[0] # relative_path column + if rows: + for row in rows: + file_path = row[0] # relative_path column - # For User Volume, remove dify prefix from results - dify_prefix_with_slash = f"{self._config.dify_prefix}/" - if volume_prefix == "USER VOLUME" and file_path.startswith(dify_prefix_with_slash): - file_path = file_path[len(dify_prefix_with_slash) :] # Remove prefix + # For User Volume, remove dify prefix from results + dify_prefix_with_slash = f"{self._config.dify_prefix}/" + if volume_prefix == "USER VOLUME" and file_path.startswith(dify_prefix_with_slash): + file_path = file_path[len(dify_prefix_with_slash) :] # Remove prefix - if files and not file_path.endswith("/") or directories and file_path.endswith("/"): - result.append(file_path) + if files and not file_path.endswith("/") or directories and file_path.endswith("/"): + result.append(file_path) logger.debug("Scanned %d items in path %s", len(result), path) return result diff --git a/api/extensions/storage/clickzetta_volume/volume_permissions.py b/api/extensions/storage/clickzetta_volume/volume_permissions.py index eb1116638f..6dcf800abb 100644 --- a/api/extensions/storage/clickzetta_volume/volume_permissions.py +++ b/api/extensions/storage/clickzetta_volume/volume_permissions.py @@ -439,6 +439,11 @@ class VolumePermissionManager: self._permission_cache.clear() logger.debug("Permission cache cleared") + @property + def volume_type(self) -> str | None: + """Get the volume type.""" + return self._volume_type + def get_permission_summary(self, dataset_id: str | None = None) -> dict[str, bool]: """Get permission summary @@ -632,13 +637,13 @@ def check_volume_permission(permission_manager: VolumePermissionManager, operati VolumePermissionError: If no permission """ if not permission_manager.validate_operation(operation, dataset_id): - error_message = f"Permission denied for operation '{operation}' on {permission_manager._volume_type} volume" + error_message = f"Permission denied for operation '{operation}' on {permission_manager.volume_type} volume" if dataset_id: error_message += f" (dataset: {dataset_id})" raise VolumePermissionError( error_message, operation=operation, - volume_type=permission_manager._volume_type or "unknown", + volume_type=permission_manager.volume_type or "unknown", dataset_id=dataset_id, ) diff --git a/api/extensions/storage/google_cloud_storage.py b/api/extensions/storage/google_cloud_storage.py index 705639f42e..7f59252f2f 100644 --- a/api/extensions/storage/google_cloud_storage.py +++ b/api/extensions/storage/google_cloud_storage.py @@ -35,12 +35,16 @@ class GoogleCloudStorage(BaseStorage): def load_once(self, filename: str) -> bytes: bucket = self.client.get_bucket(self.bucket_name) blob = bucket.get_blob(filename) + if blob is None: + raise FileNotFoundError("File 
not found") data: bytes = blob.download_as_bytes() return data def load_stream(self, filename: str) -> Generator: bucket = self.client.get_bucket(self.bucket_name) blob = bucket.get_blob(filename) + if blob is None: + raise FileNotFoundError("File not found") with blob.open(mode="rb") as blob_stream: while chunk := blob_stream.read(4096): yield chunk @@ -48,6 +52,8 @@ class GoogleCloudStorage(BaseStorage): def download(self, filename, target_filepath): bucket = self.client.get_bucket(self.bucket_name) blob = bucket.get_blob(filename) + if blob is None: + raise FileNotFoundError("File not found") blob.download_to_filename(target_filepath) def exists(self, filename): diff --git a/api/extensions/storage/huawei_obs_storage.py b/api/extensions/storage/huawei_obs_storage.py index 07f1d19970..3e75ecb7a9 100644 --- a/api/extensions/storage/huawei_obs_storage.py +++ b/api/extensions/storage/huawei_obs_storage.py @@ -45,7 +45,7 @@ class HuaweiObsStorage(BaseStorage): def _get_meta(self, filename): res = self.client.getObjectMetadata(bucketName=self.bucket_name, objectKey=filename) - if res.status < 300: + if res and res.status and res.status < 300: return res else: return None diff --git a/api/extensions/storage/oracle_oci_storage.py b/api/extensions/storage/oracle_oci_storage.py index 82829f7fd5..acc00cbd6b 100644 --- a/api/extensions/storage/oracle_oci_storage.py +++ b/api/extensions/storage/oracle_oci_storage.py @@ -29,7 +29,7 @@ class OracleOCIStorage(BaseStorage): try: data: bytes = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("File not found") else: raise @@ -40,7 +40,7 @@ class OracleOCIStorage(BaseStorage): response = self.client.get_object(Bucket=self.bucket_name, Key=filename) yield from response["Body"].iter_chunks() except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response.get("Error", {}).get("Code") == "NoSuchKey": raise FileNotFoundError("File not found") else: raise diff --git a/api/extensions/storage/supabase_storage.py b/api/extensions/storage/supabase_storage.py index 711c3f7211..2ca84d4c15 100644 --- a/api/extensions/storage/supabase_storage.py +++ b/api/extensions/storage/supabase_storage.py @@ -46,13 +46,13 @@ class SupabaseStorage(BaseStorage): Path(target_filepath).write_bytes(result) def exists(self, filename): - result = self.client.storage.from_(self.bucket_name).list(filename) - if result.count() > 0: + result = self.client.storage.from_(self.bucket_name).list(path=filename) + if len(result) > 0: return True return False def delete(self, filename): - self.client.storage.from_(self.bucket_name).remove(filename) + self.client.storage.from_(self.bucket_name).remove([filename]) def bucket_exists(self): buckets = self.client.storage.list_buckets() diff --git a/api/extensions/storage/volcengine_tos_storage.py b/api/extensions/storage/volcengine_tos_storage.py index 32839d3497..8ed8e4c170 100644 --- a/api/extensions/storage/volcengine_tos_storage.py +++ b/api/extensions/storage/volcengine_tos_storage.py @@ -11,6 +11,14 @@ class VolcengineTosStorage(BaseStorage): def __init__(self): super().__init__() + if not dify_config.VOLCENGINE_TOS_ACCESS_KEY: + raise ValueError("VOLCENGINE_TOS_ACCESS_KEY is not set") + if not dify_config.VOLCENGINE_TOS_SECRET_KEY: + raise ValueError("VOLCENGINE_TOS_SECRET_KEY is not set") + if not 
dify_config.VOLCENGINE_TOS_ENDPOINT: + raise ValueError("VOLCENGINE_TOS_ENDPOINT is not set") + if not dify_config.VOLCENGINE_TOS_REGION: + raise ValueError("VOLCENGINE_TOS_REGION is not set") self.bucket_name = dify_config.VOLCENGINE_TOS_BUCKET_NAME self.client = tos.TosClientV2( ak=dify_config.VOLCENGINE_TOS_ACCESS_KEY, @@ -20,27 +28,39 @@ class VolcengineTosStorage(BaseStorage): ) def save(self, filename, data): + if not self.bucket_name: + raise ValueError("VOLCENGINE_TOS_BUCKET_NAME is not set") self.client.put_object(bucket=self.bucket_name, key=filename, content=data) def load_once(self, filename: str) -> bytes: + if not self.bucket_name: + raise FileNotFoundError("VOLCENGINE_TOS_BUCKET_NAME is not set") data = self.client.get_object(bucket=self.bucket_name, key=filename).read() if not isinstance(data, bytes): raise TypeError(f"Expected bytes, got {type(data).__name__}") return data def load_stream(self, filename: str) -> Generator: + if not self.bucket_name: + raise FileNotFoundError("VOLCENGINE_TOS_BUCKET_NAME is not set") response = self.client.get_object(bucket=self.bucket_name, key=filename) while chunk := response.read(4096): yield chunk def download(self, filename, target_filepath): + if not self.bucket_name: + raise ValueError("VOLCENGINE_TOS_BUCKET_NAME is not set") self.client.get_object_to_file(bucket=self.bucket_name, key=filename, file_path=target_filepath) def exists(self, filename): + if not self.bucket_name: + return False res = self.client.head_object(bucket=self.bucket_name, key=filename) if res.status_code != 200: return False return True def delete(self, filename): + if not self.bucket_name: + return self.client.delete_object(bucket=self.bucket_name, key=filename) diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json index 00dda8b087..d98e30bb80 100644 --- a/api/pyrightconfig.json +++ b/api/pyrightconfig.json @@ -5,7 +5,6 @@ ".venv", "migrations/", "core/rag", - "extensions", "core/app/app_config/easy_ui_based_app/dataset" ], "typeCheckingMode": "strict", diff --git a/api/tests/unit_tests/extensions/storage/test_supabase_storage.py b/api/tests/unit_tests/extensions/storage/test_supabase_storage.py index 958072223e..476f87269c 100644 --- a/api/tests/unit_tests/extensions/storage/test_supabase_storage.py +++ b/api/tests/unit_tests/extensions/storage/test_supabase_storage.py @@ -172,73 +172,31 @@ class TestSupabaseStorage: assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] mock_client.storage.from_().download.assert_called_with("test.txt") - def test_exists_with_list_containing_items(self, storage_with_mock_client): - """Test exists returns True when list() returns items (using len() > 0).""" + def test_exists_returns_true_when_file_found(self, storage_with_mock_client): + """Test exists returns True when list() returns items.""" storage, mock_client = storage_with_mock_client - # Mock list return with special object that has count() method - mock_list_result = Mock() - mock_list_result.count.return_value = 1 - mock_client.storage.from_().list.return_value = mock_list_result + mock_client.storage.from_().list.return_value = [{"name": "test.txt"}] result = storage.exists("test.txt") assert result is True - # from_ gets called during init too, so just check it was called with the right bucket assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") + 
mock_client.storage.from_().list.assert_called_with(path="test.txt") - def test_exists_with_count_method_greater_than_zero(self, storage_with_mock_client): - """Test exists returns True when list result has count() > 0.""" + def test_exists_returns_false_when_file_not_found(self, storage_with_mock_client): + """Test exists returns False when list() returns an empty list.""" storage, mock_client = storage_with_mock_client - # Mock list return with count() method - mock_list_result = Mock() - mock_list_result.count.return_value = 1 - mock_client.storage.from_().list.return_value = mock_list_result - - result = storage.exists("test.txt") - - assert result is True - # Verify the correct calls were made - assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") - mock_list_result.count.assert_called() - - def test_exists_with_count_method_zero(self, storage_with_mock_client): - """Test exists returns False when list result has count() == 0.""" - storage, mock_client = storage_with_mock_client - - # Mock list return with count() method returning 0 - mock_list_result = Mock() - mock_list_result.count.return_value = 0 - mock_client.storage.from_().list.return_value = mock_list_result + mock_client.storage.from_().list.return_value = [] result = storage.exists("test.txt") assert result is False - # Verify the correct calls were made assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") - mock_list_result.count.assert_called() + mock_client.storage.from_().list.assert_called_with(path="test.txt") - def test_exists_with_empty_list(self, storage_with_mock_client): - """Test exists returns False when list() returns empty list.""" - storage, mock_client = storage_with_mock_client - - # Mock list return with special object that has count() method returning 0 - mock_list_result = Mock() - mock_list_result.count.return_value = 0 - mock_client.storage.from_().list.return_value = mock_list_result - - result = storage.exists("test.txt") - - assert result is False - # Verify the correct calls were made - assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] - mock_client.storage.from_().list.assert_called_with("test.txt") - - def test_delete_calls_remove_with_filename(self, storage_with_mock_client): + def test_delete_calls_remove_with_filename_in_list(self, storage_with_mock_client): """Test delete calls remove([...]) (some client versions require a list).""" storage, mock_client = storage_with_mock_client @@ -247,7 +205,7 @@ class TestSupabaseStorage: storage.delete(filename) mock_client.storage.from_.assert_called_once_with("test-bucket") - mock_client.storage.from_().remove.assert_called_once_with(filename) + mock_client.storage.from_().remove.assert_called_once_with([filename]) def test_bucket_exists_returns_true_when_bucket_found(self): """Test bucket_exists returns True when bucket is found in list.""" diff --git a/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py b/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py index 04988e85d8..1659205ec0 100644 --- a/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py +++ b/api/tests/unit_tests/oss/volcengine_tos/test_volcengine_tos.py @@ -1,3 +1,5 @@ +from unittest.mock import patch + import pytest from tos import TosClientV2 # type: ignore @@ -13,7 +15,13 @@ 
class TestVolcengineTos(BaseStorageTest): @pytest.fixture(autouse=True) def setup_method(self, setup_volcengine_tos_mock): """Executed before each test method.""" - self.storage = VolcengineTosStorage() + with patch("extensions.storage.volcengine_tos_storage.dify_config") as mock_config: + mock_config.VOLCENGINE_TOS_ACCESS_KEY = "test_access_key" + mock_config.VOLCENGINE_TOS_SECRET_KEY = "test_secret_key" + mock_config.VOLCENGINE_TOS_ENDPOINT = "test_endpoint" + mock_config.VOLCENGINE_TOS_REGION = "test_region" + self.storage = VolcengineTosStorage() + self.storage.bucket_name = get_example_bucket() self.storage.client = TosClientV2( ak="dify", From 10be9cfbbf652c006fa148028c0f6030b5bc75d0 Mon Sep 17 00:00:00 2001 From: Bowen Liang Date: Mon, 6 Oct 2025 10:39:28 +0800 Subject: [PATCH 07/31] chore: fix basedwright style warning for opendal.layers imports (#26596) --- api/extensions/storage/opendal_storage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api/extensions/storage/opendal_storage.py b/api/extensions/storage/opendal_storage.py index b10391c7f1..f7146adba6 100644 --- a/api/extensions/storage/opendal_storage.py +++ b/api/extensions/storage/opendal_storage.py @@ -3,9 +3,9 @@ import os from collections.abc import Generator from pathlib import Path +import opendal from dotenv import dotenv_values from opendal import Operator -from opendal.layers import RetryLayer from extensions.storage.base_storage import BaseStorage @@ -35,7 +35,7 @@ class OpenDALStorage(BaseStorage): root = kwargs.get("root", "storage") Path(root).mkdir(parents=True, exist_ok=True) - retry_layer = RetryLayer(max_times=3, factor=2.0, jitter=True) + retry_layer = opendal.layers.RetryLayer(max_times=3, factor=2.0, jitter=True) self.op = Operator(scheme=scheme, **kwargs).layer(retry_layer) logger.debug("opendal operator created with scheme %s", scheme) logger.debug("added retry layer to opendal operator") From 4a475bf1cd65aef0ad2d37482f6a9bc089b3d718 Mon Sep 17 00:00:00 2001 From: -LAN- Date: Mon, 6 Oct 2025 10:40:13 +0800 Subject: [PATCH 08/31] chore: Raise default string length limits (#26592) Signed-off-by: -LAN- Co-authored-by: Bowen Liang --- api/.env.example | 4 ++-- api/configs/feature/__init__.py | 7 ++++++- .../nodes/template_transform/template_transform_node.py | 4 ++-- api/pytest.ini | 2 +- api/tests/integration_tests/workflow/nodes/test_code.py | 4 ++-- api/tests/unit_tests/configs/test_dify_config.py | 1 + docker/.env.example | 4 ++-- docker/docker-compose.yaml | 4 ++-- 8 files changed, 18 insertions(+), 12 deletions(-) diff --git a/api/.env.example b/api/.env.example index d53de3779b..a462bfdbec 100644 --- a/api/.env.example +++ b/api/.env.example @@ -427,8 +427,8 @@ CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20 CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0 CODE_MAX_NUMBER=9223372036854775807 CODE_MIN_NUMBER=-9223372036854775808 -CODE_MAX_STRING_LENGTH=80000 -TEMPLATE_TRANSFORM_MAX_LENGTH=80000 +CODE_MAX_STRING_LENGTH=400000 +TEMPLATE_TRANSFORM_MAX_LENGTH=400000 CODE_MAX_STRING_ARRAY_LENGTH=30 CODE_MAX_OBJECT_ARRAY_LENGTH=30 CODE_MAX_NUMBER_ARRAY_LENGTH=1000 diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 363cf4e2b5..42c88dda8b 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -150,7 +150,7 @@ class CodeExecutionSandboxConfig(BaseSettings): CODE_MAX_STRING_LENGTH: PositiveInt = Field( description="Maximum allowed length for strings in code execution", - default=80000, + default=400_000, ) 
CODE_MAX_STRING_ARRAY_LENGTH: PositiveInt = Field( @@ -582,6 +582,11 @@ class WorkflowConfig(BaseSettings): default=200 * 1024, ) + TEMPLATE_TRANSFORM_MAX_LENGTH: PositiveInt = Field( + description="Maximum number of characters allowed in Template Transform node output", + default=400_000, + ) + # GraphEngine Worker Pool Configuration GRAPH_ENGINE_MIN_WORKERS: PositiveInt = Field( description="Minimum number of workers per GraphEngine instance", diff --git a/api/core/workflow/nodes/template_transform/template_transform_node.py b/api/core/workflow/nodes/template_transform/template_transform_node.py index cf05ef253a..254a8318b5 100644 --- a/api/core/workflow/nodes/template_transform/template_transform_node.py +++ b/api/core/workflow/nodes/template_transform/template_transform_node.py @@ -1,7 +1,7 @@ -import os from collections.abc import Mapping, Sequence from typing import Any +from configs import dify_config from core.helper.code_executor.code_executor import CodeExecutionError, CodeExecutor, CodeLanguage from core.workflow.enums import ErrorStrategy, NodeType, WorkflowNodeExecutionStatus from core.workflow.node_events import NodeRunResult @@ -9,7 +9,7 @@ from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.template_transform.entities import TemplateTransformNodeData -MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = int(os.environ.get("TEMPLATE_TRANSFORM_MAX_LENGTH", "80000")) +MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH class TemplateTransformNode(Node): diff --git a/api/pytest.ini b/api/pytest.ini index eb49619481..afb53b47cc 100644 --- a/api/pytest.ini +++ b/api/pytest.ini @@ -7,7 +7,7 @@ env = CHATGLM_API_BASE = http://a.abc.com:11451 CODE_EXECUTION_API_KEY = dify-sandbox CODE_EXECUTION_ENDPOINT = http://127.0.0.1:8194 - CODE_MAX_STRING_LENGTH = 80000 + CODE_MAX_STRING_LENGTH = 400000 PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi PLUGIN_DAEMON_URL=http://127.0.0.1:5002 PLUGIN_MAX_PACKAGE_SIZE=15728640 diff --git a/api/tests/integration_tests/workflow/nodes/test_code.py b/api/tests/integration_tests/workflow/nodes/test_code.py index e2f3a74bf9..b62d8aa544 100644 --- a/api/tests/integration_tests/workflow/nodes/test_code.py +++ b/api/tests/integration_tests/workflow/nodes/test_code.py @@ -1,9 +1,9 @@ import time import uuid -from os import getenv import pytest +from configs import dify_config from core.app.entities.app_invoke_entities import InvokeFrom from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool from core.workflow.enums import WorkflowNodeExecutionStatus @@ -15,7 +15,7 @@ from core.workflow.system_variable import SystemVariable from models.enums import UserFrom from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock -CODE_MAX_STRING_LENGTH = int(getenv("CODE_MAX_STRING_LENGTH", "10000")) +CODE_MAX_STRING_LENGTH = dify_config.CODE_MAX_STRING_LENGTH def init_code_node(code_config: dict): diff --git a/api/tests/unit_tests/configs/test_dify_config.py b/api/tests/unit_tests/configs/test_dify_config.py index f4e3d97719..2968de4b91 100644 --- a/api/tests/unit_tests/configs/test_dify_config.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -33,6 +33,7 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch): assert config.EDITION == "SELF_HOSTED" assert config.API_COMPRESSION_ENABLED is False assert config.SENTRY_TRACES_SAMPLE_RATE == 1.0 + assert 
config.TEMPLATE_TRANSFORM_MAX_LENGTH == 400_000 # annotated field with default value assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 600 diff --git a/docker/.env.example b/docker/.env.example index 8c23bfc9b7..e04ef9e5bc 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -867,14 +867,14 @@ CODE_MAX_NUMBER=9223372036854775807 CODE_MIN_NUMBER=-9223372036854775808 CODE_MAX_DEPTH=5 CODE_MAX_PRECISION=20 -CODE_MAX_STRING_LENGTH=80000 +CODE_MAX_STRING_LENGTH=400000 CODE_MAX_STRING_ARRAY_LENGTH=30 CODE_MAX_OBJECT_ARRAY_LENGTH=30 CODE_MAX_NUMBER_ARRAY_LENGTH=1000 CODE_EXECUTION_CONNECT_TIMEOUT=10 CODE_EXECUTION_READ_TIMEOUT=60 CODE_EXECUTION_WRITE_TIMEOUT=10 -TEMPLATE_TRANSFORM_MAX_LENGTH=80000 +TEMPLATE_TRANSFORM_MAX_LENGTH=400000 # Workflow runtime configuration WORKFLOW_MAX_EXECUTION_STEPS=500 diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index b5ecb9db03..abac6d3b1e 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -390,14 +390,14 @@ x-shared-env: &shared-api-worker-env CODE_MIN_NUMBER: ${CODE_MIN_NUMBER:--9223372036854775808} CODE_MAX_DEPTH: ${CODE_MAX_DEPTH:-5} CODE_MAX_PRECISION: ${CODE_MAX_PRECISION:-20} - CODE_MAX_STRING_LENGTH: ${CODE_MAX_STRING_LENGTH:-80000} + CODE_MAX_STRING_LENGTH: ${CODE_MAX_STRING_LENGTH:-400000} CODE_MAX_STRING_ARRAY_LENGTH: ${CODE_MAX_STRING_ARRAY_LENGTH:-30} CODE_MAX_OBJECT_ARRAY_LENGTH: ${CODE_MAX_OBJECT_ARRAY_LENGTH:-30} CODE_MAX_NUMBER_ARRAY_LENGTH: ${CODE_MAX_NUMBER_ARRAY_LENGTH:-1000} CODE_EXECUTION_CONNECT_TIMEOUT: ${CODE_EXECUTION_CONNECT_TIMEOUT:-10} CODE_EXECUTION_READ_TIMEOUT: ${CODE_EXECUTION_READ_TIMEOUT:-60} CODE_EXECUTION_WRITE_TIMEOUT: ${CODE_EXECUTION_WRITE_TIMEOUT:-10} - TEMPLATE_TRANSFORM_MAX_LENGTH: ${TEMPLATE_TRANSFORM_MAX_LENGTH:-80000} + TEMPLATE_TRANSFORM_MAX_LENGTH: ${TEMPLATE_TRANSFORM_MAX_LENGTH:-400000} WORKFLOW_MAX_EXECUTION_STEPS: ${WORKFLOW_MAX_EXECUTION_STEPS:-500} WORKFLOW_MAX_EXECUTION_TIME: ${WORKFLOW_MAX_EXECUTION_TIME:-1200} WORKFLOW_CALL_MAX_DEPTH: ${WORKFLOW_CALL_MAX_DEPTH:-5} From d89c5f71468316d68af73fbc582caf322d649b3a Mon Sep 17 00:00:00 2001 From: Will Date: Mon, 6 Oct 2025 10:40:38 +0800 Subject: [PATCH 09/31] chore: Avoid directly using OpenAI dependencies (#26590) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../datasource/__base/datasource_runtime.py | 3 +- api/core/tools/__base/tool_runtime.py | 3 +- api/extensions/ext_sentry.py | 2 -- api/pyproject.toml | 1 - api/services/app_generate_service.py | 4 --- .../services/test_app_generate_service.py | 31 ------------------- api/uv.lock | 2 -- 7 files changed, 2 insertions(+), 44 deletions(-) diff --git a/api/core/datasource/__base/datasource_runtime.py b/api/core/datasource/__base/datasource_runtime.py index b7f280208a..c5d6c1d771 100644 --- a/api/core/datasource/__base/datasource_runtime.py +++ b/api/core/datasource/__base/datasource_runtime.py @@ -1,7 +1,6 @@ from typing import TYPE_CHECKING, Any, Optional -from openai import BaseModel -from pydantic import Field +from pydantic import BaseModel, Field # Import InvokeFrom locally to avoid circular import from core.app.entities.app_invoke_entities import InvokeFrom diff --git a/api/core/tools/__base/tool_runtime.py b/api/core/tools/__base/tool_runtime.py index 3de0014c61..09bc817c01 100644 --- a/api/core/tools/__base/tool_runtime.py +++ b/api/core/tools/__base/tool_runtime.py @@ -1,7 +1,6 @@ from typing import Any -from openai import BaseModel -from pydantic import Field +from pydantic import BaseModel, Field from 
core.app.entities.app_invoke_entities import InvokeFrom from core.tools.entities.tool_entities import CredentialType, ToolInvokeFrom diff --git a/api/extensions/ext_sentry.py b/api/extensions/ext_sentry.py index 6cfa99a62a..5ed7840211 100644 --- a/api/extensions/ext_sentry.py +++ b/api/extensions/ext_sentry.py @@ -4,7 +4,6 @@ from dify_app import DifyApp def init_app(app: DifyApp): if dify_config.SENTRY_DSN: - import openai import sentry_sdk from langfuse import parse_error # type: ignore from sentry_sdk.integrations.celery import CeleryIntegration @@ -28,7 +27,6 @@ def init_app(app: DifyApp): HTTPException, ValueError, FileNotFoundError, - openai.APIStatusError, InvokeRateLimitError, parse_error.defaultErrorResponse, ], diff --git a/api/pyproject.toml b/api/pyproject.toml index 85fa0beaab..5113157c05 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -37,7 +37,6 @@ dependencies = [ "mailchimp-transactional~=1.0.50", "markdown~=3.5.1", "numpy~=1.26.4", - "openai~=1.61.0", "openpyxl~=3.1.5", "opik~=1.7.25", "opentelemetry-api==1.27.0", diff --git a/api/services/app_generate_service.py b/api/services/app_generate_service.py index 8911da4728..b462ddf236 100644 --- a/api/services/app_generate_service.py +++ b/api/services/app_generate_service.py @@ -2,8 +2,6 @@ import uuid from collections.abc import Generator, Mapping from typing import Any, Union -from openai._exceptions import RateLimitError - from configs import dify_config from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator from core.app.apps.agent_chat.app_generator import AgentChatAppGenerator @@ -122,8 +120,6 @@ class AppGenerateService: ) else: raise ValueError(f"Invalid app mode {app_model.mode}") - except RateLimitError as e: - raise InvokeRateLimitError(str(e)) except Exception: rate_limit.exit(request_id) raise diff --git a/api/tests/test_containers_integration_tests/services/test_app_generate_service.py b/api/tests/test_containers_integration_tests/services/test_app_generate_service.py index ca0f309fd4..9386687a04 100644 --- a/api/tests/test_containers_integration_tests/services/test_app_generate_service.py +++ b/api/tests/test_containers_integration_tests/services/test_app_generate_service.py @@ -3,7 +3,6 @@ from unittest.mock import MagicMock, patch import pytest from faker import Faker -from openai._exceptions import RateLimitError from core.app.entities.app_invoke_entities import InvokeFrom from models.model import EndUser @@ -484,36 +483,6 @@ class TestAppGenerateService: # Verify error message assert "Rate limit exceeded" in str(exc_info.value) - def test_generate_with_rate_limit_error_from_openai( - self, db_session_with_containers, mock_external_service_dependencies - ): - """ - Test generation when OpenAI rate limit error occurs. 
- """ - fake = Faker() - app, account = self._create_test_app_and_account( - db_session_with_containers, mock_external_service_dependencies, mode="completion" - ) - - # Setup completion generator to raise RateLimitError - mock_response = MagicMock() - mock_response.request = MagicMock() - mock_external_service_dependencies["completion_generator"].return_value.generate.side_effect = RateLimitError( - "Rate limit exceeded", response=mock_response, body=None - ) - - # Setup test arguments - args = {"inputs": {"query": fake.text(max_nb_chars=50)}, "response_mode": "streaming"} - - # Execute the method under test and expect rate limit error - with pytest.raises(InvokeRateLimitError) as exc_info: - AppGenerateService.generate( - app_model=app, user=account, args=args, invoke_from=InvokeFrom.SERVICE_API, streaming=True - ) - - # Verify error message - assert "Rate limit exceeded" in str(exc_info.value) - def test_generate_with_invalid_app_mode(self, db_session_with_containers, mock_external_service_dependencies): """ Test generation with invalid app mode. diff --git a/api/uv.lock b/api/uv.lock index b1e86cd86d..06b4d7124a 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1314,7 +1314,6 @@ dependencies = [ { name = "mailchimp-transactional" }, { name = "markdown" }, { name = "numpy" }, - { name = "openai" }, { name = "openpyxl" }, { name = "opentelemetry-api" }, { name = "opentelemetry-distro" }, @@ -1508,7 +1507,6 @@ requires-dist = [ { name = "mailchimp-transactional", specifier = "~=1.0.50" }, { name = "markdown", specifier = "~=3.5.1" }, { name = "numpy", specifier = "~=1.26.4" }, - { name = "openai", specifier = "~=1.61.0" }, { name = "openpyxl", specifier = "~=3.1.5" }, { name = "opentelemetry-api", specifier = "==1.27.0" }, { name = "opentelemetry-distro", specifier = "==0.48b0" }, From 11770439befb26bac837559472d6252c190c8164 Mon Sep 17 00:00:00 2001 From: Will Date: Mon, 6 Oct 2025 20:21:51 +0800 Subject: [PATCH 10/31] chore: remove explicit dependency on the fastapi framework (#26609) --- .../console/datasets/rag_pipeline/datasource_auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py index 154d9e646b..53b5a0d965 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py @@ -1,4 +1,3 @@ -from fastapi.encoders import jsonable_encoder from flask import make_response, redirect, request from flask_login import current_user from flask_restx import Resource, reqparse @@ -11,6 +10,7 @@ from controllers.console.wraps import ( setup_required, ) from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.oauth import OAuthHandler from libs.helper import StrLen from libs.login import login_required From b30e7ced0a47432d6f1b4bc205aaa3998842b3d9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Oct 2025 20:22:32 +0800 Subject: [PATCH 11/31] chore(deps): bump react-easy-crop from 5.5.0 to 5.5.3 in /web (#26602) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- web/package.json | 2 +- web/pnpm-lock.yaml | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/web/package.json b/web/package.json index 
7695d04f40..630fef0a40 100644 --- a/web/package.json +++ b/web/package.json @@ -107,7 +107,7 @@ "react": "19.1.1", "react-18-input-autosize": "^3.0.0", "react-dom": "19.1.1", - "react-easy-crop": "^5.1.0", + "react-easy-crop": "^5.5.3", "react-hook-form": "^7.53.1", "react-hotkeys-hook": "^4.6.1", "react-i18next": "^15.1.0", diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 19a6b87cac..7f89238ca8 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -242,8 +242,8 @@ importers: specifier: 19.1.1 version: 19.1.1(react@19.1.1) react-easy-crop: - specifier: ^5.1.0 - version: 5.5.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: ^5.5.3 + version: 5.5.3(react-dom@19.1.1(react@19.1.1))(react@19.1.1) react-hook-form: specifier: ^7.53.1 version: 7.60.0(react@19.1.1) @@ -7300,8 +7300,8 @@ packages: react: '>= 16.3.0' react-dom: '>= 16.3.0' - react-easy-crop@5.5.0: - resolution: {integrity: sha512-OZzU+yXMhe69vLkDex+5QxcfT94FdcgVCyW2dBUw35ZoC3Is42TUxUy04w8nH1mfMKaizVdC3rh/wUfNW1mK4w==} + react-easy-crop@5.5.3: + resolution: {integrity: sha512-iKwFTnAsq+IVuyF6N0Q3zjRx9DG1NMySkwWxVfM/xAOeHYH1vhvM+V2kFiq5HOIQGWouITjfltCx54mbDpMpmA==} peerDependencies: react: '>=16.4.0' react-dom: '>=16.4.0' @@ -17070,7 +17070,7 @@ snapshots: react: 19.1.1 react-dom: 19.1.1(react@19.1.1) - react-easy-crop@5.5.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + react-easy-crop@5.5.3(react-dom@19.1.1(react@19.1.1))(react@19.1.1): dependencies: normalize-wheel: 1.0.1 react: 19.1.1 From fdfccd1205686920bed7006072db6a4d36d4bdbe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Oct 2025 20:22:53 +0800 Subject: [PATCH 12/31] chore(deps): bump azure-storage-blob from 12.13.0 to 12.26.0 in /api (#26603) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- api/pyproject.toml | 2 +- api/uv.lock | 27 ++++++--------------------- 2 files changed, 7 insertions(+), 22 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 5113157c05..4ab1b6d457 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -177,7 +177,7 @@ dev = [ # Required for storage clients ############################################################ storage = [ - "azure-storage-blob==12.13.0", + "azure-storage-blob==12.26.0", "bce-python-sdk~=0.9.23", "cos-python-sdk-v5==1.9.38", "esdk-obs-python==3.24.6.1", diff --git a/api/uv.lock b/api/uv.lock index 06b4d7124a..89eb4aab36 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -445,16 +445,17 @@ wheels = [ [[package]] name = "azure-storage-blob" -version = "12.13.0" +version = "12.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core" }, { name = "cryptography" }, - { name = "msrest" }, + { name = "isodate" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/93/b13bf390e940a79a399981f75ac8d2e05a70112a95ebb7b41e9b752d2921/azure-storage-blob-12.13.0.zip", hash = "sha256:53f0d4cd32970ac9ff9b9753f83dd2fb3f9ac30e1d01e71638c436c509bfd884", size = 684838, upload-time = "2022-07-07T22:35:44.543Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/95/3e3414491ce45025a1cde107b6ae72bf72049e6021597c201cd6a3029b9a/azure_storage_blob-12.26.0.tar.gz", hash = "sha256:5dd7d7824224f7de00bfeb032753601c982655173061e242f13be6e26d78d71f", size = 583332, upload-time = "2025-07-16T21:34:07.644Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0e/2a/b8246df35af68d64fb7292c93dbbde63cd25036f2f669a9d9ae59e518c76/azure_storage_blob-12.13.0-py3-none-any.whl", hash = "sha256:280a6ab032845bab9627582bee78a50497ca2f14772929b5c5ee8b4605af0cb3", size = 377309, upload-time = "2022-07-07T22:35:41.905Z" }, + { url = "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", hash = "sha256:8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", size = 412907, upload-time = "2025-07-16T21:34:09.367Z" }, ] [[package]] @@ -1623,7 +1624,7 @@ dev = [ { name = "types-ujson", specifier = ">=5.10.0" }, ] storage = [ - { name = "azure-storage-blob", specifier = "==12.13.0" }, + { name = "azure-storage-blob", specifier = "==12.26.0" }, { name = "bce-python-sdk", specifier = "~=0.9.23" }, { name = "cos-python-sdk-v5", specifier = "==1.9.38" }, { name = "esdk-obs-python", specifier = "==3.24.6.1" }, @@ -3367,22 +3368,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, ] -[[package]] -name = "msrest" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "azure-core" }, - { name = "certifi" }, - { name = "isodate" }, - { name = "requests" }, - { name = "requests-oauthlib" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/68/77/8397c8fb8fc257d8ea0fa66f8068e073278c65f05acb17dcb22a02bfdc42/msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9", size = 175332, upload-time = "2022-06-13T22:41:25.111Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/cf/f2966a2638144491f8696c27320d5219f48a072715075d168b31d3237720/msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32", size = 85384, upload-time = "2022-06-13T22:41:22.42Z" }, -] - [[package]] name = "multidict" version = "6.6.4" From 1d6c03eddf4dfee019c126a887f42c5a842352c0 Mon Sep 17 00:00:00 2001 From: Will Date: Mon, 6 Oct 2025 20:24:24 +0800 Subject: [PATCH 13/31] delete unnecessary db merge (#26588) --- api/core/app/task_pipeline/message_cycle_manager.py | 1 - 1 file changed, 1 deletion(-) diff --git a/api/core/app/task_pipeline/message_cycle_manager.py b/api/core/app/task_pipeline/message_cycle_manager.py index 0004fb592e..7a384e5c92 100644 --- a/api/core/app/task_pipeline/message_cycle_manager.py +++ b/api/core/app/task_pipeline/message_cycle_manager.py @@ -107,7 +107,6 @@ class MessageCycleManager: if dify_config.DEBUG: logger.exception("generate conversation name failed, conversation_id: %s", conversation_id) - db.session.merge(conversation) db.session.commit() db.session.close() From 517726da3acf1278e414812c9f085f9dae72ab6c Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Mon, 6 Oct 2025 20:25:31 +0800 Subject: [PATCH 14/31] Feature add test containers mail change mail task (#26570) Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> --- .../tasks/test_mail_change_mail_task.py | 282 ++++++++++++++++++ 1 file changed, 282 insertions(+) create mode 100644 api/tests/test_containers_integration_tests/tasks/test_mail_change_mail_task.py diff --git 
a/api/tests/test_containers_integration_tests/tasks/test_mail_change_mail_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_change_mail_task.py new file mode 100644 index 0000000000..9cf348d989 --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_change_mail_task.py @@ -0,0 +1,282 @@ +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from libs.email_i18n import EmailType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_change_mail_task import send_change_mail_completed_notification_task, send_change_mail_task + + +class TestMailChangeMailTask: + """Integration tests for mail_change_mail_task using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_change_mail_task.mail") as mock_mail, + patch("tasks.mail_change_mail_task.get_email_i18n_service") as mock_get_email_i18n_service, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email i18n service + mock_email_service = MagicMock() + mock_get_email_i18n_service.return_value = mock_email_service + + yield { + "mail": mock_mail, + "email_i18n_service": mock_email_service, + "get_email_i18n_service": mock_get_email_i18n_service, + } + + def _create_test_account(self, db_session_with_containers): + """ + Helper method to create a test account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + + Returns: + Account: Created account instance + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + db_session_with_containers.add(account) + db_session_with_containers.commit() + + # Create tenant + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db_session_with_containers.add(join) + db_session_with_containers.commit() + + return account + + def test_send_change_mail_task_success_old_email_phase( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful change email task execution for old_email phase. 
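# Outside the test suite this function is a Celery task, so production code
# would normally enqueue it rather than call it inline as the test below does;
# a sketch of that call, assuming the standard Celery .delay() API and
# illustrative argument values:
from tasks.mail_change_mail_task import send_change_mail_task

send_change_mail_task.delay("en-US", "user@example.com", "123456", "old_email")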
+ + This test verifies: + - Proper mail service initialization check + - Correct email service method call with old_email phase + - Successful task completion + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_language = "en-US" + test_email = account.email + test_code = "123456" + test_phase = "old_email" + + # Act: Execute the task + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with( + language_code=test_language, + to=test_email, + code=test_code, + phase=test_phase, + ) + + def test_send_change_mail_task_success_new_email_phase( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful change email task execution for new_email phase. + + This test verifies: + - Proper mail service initialization check + - Correct email service method call with new_email phase + - Successful task completion + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_language = "zh-Hans" + test_email = "new@example.com" + test_code = "789012" + test_phase = "new_email" + + # Act: Execute the task + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with( + language_code=test_language, + to=test_email, + code=test_code, + phase=test_phase, + ) + + def test_send_change_mail_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email task when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls when mail is not available + """ + # Arrange: Setup mail service as not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_phase = "old_email" + + # Act: Execute the task + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify no email service calls + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_not_called() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_not_called() + + def test_send_change_mail_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email task when email service raises an exception. 
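# The contract this test (and the sibling exception tests below) pins down is
# that mail failures are logged and swallowed inside the task, never raised to
# the caller; a minimal sketch of that pattern, with illustrative names:
import logging

logger = logging.getLogger(__name__)

def send_safely(send_fn, *args, **kwargs) -> None:
    try:
        send_fn(*args, **kwargs)
    except Exception:
        logger.exception("send email failed")  # swallowed: the task must not raise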
+ + This test verifies: + - Exception is properly caught and logged + - Task completes without raising exception + """ + # Arrange: Setup email service to raise exception + mock_external_service_dependencies["email_i18n_service"].send_change_email.side_effect = Exception( + "Email service failed" + ) + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_phase = "old_email" + + # Act: Execute the task (should not raise exception) + send_change_mail_task(test_language, test_email, test_code, test_phase) + + # Assert: Verify email service was called despite exception + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_change_email.assert_called_once_with( + language_code=test_language, + to=test_email, + code=test_code, + phase=test_phase, + ) + + def test_send_change_mail_completed_notification_task_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful change email completed notification task execution. + + This test verifies: + - Proper mail service initialization check + - Correct email service method call with CHANGE_EMAIL_COMPLETED type + - Template context is properly constructed + - Successful task completion + """ + # Arrange: Create test data + account = self._create_test_account(db_session_with_containers) + test_language = "en-US" + test_email = account.email + + # Act: Execute the task + send_change_mail_completed_notification_task(test_language, test_email) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_email.assert_called_once_with( + email_type=EmailType.CHANGE_EMAIL_COMPLETED, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "email": test_email, + }, + ) + + def test_send_change_mail_completed_notification_task_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email completed notification task when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls when mail is not available + """ + # Arrange: Setup mail service as not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + test_language = "en-US" + test_email = "test@example.com" + + # Act: Execute the task + send_change_mail_completed_notification_task(test_language, test_email) + + # Assert: Verify no email service calls + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_not_called() + mock_external_service_dependencies["email_i18n_service"].send_email.assert_not_called() + + def test_send_change_mail_completed_notification_task_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test change email completed notification task when email service raises an exception. 
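# The i18n mail call the notification tests assert on, shown in isolation;
# EmailType comes from the imports at the top of this file, and importing
# get_email_i18n_service from the same module is an assumption:
from libs.email_i18n import EmailType, get_email_i18n_service

get_email_i18n_service().send_email(
    email_type=EmailType.CHANGE_EMAIL_COMPLETED,
    language_code="en-US",
    to="user@example.com",
    template_context={"to": "user@example.com", "email": "user@example.com"},
)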
+ + This test verifies: + - Exception is properly caught and logged + - Task completes without raising exception + """ + # Arrange: Setup email service to raise exception + mock_external_service_dependencies["email_i18n_service"].send_email.side_effect = Exception( + "Email service failed" + ) + test_language = "en-US" + test_email = "test@example.com" + + # Act: Execute the task (should not raise exception) + send_change_mail_completed_notification_task(test_language, test_email) + + # Assert: Verify email service was called despite exception + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + mock_external_service_dependencies["get_email_i18n_service"].assert_called_once() + mock_external_service_dependencies["email_i18n_service"].send_email.assert_called_once_with( + email_type=EmailType.CHANGE_EMAIL_COMPLETED, + language_code=test_language, + to=test_email, + template_context={ + "to": test_email, + "email": test_email, + }, + ) From dc0f053925cd742c7a383f549f1f7548ba0327c1 Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Tue, 7 Oct 2025 12:48:11 +0800 Subject: [PATCH 15/31] Feature add test containers mail inner task (#26622) Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../tasks/test_mail_inner_task.py | 261 ++++++++++++++++++ 1 file changed, 261 insertions(+) create mode 100644 api/tests/test_containers_integration_tests/tasks/test_mail_inner_task.py diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_inner_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_inner_task.py new file mode 100644 index 0000000000..d67794654f --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_inner_task.py @@ -0,0 +1,261 @@ +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from tasks.mail_inner_task import send_inner_email_task + + +class TestMailInnerTask: + """Integration tests for send_inner_email_task using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_inner_task.mail") as mock_mail, + patch("tasks.mail_inner_task.get_email_i18n_service") as mock_get_email_i18n_service, + patch("tasks.mail_inner_task._render_template_with_strategy") as mock_render_template, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email i18n service + mock_email_service = MagicMock() + mock_get_email_i18n_service.return_value = mock_email_service + + # Setup mock template rendering + mock_render_template.return_value = "Test email content" + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "render_template": mock_render_template, + } + + def _create_test_email_data(self, fake: Faker) -> dict: + """ + Helper method to create test email data for testing. 
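# The body/substitutions pair this helper builds feeds the task's template
# renderer (mocked in these tests as _render_template_with_strategy); a
# minimal sketch of the substitution contract, assuming a Jinja2-style backend:
from jinja2 import Template

def render_body(body: str, substitutions: dict) -> str:
    return Template(body).render(**substitutions)

assert render_body("Hello {{name}}!", {"name": "Ada"}) == "Hello Ada!"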
+ + Args: + fake: Faker instance for generating test data + + Returns: + dict: Test email data including recipients, subject, body, and substitutions + """ + return { + "to": [fake.email() for _ in range(3)], + "subject": fake.sentence(nb_words=4), + "body": "Hello {{name}}, this is a test email from {{company}}.", + "substitutions": { + "name": fake.name(), + "company": fake.company(), + "date": fake.date(), + }, + } + + def test_send_inner_email_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful email sending with valid data. + + This test verifies: + - Proper email service initialization check + - Template rendering with substitutions + - Email service integration + - Multiple recipient handling + """ + # Arrange: Create test data + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify the expected outcomes + # Verify mail service was checked for initialization + mock_external_service_dependencies["mail"].is_inited.assert_called_once() + + # Verify template rendering was called with correct parameters + mock_external_service_dependencies["render_template"].assert_called_once_with( + email_data["body"], email_data["substitutions"] + ) + + # Verify email service was called once with the full recipient list + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) + + def test_send_inner_email_single_recipient(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test email sending with single recipient. + + This test verifies: + - Single recipient handling + - Template rendering + - Email service integration + """ + # Arrange: Create test data with single recipient + fake = Faker() + email_data = { + "to": [fake.email()], + "subject": fake.sentence(nb_words=3), + "body": "Welcome {{user_name}}!", + "substitutions": { + "user_name": fake.name(), + }, + } + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify the expected outcomes + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) + + def test_send_inner_email_empty_substitutions(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test email sending with empty substitutions. 
+ + This test verifies: + - Template rendering with empty substitutions + - Email service integration + - Handling of minimal template context + """ + # Arrange: Create test data with empty substitutions + fake = Faker() + email_data = { + "to": [fake.email()], + "subject": fake.sentence(nb_words=3), + "body": "This is a simple email without variables.", + "substitutions": {}, + } + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify the expected outcomes + mock_external_service_dependencies["render_template"].assert_called_once_with(email_data["body"], {}) + + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) + + def test_send_inner_email_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email sending when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No template rendering occurs + - No email service calls + - No exceptions raised + """ + # Arrange: Setup mail service as not initialized + mock_external_service_dependencies["mail"].is_inited.return_value = False + + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify no processing occurred + mock_external_service_dependencies["render_template"].assert_not_called() + mock_external_service_dependencies["email_service"].send_raw_email.assert_not_called() + + def test_send_inner_email_template_rendering_error( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email sending when template rendering fails. + + This test verifies: + - Exception handling during template rendering + - No email service calls when template fails + """ + # Arrange: Setup template rendering to raise an exception + mock_external_service_dependencies["render_template"].side_effect = Exception("Template rendering failed") + + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify template rendering was attempted + mock_external_service_dependencies["render_template"].assert_called_once() + + # Verify no email service calls due to exception + mock_external_service_dependencies["email_service"].send_raw_email.assert_not_called() + + def test_send_inner_email_service_error(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test email sending when email service fails. 
+ + This test verifies: + - Exception handling during email sending + - Graceful error handling + """ + # Arrange: Setup email service to raise an exception + mock_external_service_dependencies["email_service"].send_raw_email.side_effect = Exception( + "Email service failed" + ) + + fake = Faker() + email_data = self._create_test_email_data(fake) + + # Act: Execute the task + send_inner_email_task( + to=email_data["to"], + subject=email_data["subject"], + body=email_data["body"], + substitutions=email_data["substitutions"], + ) + + # Assert: Verify template rendering occurred + mock_external_service_dependencies["render_template"].assert_called_once() + + # Verify email service was called (and failed) + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_raw_email.assert_called_once_with( + to=email_data["to"], + subject=email_data["subject"], + html_content="Test email content", + ) From faaca822e44102d4d3035d02d79dcb24c1fec3b5 Mon Sep 17 00:00:00 2001 From: Charles Liu Date: Tue, 7 Oct 2025 06:49:44 +0200 Subject: [PATCH 16/31] fix bug 26613: get wrong credentials with multiple authorizations plugin (#26615) Co-authored-by: charles liu --- api/services/tools/builtin_tools_manage_service.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index 6b0b6b0f0e..cab4a5c6ab 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -349,14 +349,10 @@ class BuiltinToolManageService: provider_controller = ToolManager.get_builtin_provider(default_provider.provider, tenant_id) credentials: list[ToolProviderCredentialApiEntity] = [] - encrypters = {} for provider in providers: - credential_type = provider.credential_type - if credential_type not in encrypters: - encrypters[credential_type] = BuiltinToolManageService.create_tool_encrypter( - tenant_id, provider, provider.provider, provider_controller - )[0] - encrypter = encrypters[credential_type] + encrypter, _ = BuiltinToolManageService.create_tool_encrypter( + tenant_id, provider, provider.provider, provider_controller + ) decrypt_credential = encrypter.mask_tool_credentials(encrypter.decrypt(provider.credentials)) credential_entity = ToolTransformService.convert_builtin_provider_to_credential_entity( provider=provider, From 04f47836d8095623a84cbf986b0ca4a3ff4f36ad Mon Sep 17 00:00:00 2001 From: yihong Date: Tue, 7 Oct 2025 14:16:14 +0800 Subject: [PATCH 17/31] fix: two functions comments doc is not right (#26624) Signed-off-by: yihong0618 Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../priority_rag_pipeline_run_task.py | 21 ++++--------------- .../rag_pipeline/rag_pipeline_run_task.py | 21 ++++--------------- 2 files changed, 8 insertions(+), 34 deletions(-) diff --git a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py index 028f635188..a2c99554f1 100644 --- a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py @@ -29,23 +29,10 @@ def priority_rag_pipeline_run_task( tenant_id: str, ): """ - Async Run rag pipeline - :param rag_pipeline_invoke_entities: Rag pipeline invoke entities - rag_pipeline_invoke_entities include: - :param pipeline_id: Pipeline ID - :param user_id: User ID - :param tenant_id: Tenant ID - :param workflow_id: 
Workflow ID - :param invoke_from: Invoke source (debugger, published, etc.) - :param streaming: Whether to stream results - :param datasource_type: Type of datasource - :param datasource_info: Datasource information dict - :param batch: Batch identifier - :param document_id: Document ID (optional) - :param start_node_id: Starting node ID - :param inputs: Input parameters dict - :param workflow_execution_id: Workflow execution ID - :param workflow_thread_pool_id: Thread pool ID for workflow execution + Async Run rag pipeline task using high priority queue. + + :param rag_pipeline_invoke_entities_file_id: File ID containing serialized RAG pipeline invoke entities + :param tenant_id: Tenant ID for the pipeline execution """ # run with threading, thread pool size is 10 diff --git a/api/tasks/rag_pipeline/rag_pipeline_run_task.py b/api/tasks/rag_pipeline/rag_pipeline_run_task.py index ee904c4649..4e00f072bf 100644 --- a/api/tasks/rag_pipeline/rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/rag_pipeline_run_task.py @@ -30,23 +30,10 @@ def rag_pipeline_run_task( tenant_id: str, ): """ - Async Run rag pipeline - :param rag_pipeline_invoke_entities: Rag pipeline invoke entities - rag_pipeline_invoke_entities include: - :param pipeline_id: Pipeline ID - :param user_id: User ID - :param tenant_id: Tenant ID - :param workflow_id: Workflow ID - :param invoke_from: Invoke source (debugger, published, etc.) - :param streaming: Whether to stream results - :param datasource_type: Type of datasource - :param datasource_info: Datasource information dict - :param batch: Batch identifier - :param document_id: Document ID (optional) - :param start_node_id: Starting node ID - :param inputs: Input parameters dict - :param workflow_execution_id: Workflow execution ID - :param workflow_thread_pool_id: Thread pool ID for workflow execution + Async Run rag pipeline task using regular priority queue. 
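# The two tasks now share one documented signature and differ only in queue
# priority; a sketch of how a caller might choose between them, with the task
# names taken from this patch and the dispatch helper itself assumed:
from tasks.rag_pipeline.priority_rag_pipeline_run_task import priority_rag_pipeline_run_task
from tasks.rag_pipeline.rag_pipeline_run_task import rag_pipeline_run_task

def dispatch_pipeline_run(invoke_entities_file_id: str, tenant_id: str, high_priority: bool) -> None:
    task = priority_rag_pipeline_run_task if high_priority else rag_pipeline_run_task
    task.delay(invoke_entities_file_id, tenant_id)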
+ + :param rag_pipeline_invoke_entities_file_id: File ID containing serialized RAG pipeline invoke entities + :param tenant_id: Tenant ID for the pipeline execution """ # run with threading, thread pool size is 10 From e00172199a9f99d2ce6d9b2dfc6faacb27266573 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 14:17:05 +0800 Subject: [PATCH 18/31] chore(deps-dev): bump babel-loader from 9.2.1 to 10.0.0 in /web (#26601) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- web/package.json | 2 +- web/pnpm-lock.yaml | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/web/package.json b/web/package.json index 630fef0a40..62cccf0610 100644 --- a/web/package.json +++ b/web/package.json @@ -178,7 +178,7 @@ "@types/sortablejs": "^1.15.1", "@types/uuid": "^10.0.0", "autoprefixer": "^10.4.20", - "babel-loader": "^9.2.1", + "babel-loader": "^10.0.0", "bing-translate-api": "^4.0.2", "code-inspector-plugin": "1.2.9", "cross-env": "^7.0.3", diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 7f89238ca8..8046f94d59 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -450,8 +450,8 @@ importers: specifier: ^10.4.20 version: 10.4.21(postcss@8.5.6) babel-loader: - specifier: ^9.2.1 - version: 9.2.1(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) + specifier: ^10.0.0 + version: 10.0.0(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)) bing-translate-api: specifier: ^4.0.2 version: 4.1.0 @@ -3936,6 +3936,13 @@ packages: peerDependencies: '@babel/core': ^7.8.0 + babel-loader@10.0.0: + resolution: {integrity: sha512-z8jt+EdS61AMw22nSfoNJAZ0vrtmhPRVi6ghL3rCeRZI8cdNYFiV5xeV3HbE7rlZZNmGH8BVccwWt8/ED0QOHA==} + engines: {node: ^18.20.0 || ^20.10.0 || >=22.0.0} + peerDependencies: + '@babel/core': ^7.12.0 + webpack: '>=5.61.0' + babel-loader@8.4.1: resolution: {integrity: sha512-nXzRChX+Z1GoE6yWavBQg6jDslyFF3SDjl2paADuoQtQW10JqShJt62R6eJQ5m/pjJFDT8xgKIWSP85OY8eXeA==} engines: {node: '>= 8.9'} @@ -12832,6 +12839,12 @@ snapshots: transitivePeerDependencies: - supports-color + babel-loader@10.0.0(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)): + dependencies: + '@babel/core': 7.28.3 + find-up: 5.0.0 + webpack: 5.100.2(esbuild@0.25.0)(uglify-js@3.19.3) + babel-loader@8.4.1(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)): dependencies: '@babel/core': 7.28.3 From e56c847210e30253547ab232946371a73243ad99 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 14:17:56 +0800 Subject: [PATCH 19/31] chore(deps): bump esdk-obs-python from 3.24.6.1 to 3.25.8 in /api (#26604) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- api/pyproject.toml | 2 +- api/uv.lock | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 4ab1b6d457..3c4efe074b 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -180,7 +180,7 @@ storage = [ "azure-storage-blob==12.26.0", "bce-python-sdk~=0.9.23", "cos-python-sdk-v5==1.9.38", - "esdk-obs-python==3.24.6.1", + "esdk-obs-python==3.25.8", "google-cloud-storage==2.16.0", "opendal~=0.46.0", "oss2==2.18.5", diff --git a/api/uv.lock b/api/uv.lock index 89eb4aab36..675984e3b3 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1627,7 +1627,7 
@@ storage = [ { name = "azure-storage-blob", specifier = "==12.26.0" }, { name = "bce-python-sdk", specifier = "~=0.9.23" }, { name = "cos-python-sdk-v5", specifier = "==1.9.38" }, - { name = "esdk-obs-python", specifier = "==3.24.6.1" }, + { name = "esdk-obs-python", specifier = "==3.25.8" }, { name = "google-cloud-storage", specifier = "==2.16.0" }, { name = "opendal", specifier = "~=0.46.0" }, { name = "oss2", specifier = "==2.18.5" }, @@ -1778,12 +1778,14 @@ wheels = [ [[package]] name = "esdk-obs-python" -version = "3.24.6.1" +version = "3.25.8" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "crcmod" }, { name = "pycryptodome" }, + { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/af/d83276f9e288bd6a62f44d67ae1eafd401028ba1b2b643ae4014b51da5bd/esdk-obs-python-3.24.6.1.tar.gz", hash = "sha256:c45fed143e99d9256c8560c1d78f651eae0d2e809d16e962f8b286b773c33bf0", size = 85798, upload-time = "2024-07-26T13:13:22.467Z" } +sdist = { url = "https://files.pythonhosted.org/packages/40/99/52362d6e081a642d6de78f6ab53baa5e3f82f2386c48954e18ee7b4ab22b/esdk-obs-python-3.25.8.tar.gz", hash = "sha256:aeded00b27ecd5a25ffaec38a2cc9416b51923d48db96c663f1a735f859b5273", size = 96302, upload-time = "2025-09-01T11:35:20.432Z" } [[package]] name = "et-xmlfile" From 31e6ef77a61f91c377052d51e3825fe0f85904ed Mon Sep 17 00:00:00 2001 From: Ponder <190127701@qq.com> Date: Tue, 7 Oct 2025 14:20:12 +0800 Subject: [PATCH 20/31] feat: optimize the page jump logic to prevent unnecessary jumps. (#26481) Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- web/app/components/base/pagination/index.tsx | 32 ++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/web/app/components/base/pagination/index.tsx b/web/app/components/base/pagination/index.tsx index 8126f663dd..e0c02df253 100644 --- a/web/app/components/base/pagination/index.tsx +++ b/web/app/components/base/pagination/index.tsx @@ -57,7 +57,34 @@ const CustomizedPagination: FC = ({ if (isNaN(Number.parseInt(value))) return setInputValue('') setInputValue(Number.parseInt(value)) - handlePaging(value) + } + + const handleInputConfirm = () => { + if (inputValue !== '' && String(inputValue) !== String(current + 1)) { + handlePaging(String(inputValue)) + return + } + + if (inputValue === '') + setInputValue(current + 1) + + setShowInput(false) + } + + const handleInputKeyDown = (e: React.KeyboardEvent) => { + if (e.key === 'Enter') { + e.preventDefault() + handleInputConfirm() + } + else if (e.key === 'Escape') { + e.preventDefault() + setInputValue(current + 1) + setShowInput(false) + } + } + + const handleInputBlur = () => { + handleInputConfirm() } return ( @@ -105,7 +132,8 @@ const CustomizedPagination: FC = ({ autoFocus value={inputValue} onChange={handleInputChange} - onBlur={() => setShowInput(false)} + onKeyDown={handleInputKeyDown} + onBlur={handleInputBlur} /> )} Date: Tue, 7 Oct 2025 14:21:08 +0800 Subject: [PATCH 21/31] perf(web): improve app workflow build performance. 
(#26310) --- .../base/markdown-blocks/code-block.tsx | 4 +- web/app/components/base/markdown/index.tsx | 76 ++--------------- .../base/markdown/react-markdown-wrapper.tsx | 82 +++++++++++++++++++ .../data-source/local-file/index.tsx | 4 +- web/app/components/workflow-app/index.tsx | 2 +- .../workflow/hooks/use-workflow-history.ts | 42 +++++----- .../workflow/workflow-history-store.tsx | 4 +- 7 files changed, 120 insertions(+), 94 deletions(-) create mode 100644 web/app/components/base/markdown/react-markdown-wrapper.tsx diff --git a/web/app/components/base/markdown-blocks/code-block.tsx b/web/app/components/base/markdown-blocks/code-block.tsx index 48de8bf4ab..6814659a00 100644 --- a/web/app/components/base/markdown-blocks/code-block.tsx +++ b/web/app/components/base/markdown-blocks/code-block.tsx @@ -8,12 +8,14 @@ import { import ActionButton from '@/app/components/base/action-button' import CopyIcon from '@/app/components/base/copy-icon' import SVGBtn from '@/app/components/base/svg' -import Flowchart from '@/app/components/base/mermaid' import { Theme } from '@/types/app' import useTheme from '@/hooks/use-theme' import SVGRenderer from '../svg-gallery' // Assumes svg-gallery.tsx is in /base directory import MarkdownMusic from '@/app/components/base/markdown-blocks/music' import ErrorBoundary from '@/app/components/base/markdown/error-boundary' +import dynamic from 'next/dynamic' + +const Flowchart = dynamic(() => import('@/app/components/base/mermaid'), { ssr: false }) // Available language https://github.com/react-syntax-highlighter/react-syntax-highlighter/blob/master/AVAILABLE_LANGUAGES_HLJS.MD const capitalizationLanguageNameMap: Record = { diff --git a/web/app/components/base/markdown/index.tsx b/web/app/components/base/markdown/index.tsx index bab5ac8eba..19f39d8aaa 100644 --- a/web/app/components/base/markdown/index.tsx +++ b/web/app/components/base/markdown/index.tsx @@ -1,25 +1,11 @@ -import ReactMarkdown from 'react-markdown' +import dynamic from 'next/dynamic' import 'katex/dist/katex.min.css' -import RemarkMath from 'remark-math' -import RemarkBreaks from 'remark-breaks' -import RehypeKatex from 'rehype-katex' -import RemarkGfm from 'remark-gfm' -import RehypeRaw from 'rehype-raw' import { flow } from 'lodash-es' import cn from '@/utils/classnames' -import { customUrlTransform, preprocessLaTeX, preprocessThinkTag } from './markdown-utils' -import { - AudioBlock, - CodeBlock, - Img, - Link, - MarkdownButton, - MarkdownForm, - Paragraph, - ScriptBlock, - ThinkBlock, - VideoBlock, -} from '@/app/components/base/markdown-blocks' +import { preprocessLaTeX, preprocessThinkTag } from './markdown-utils' +import type { ReactMarkdownWrapperProps } from './react-markdown-wrapper' + +const ReactMarkdown = dynamic(() => import('./react-markdown-wrapper').then(mod => mod.ReactMarkdownWrapper), { ssr: false }) /** * @fileoverview Main Markdown rendering component. @@ -31,9 +17,7 @@ import { export type MarkdownProps = { content: string className?: string - customDisallowedElements?: string[] - customComponents?: Record> -} +} & Pick export const Markdown = (props: MarkdownProps) => { const { customComponents = {} } = props @@ -44,53 +28,7 @@ export const Markdown = (props: MarkdownProps) => { return (
- { - return (tree: any) => { - const iterate = (node: any) => { - if (node.type === 'element' && node.properties?.ref) - delete node.properties.ref - - if (node.type === 'element' && !/^[a-z][a-z0-9]*$/i.test(node.tagName)) { - node.type = 'text' - node.value = `<${node.tagName}` - } - - if (node.children) - node.children.forEach(iterate) - } - tree.children.forEach(iterate) - } - }, - ]} - urlTransform={customUrlTransform} - disallowedElements={['iframe', 'head', 'html', 'meta', 'link', 'style', 'body', ...(props.customDisallowedElements || [])]} - components={{ - code: CodeBlock, - img: Img, - video: VideoBlock, - audio: AudioBlock, - a: Link, - p: Paragraph, - button: MarkdownButton, - form: MarkdownForm, - script: ScriptBlock as any, - details: ThinkBlock, - ...customComponents, - }} - > - {/* Markdown detect has problem. */} - {latexContent} - +
) } diff --git a/web/app/components/base/markdown/react-markdown-wrapper.tsx b/web/app/components/base/markdown/react-markdown-wrapper.tsx new file mode 100644 index 0000000000..054b5f66cb --- /dev/null +++ b/web/app/components/base/markdown/react-markdown-wrapper.tsx @@ -0,0 +1,82 @@ +import ReactMarkdown from 'react-markdown' +import RemarkMath from 'remark-math' +import RemarkBreaks from 'remark-breaks' +import RehypeKatex from 'rehype-katex' +import RemarkGfm from 'remark-gfm' +import RehypeRaw from 'rehype-raw' +import AudioBlock from '@/app/components/base/markdown-blocks/audio-block' +import Img from '@/app/components/base/markdown-blocks/img' +import Link from '@/app/components/base/markdown-blocks/link' +import MarkdownButton from '@/app/components/base/markdown-blocks/button' +import MarkdownForm from '@/app/components/base/markdown-blocks/form' +import Paragraph from '@/app/components/base/markdown-blocks/paragraph' +import ScriptBlock from '@/app/components/base/markdown-blocks/script-block' +import ThinkBlock from '@/app/components/base/markdown-blocks/think-block' +import VideoBlock from '@/app/components/base/markdown-blocks/video-block' +import { customUrlTransform } from './markdown-utils' + +import type { FC } from 'react' + +import dynamic from 'next/dynamic' + +const CodeBlock = dynamic(() => import('@/app/components/base/markdown-blocks/code-block'), { ssr: false }) + +export type ReactMarkdownWrapperProps = { + latexContent: any + customDisallowedElements?: string[] + customComponents?: Record> +} + +export const ReactMarkdownWrapper: FC = (props) => { + const { customComponents, latexContent } = props + + return ( + { + return (tree: any) => { + const iterate = (node: any) => { + if (node.type === 'element' && node.properties?.ref) + delete node.properties.ref + + if (node.type === 'element' && !/^[a-z][a-z0-9]*$/i.test(node.tagName)) { + node.type = 'text' + node.value = `<${node.tagName}` + } + + if (node.children) + node.children.forEach(iterate) + } + tree.children.forEach(iterate) + } + }, + ]} + urlTransform={customUrlTransform} + disallowedElements={['iframe', 'head', 'html', 'meta', 'link', 'style', 'body', ...(props.customDisallowedElements || [])]} + components={{ + code: CodeBlock, + img: Img, + video: VideoBlock, + audio: AudioBlock, + a: Link, + p: Paragraph, + button: MarkdownButton, + form: MarkdownForm, + script: ScriptBlock as any, + details: ThinkBlock, + ...customComponents, + }} + > + {/* Markdown detect has problem. 
*/} + {latexContent} + + ) +} diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx index 144decada5..0692680005 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx @@ -7,7 +7,6 @@ import DocumentFileIcon from '@/app/components/datasets/common/document-file-ico import cn from '@/utils/classnames' import type { CustomFile as File, FileItem } from '@/models/datasets' import { ToastContext } from '@/app/components/base/toast' -import SimplePieChart from '@/app/components/base/simple-pie-chart' import { upload } from '@/service/base' import I18n from '@/context/i18n' import { LanguagesSupported } from '@/i18n-config/language' @@ -17,6 +16,9 @@ import useTheme from '@/hooks/use-theme' import { useFileUploadConfig } from '@/service/use-common' import { useDataSourceStore, useDataSourceStoreWithSelector } from '../store' import produce from 'immer' +import dynamic from 'next/dynamic' + +const SimplePieChart = dynamic(() => import('@/app/components/base/simple-pie-chart'), { ssr: false }) const FILES_NUMBER_LIMIT = 20 diff --git a/web/app/components/workflow-app/index.tsx b/web/app/components/workflow-app/index.tsx index c18a86f981..05654f4e74 100644 --- a/web/app/components/workflow-app/index.tsx +++ b/web/app/components/workflow-app/index.tsx @@ -8,7 +8,7 @@ import { } from '@/app/components/workflow/types' import { useWorkflowInit, -} from './hooks' +} from './hooks/use-workflow-init' import { initialEdges, initialNodes, diff --git a/web/app/components/workflow/hooks/use-workflow-history.ts b/web/app/components/workflow/hooks/use-workflow-history.ts index b7338dc4f8..a9b2f0f699 100644 --- a/web/app/components/workflow/hooks/use-workflow-history.ts +++ b/web/app/components/workflow/hooks/use-workflow-history.ts @@ -16,23 +16,25 @@ import type { WorkflowHistoryEventMeta } from '../workflow-history-store' * - InputChange events in Node Panels do not trigger state changes. * - Resizing UI elements does not trigger state changes. 
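A note on the two patterns this refactor leans on. First, next/dynamic with ssr: false moves heavy, browser-only modules (mermaid, react-markdown, the echarts-backed code block) out of the server bundle, so the chunk is fetched only when the component actually mounts. A minimal sketch, assuming a hypothetical HeavyChart module; the name and path are illustrative, not part of this patch:

import dynamic from 'next/dynamic'

// Client-only: the module is never evaluated during SSR, and its chunk
// is downloaded on the first render of <HeavyChart />.
const HeavyChart = dynamic(() => import('@/app/components/base/heavy-chart'), {
  ssr: false,
  loading: () => <div>Loading chart…</div>, // optional placeholder while the chunk loads
})

Second, the WorkflowHistoryEvent rewrite that follows swaps a TypeScript enum for a const object plus a keyof typeof alias. The object is erasable syntax (plain JavaScript once types are stripped) while keeping enum-like call sites. A sketch of the shape, with hypothetical members:

const Status = {
  Draft: 'Draft',
  Published: 'Published',
} as const

type StatusT = keyof typeof Status // 'Draft' | 'Published'

const describe = (s: StatusT): string =>
  s === Status.Draft ? 'not yet published' : 'live'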
*/ -export enum WorkflowHistoryEvent { - NodeTitleChange = 'NodeTitleChange', - NodeDescriptionChange = 'NodeDescriptionChange', - NodeDragStop = 'NodeDragStop', - NodeChange = 'NodeChange', - NodeConnect = 'NodeConnect', - NodePaste = 'NodePaste', - NodeDelete = 'NodeDelete', - EdgeDelete = 'EdgeDelete', - EdgeDeleteByDeleteBranch = 'EdgeDeleteByDeleteBranch', - NodeAdd = 'NodeAdd', - NodeResize = 'NodeResize', - NoteAdd = 'NoteAdd', - NoteChange = 'NoteChange', - NoteDelete = 'NoteDelete', - LayoutOrganize = 'LayoutOrganize', -} +export const WorkflowHistoryEvent = { + NodeTitleChange: 'NodeTitleChange', + NodeDescriptionChange: 'NodeDescriptionChange', + NodeDragStop: 'NodeDragStop', + NodeChange: 'NodeChange', + NodeConnect: 'NodeConnect', + NodePaste: 'NodePaste', + NodeDelete: 'NodeDelete', + EdgeDelete: 'EdgeDelete', + EdgeDeleteByDeleteBranch: 'EdgeDeleteByDeleteBranch', + NodeAdd: 'NodeAdd', + NodeResize: 'NodeResize', + NoteAdd: 'NoteAdd', + NoteChange: 'NoteChange', + NoteDelete: 'NoteDelete', + LayoutOrganize: 'LayoutOrganize', +} as const + +export type WorkflowHistoryEventT = keyof typeof WorkflowHistoryEvent export const useWorkflowHistory = () => { const store = useStoreApi() @@ -65,7 +67,7 @@ export const useWorkflowHistory = () => { // Some events may be triggered multiple times in a short period of time. // We debounce the history state update to avoid creating multiple history states // with minimal changes. - const saveStateToHistoryRef = useRef(debounce((event: WorkflowHistoryEvent, meta?: WorkflowHistoryEventMeta) => { + const saveStateToHistoryRef = useRef(debounce((event: WorkflowHistoryEventT, meta?: WorkflowHistoryEventMeta) => { workflowHistoryStore.setState({ workflowHistoryEvent: event, workflowHistoryEventMeta: meta, @@ -74,7 +76,7 @@ export const useWorkflowHistory = () => { }) }, 500)) - const saveStateToHistory = useCallback((event: WorkflowHistoryEvent, meta?: WorkflowHistoryEventMeta) => { + const saveStateToHistory = useCallback((event: WorkflowHistoryEventT, meta?: WorkflowHistoryEventMeta) => { switch (event) { case WorkflowHistoryEvent.NoteChange: // Hint: Note change does not trigger when note text changes, @@ -105,7 +107,7 @@ export const useWorkflowHistory = () => { } }, []) - const getHistoryLabel = useCallback((event: WorkflowHistoryEvent) => { + const getHistoryLabel = useCallback((event: WorkflowHistoryEventT) => { switch (event) { case WorkflowHistoryEvent.NodeTitleChange: return t('workflow.changeHistory.nodeTitleChange') diff --git a/web/app/components/workflow/workflow-history-store.tsx b/web/app/components/workflow/workflow-history-store.tsx index c250708177..96e87f4fd4 100644 --- a/web/app/components/workflow/workflow-history-store.tsx +++ b/web/app/components/workflow/workflow-history-store.tsx @@ -3,7 +3,7 @@ import { type StoreApi, create } from 'zustand' import { type TemporalState, temporal } from 'zundo' import isDeepEqual from 'fast-deep-equal' import type { Edge, Node } from './types' -import type { WorkflowHistoryEvent } from './hooks' +import type { WorkflowHistoryEventT } from './hooks' import { noop } from 'lodash-es' export const WorkflowHistoryStoreContext = createContext({ store: null, shortcutsEnabled: true, setShortcutsEnabled: noop }) @@ -98,7 +98,7 @@ function createStore({ export type WorkflowHistoryStore = { nodes: Node[] edges: Edge[] - workflowHistoryEvent: WorkflowHistoryEvent | undefined + workflowHistoryEvent: WorkflowHistoryEventT | undefined workflowHistoryEventMeta?: WorkflowHistoryEventMeta } From 
11f7a89e250bfbb2bc44ef59f962761cdf184496 Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Tue, 7 Oct 2025 15:50:44 +0900 Subject: [PATCH 22/31] refactor: Enable type checking for dataset config manager (#26494) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../easy_ui_based_app/dataset/manager.py | 78 +++++++++++-------- api/pyrightconfig.json | 3 +- 2 files changed, 46 insertions(+), 35 deletions(-) diff --git a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py index 4b824bde76..3564cc175b 100644 --- a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py @@ -1,4 +1,5 @@ import uuid +from typing import Literal, cast from core.app.app_config.entities import ( DatasetEntity, @@ -74,6 +75,9 @@ class DatasetConfigManager: return None query_variable = config.get("dataset_query_variable") + metadata_model_config_dict = dataset_configs.get("metadata_model_config") + metadata_filtering_conditions_dict = dataset_configs.get("metadata_filtering_conditions") + if dataset_configs["retrieval_model"] == "single": return DatasetEntity( dataset_ids=dataset_ids, @@ -82,18 +86,23 @@ class DatasetConfigManager: retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of( dataset_configs["retrieval_model"] ), - metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"), - metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config")) - if dataset_configs.get("metadata_model_config") + metadata_filtering_mode=cast( + Literal["disabled", "automatic", "manual"], + dataset_configs.get("metadata_filtering_mode", "disabled"), + ), + metadata_model_config=ModelConfig(**metadata_model_config_dict) + if isinstance(metadata_model_config_dict, dict) else None, - metadata_filtering_conditions=MetadataFilteringCondition( - **dataset_configs.get("metadata_filtering_conditions", {}) - ) - if dataset_configs.get("metadata_filtering_conditions") + metadata_filtering_conditions=MetadataFilteringCondition(**metadata_filtering_conditions_dict) + if isinstance(metadata_filtering_conditions_dict, dict) else None, ), ) else: + score_threshold_val = dataset_configs.get("score_threshold") + reranking_model_val = dataset_configs.get("reranking_model") + weights_val = dataset_configs.get("weights") + return DatasetEntity( dataset_ids=dataset_ids, retrieve_config=DatasetRetrieveConfigEntity( @@ -101,22 +110,23 @@ class DatasetConfigManager: retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of( dataset_configs["retrieval_model"] ), - top_k=dataset_configs.get("top_k", 4), - score_threshold=dataset_configs.get("score_threshold") - if dataset_configs.get("score_threshold_enabled", False) + top_k=int(dataset_configs.get("top_k", 4)), + score_threshold=float(score_threshold_val) + if dataset_configs.get("score_threshold_enabled", False) and score_threshold_val is not None else None, - reranking_model=dataset_configs.get("reranking_model"), - weights=dataset_configs.get("weights"), - reranking_enabled=dataset_configs.get("reranking_enabled", True), + reranking_model=reranking_model_val if isinstance(reranking_model_val, dict) else None, + weights=weights_val if isinstance(weights_val, dict) else None, + reranking_enabled=bool(dataset_configs.get("reranking_enabled", True)), 
rerank_mode=dataset_configs.get("reranking_mode", "reranking_model"), - metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"), - metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config")) - if dataset_configs.get("metadata_model_config") + metadata_filtering_mode=cast( + Literal["disabled", "automatic", "manual"], + dataset_configs.get("metadata_filtering_mode", "disabled"), + ), + metadata_model_config=ModelConfig(**metadata_model_config_dict) + if isinstance(metadata_model_config_dict, dict) else None, - metadata_filtering_conditions=MetadataFilteringCondition( - **dataset_configs.get("metadata_filtering_conditions", {}) - ) - if dataset_configs.get("metadata_filtering_conditions") + metadata_filtering_conditions=MetadataFilteringCondition(**metadata_filtering_conditions_dict) + if isinstance(metadata_filtering_conditions_dict, dict) else None, ), ) @@ -134,18 +144,17 @@ class DatasetConfigManager: config = cls.extract_dataset_config_for_legacy_compatibility(tenant_id, app_mode, config) # dataset_configs - if not config.get("dataset_configs"): - config["dataset_configs"] = {"retrieval_model": "single"} + if "dataset_configs" not in config or not config.get("dataset_configs"): + config["dataset_configs"] = {} + config["dataset_configs"]["retrieval_model"] = config["dataset_configs"].get("retrieval_model", "single") if not isinstance(config["dataset_configs"], dict): raise ValueError("dataset_configs must be of object type") - if not config["dataset_configs"].get("datasets"): + if "datasets" not in config["dataset_configs"] or not config["dataset_configs"].get("datasets"): config["dataset_configs"]["datasets"] = {"strategy": "router", "datasets": []} - need_manual_query_datasets = config.get("dataset_configs") and config["dataset_configs"].get( - "datasets", {} - ).get("datasets") + need_manual_query_datasets = config.get("dataset_configs", {}).get("datasets", {}).get("datasets") if need_manual_query_datasets and app_mode == AppMode.COMPLETION: # Only check when mode is completion @@ -166,8 +175,8 @@ class DatasetConfigManager: :param config: app model config args """ # Extract dataset config for legacy compatibility - if not config.get("agent_mode"): - config["agent_mode"] = {"enabled": False, "tools": []} + if "agent_mode" not in config or not config.get("agent_mode"): + config["agent_mode"] = {} if not isinstance(config["agent_mode"], dict): raise ValueError("agent_mode must be of object type") @@ -180,19 +189,22 @@ class DatasetConfigManager: raise ValueError("enabled in agent_mode must be of boolean type") # tools - if not config["agent_mode"].get("tools"): + if "tools" not in config["agent_mode"] or not config["agent_mode"].get("tools"): config["agent_mode"]["tools"] = [] if not isinstance(config["agent_mode"]["tools"], list): raise ValueError("tools in agent_mode must be a list of objects") # strategy - if not config["agent_mode"].get("strategy"): + if "strategy" not in config["agent_mode"] or not config["agent_mode"].get("strategy"): config["agent_mode"]["strategy"] = PlanningStrategy.ROUTER.value has_datasets = False - if config["agent_mode"]["strategy"] in {PlanningStrategy.ROUTER.value, PlanningStrategy.REACT_ROUTER.value}: - for tool in config["agent_mode"]["tools"]: + if config.get("agent_mode", {}).get("strategy") in { + PlanningStrategy.ROUTER.value, + PlanningStrategy.REACT_ROUTER.value, + }: + for tool in config.get("agent_mode", {}).get("tools", []): key = list(tool.keys())[0] if key == "dataset": # old style, use tool 
name as key @@ -217,7 +229,7 @@ class DatasetConfigManager: has_datasets = True - need_manual_query_datasets = has_datasets and config["agent_mode"]["enabled"] + need_manual_query_datasets = has_datasets and config.get("agent_mode", {}).get("enabled") if need_manual_query_datasets and app_mode == AppMode.COMPLETION: # Only check when mode is completion diff --git a/api/pyrightconfig.json b/api/pyrightconfig.json index d98e30bb80..c733059331 100644 --- a/api/pyrightconfig.json +++ b/api/pyrightconfig.json @@ -4,8 +4,7 @@ "tests/", ".venv", "migrations/", - "core/rag", - "core/app/app_config/easy_ui_based_app/dataset" + "core/rag" ], "typeCheckingMode": "strict", "allowedUntypedLibraries": [ From 9387cc088cd356acdf9b5b8341456df957b3d73c Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Wed, 8 Oct 2025 19:38:38 +0900 Subject: [PATCH 23/31] feat: remove unused python dependency (#26629) Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/pyproject.toml | 3 --- api/uv.lock | 43 ------------------------------------------- 2 files changed, 46 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 3c4efe074b..96ca20c31c 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -5,7 +5,6 @@ requires-python = ">=3.11,<3.13" dependencies = [ "arize-phoenix-otel~=0.9.2", - "authlib==1.6.4", "azure-identity==1.16.1", "beautifulsoup4==4.12.2", "boto3==1.35.99", @@ -34,7 +33,6 @@ dependencies = [ "json-repair>=0.41.1", "langfuse~=2.51.3", "langsmith~=0.1.77", - "mailchimp-transactional~=1.0.50", "markdown~=3.5.1", "numpy~=1.26.4", "openpyxl~=3.1.5", @@ -59,7 +57,6 @@ dependencies = [ "opentelemetry-semantic-conventions==0.48b0", "opentelemetry-util-http==0.48b0", "pandas[excel,output-formatting,performance]~=2.2.2", - "pandoc~=2.4", "psycogreen~=1.0.2", "psycopg2-binary~=2.9.6", "pycryptodome==3.19.1", diff --git a/api/uv.lock b/api/uv.lock index 675984e3b3..804c7abaa5 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1281,7 +1281,6 @@ version = "1.9.1" source = { virtual = "." 
} dependencies = [ { name = "arize-phoenix-otel" }, - { name = "authlib" }, { name = "azure-identity" }, { name = "beautifulsoup4" }, { name = "boto3" }, @@ -1312,7 +1311,6 @@ dependencies = [ { name = "json-repair" }, { name = "langfuse" }, { name = "langsmith" }, - { name = "mailchimp-transactional" }, { name = "markdown" }, { name = "numpy" }, { name = "openpyxl" }, @@ -1336,7 +1334,6 @@ dependencies = [ { name = "opik" }, { name = "packaging" }, { name = "pandas", extra = ["excel", "output-formatting", "performance"] }, - { name = "pandoc" }, { name = "psycogreen" }, { name = "psycopg2-binary" }, { name = "pycryptodome" }, @@ -1474,7 +1471,6 @@ vdb = [ [package.metadata] requires-dist = [ { name = "arize-phoenix-otel", specifier = "~=0.9.2" }, - { name = "authlib", specifier = "==1.6.4" }, { name = "azure-identity", specifier = "==1.16.1" }, { name = "beautifulsoup4", specifier = "==4.12.2" }, { name = "boto3", specifier = "==1.35.99" }, @@ -1505,7 +1501,6 @@ requires-dist = [ { name = "json-repair", specifier = ">=0.41.1" }, { name = "langfuse", specifier = "~=2.51.3" }, { name = "langsmith", specifier = "~=0.1.77" }, - { name = "mailchimp-transactional", specifier = "~=1.0.50" }, { name = "markdown", specifier = "~=3.5.1" }, { name = "numpy", specifier = "~=1.26.4" }, { name = "openpyxl", specifier = "~=3.1.5" }, @@ -1529,7 +1524,6 @@ requires-dist = [ { name = "opik", specifier = "~=1.7.25" }, { name = "packaging", specifier = "~=23.2" }, { name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=2.2.2" }, - { name = "pandoc", specifier = "~=2.4" }, { name = "psycogreen", specifier = "~=1.0.2" }, { name = "psycopg2-binary", specifier = "~=2.9.6" }, { name = "pycryptodome", specifier = "==3.19.1" }, @@ -3170,21 +3164,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6c/e1/0686c91738f3e6c2e1a243e0fdd4371667c4d2e5009b0a3605806c2aa020/lz4-4.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:2f4f2965c98ab254feddf6b5072854a6935adab7bc81412ec4fe238f07b85f62", size = 89736, upload-time = "2025-04-01T22:55:40.5Z" }, ] -[[package]] -name = "mailchimp-transactional" -version = "1.0.56" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "python-dateutil" }, - { name = "requests" }, - { name = "six" }, - { name = "urllib3" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/bc/cb60d02c00996839bbd87444a97d0ba5ac271b1a324001562afb8f685251/mailchimp_transactional-1.0.56-py3-none-any.whl", hash = "sha256:a76ea88b90a2d47d8b5134586aabbd3a96c459f6066d8886748ab59e50de36eb", size = 31660, upload-time = "2024-02-01T18:39:19.717Z" }, -] - [[package]] name = "mako" version = "1.3.10" @@ -4216,16 +4195,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/f8/46141ba8c9d7064dc5008bfb4a6ae5bd3c30e4c61c28b5c5ed485bf358ba/pandas_stubs-2.2.3.250527-py3-none-any.whl", hash = "sha256:cd0a49a95b8c5f944e605be711042a4dd8550e2c559b43d70ba2c4b524b66163", size = 159683, upload-time = "2025-05-27T15:24:28.4Z" }, ] -[[package]] -name = "pandoc" -version = "2.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "plumbum" }, - { name = "ply" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/10/9a/e3186e760c57ee5f1c27ea5cea577a0ff9abfca51eefcb4d9a4cd39aff2e/pandoc-2.4.tar.gz", hash = "sha256:ecd1f8cbb7f4180c6b5db4a17a7c1a74df519995f5f186ef81ce72a9cbd0dd9a", size = 34635, upload-time = "2024-08-07T14:33:58.016Z" } - [[package]] name = "pathspec" version = 
"0.12.1" @@ -4332,18 +4301,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] -[[package]] -name = "plumbum" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f0/5d/49ba324ad4ae5b1a4caefafbce7a1648540129344481f2ed4ef6bb68d451/plumbum-1.9.0.tar.gz", hash = "sha256:e640062b72642c3873bd5bdc3effed75ba4d3c70ef6b6a7b907357a84d909219", size = 319083, upload-time = "2024-10-05T05:59:27.059Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/9d/d03542c93bb3d448406731b80f39c3d5601282f778328c22c77d270f4ed4/plumbum-1.9.0-py3-none-any.whl", hash = "sha256:9fd0d3b0e8d86e4b581af36edf3f3bbe9d1ae15b45b8caab28de1bcb27aaa7f5", size = 127970, upload-time = "2024-10-05T05:59:25.102Z" }, -] - [[package]] name = "ply" version = "3.11" From dbd23f91e595425f8021560ab098352ed57e99dd Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Wed, 8 Oct 2025 18:40:19 +0800 Subject: [PATCH 24/31] Feature add test containers mail invite task (#26637) Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> --- .../tasks/test_mail_invite_member_task.py | 543 ++++++++++++++++++ 1 file changed, 543 insertions(+) create mode 100644 api/tests/test_containers_integration_tests/tasks/test_mail_invite_member_task.py diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_invite_member_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_invite_member_task.py new file mode 100644 index 0000000000..8fef87b317 --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_invite_member_task.py @@ -0,0 +1,543 @@ +""" +Integration tests for mail_invite_member_task using testcontainers. + +This module provides integration tests for the invite member email task +using TestContainers infrastructure. The tests ensure that the task properly sends +invitation emails with internationalization support, handles error scenarios, +and integrates correctly with the database and Redis for token management. + +All tests use the testcontainers infrastructure to ensure proper database isolation +and realistic testing scenarios with actual PostgreSQL and Redis instances. +""" + +import json +import uuid +from datetime import UTC, datetime +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from extensions.ext_redis import redis_client +from libs.email_i18n import EmailType +from models.account import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_invite_member_task import send_invite_member_mail_task + + +class TestMailInviteMemberTask: + """ + Integration tests for send_invite_member_mail_task using testcontainers. 
+ + This test class covers the core functionality of the invite member email task: + - Email sending with proper internationalization + - Template context generation and URL construction + - Error handling for failure scenarios + - Integration with Redis for token validation + - Mail service initialization checks + - Real database integration with actual invitation flow + + All tests use the testcontainers infrastructure to ensure proper database isolation + and realistic testing environment with actual database and Redis interactions. + """ + + @pytest.fixture(autouse=True) + def cleanup_database(self, db_session_with_containers): + """Clean up database before each test to ensure isolation.""" + # Clear all test data + db_session_with_containers.query(TenantAccountJoin).delete() + db_session_with_containers.query(Tenant).delete() + db_session_with_containers.query(Account).delete() + db_session_with_containers.commit() + + # Clear Redis cache + redis_client.flushdb() + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("tasks.mail_invite_member_task.mail") as mock_mail, + patch("tasks.mail_invite_member_task.get_email_i18n_service") as mock_email_service, + patch("tasks.mail_invite_member_task.dify_config") as mock_config, + ): + # Setup mail service mock + mock_mail.is_inited.return_value = True + + # Setup email service mock + mock_email_service_instance = MagicMock() + mock_email_service_instance.send_email.return_value = None + mock_email_service.return_value = mock_email_service_instance + + # Setup config mock + mock_config.CONSOLE_WEB_URL = "https://console.dify.ai" + + yield { + "mail": mock_mail, + "email_service": mock_email_service_instance, + "config": mock_config, + } + + def _create_test_account_and_tenant(self, db_session_with_containers): + """ + Helper method to create a test account and tenant for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + + Returns: + tuple: (Account, Tenant) created instances + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + password=fake.password(), + interface_language="en-US", + status=AccountStatus.ACTIVE.value, + created_at=datetime.now(UTC), + updated_at=datetime.now(UTC), + ) + db_session_with_containers.add(account) + db_session_with_containers.commit() + db_session_with_containers.refresh(account) + + # Create tenant + tenant = Tenant( + name=fake.company(), + created_at=datetime.now(UTC), + updated_at=datetime.now(UTC), + ) + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + db_session_with_containers.refresh(tenant) + + # Create tenant member relationship + tenant_join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + created_at=datetime.now(UTC), + ) + db_session_with_containers.add(tenant_join) + db_session_with_containers.commit() + + return account, tenant + + def _create_invitation_token(self, tenant, account): + """ + Helper method to create a valid invitation token in Redis. 
+ + Args: + tenant: Tenant instance + account: Account instance + + Returns: + str: Generated invitation token + """ + token = str(uuid.uuid4()) + invitation_data = { + "account_id": account.id, + "email": account.email, + "workspace_id": tenant.id, + } + cache_key = f"member_invite:token:{token}" + redis_client.setex(cache_key, 24 * 60 * 60, json.dumps(invitation_data)) # 24 hours + return token + + def _create_pending_account_for_invitation(self, db_session_with_containers, email, tenant): + """ + Helper method to create a pending account for invitation testing. + + Args: + db_session_with_containers: Database session + email: Email address for the account + tenant: Tenant instance + + Returns: + Account: Created pending account + """ + account = Account( + email=email, + name=email.split("@")[0], + password="", + interface_language="en-US", + status=AccountStatus.PENDING.value, + created_at=datetime.now(UTC), + updated_at=datetime.now(UTC), + ) + db_session_with_containers.add(account) + db_session_with_containers.commit() + db_session_with_containers.refresh(account) + + # Create tenant member relationship + tenant_join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.NORMAL.value, + created_at=datetime.now(UTC), + ) + db_session_with_containers.add(tenant_join) + db_session_with_containers.commit() + + return account + + def test_send_invite_member_mail_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful invitation email sending with all parameters. + + This test verifies: + - Email service is called with correct parameters + - Template context includes all required fields + - URL is constructed correctly with token + - Performance logging is recorded + - No exceptions are raised + """ + # Arrange: Create test data + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + invitee_email = "test@example.com" + language = "en-US" + token = self._create_invitation_token(tenant, inviter) + inviter_name = inviter.name + workspace_name = tenant.name + + # Act: Execute the task + send_invite_member_mail_task( + language=language, + to=invitee_email, + token=token, + inviter_name=inviter_name, + workspace_name=workspace_name, + ) + + # Assert: Verify email service was called correctly + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.assert_called_once() + + # Verify call arguments + call_args = mock_email_service.send_email.call_args + assert call_args[1]["email_type"] == EmailType.INVITE_MEMBER + assert call_args[1]["language_code"] == language + assert call_args[1]["to"] == invitee_email + + # Verify template context + template_context = call_args[1]["template_context"] + assert template_context["to"] == invitee_email + assert template_context["inviter_name"] == inviter_name + assert template_context["workspace_name"] == workspace_name + assert template_context["url"] == f"https://console.dify.ai/activate?token={token}" + + def test_send_invite_member_mail_different_languages( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test invitation email sending with different language codes. 
+ + This test verifies: + - Email service handles different language codes correctly + - Template context is passed correctly for each language + - No language-specific errors occur + """ + # Arrange: Create test data + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + test_languages = ["en-US", "zh-CN", "ja-JP", "fr-FR", "de-DE", "es-ES"] + + for language in test_languages: + # Act: Execute the task with different language + send_invite_member_mail_task( + language=language, + to="test@example.com", + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify language code was passed correctly + mock_email_service = mock_external_service_dependencies["email_service"] + call_args = mock_email_service.send_email.call_args + assert call_args[1]["language_code"] == language + + def test_send_invite_member_mail_mail_not_initialized( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test behavior when mail service is not initialized. + + This test verifies: + - Task returns early when mail is not initialized + - Email service is not called + - No exceptions are raised + """ + # Arrange: Setup mail service as not initialized + mock_mail = mock_external_service_dependencies["mail"] + mock_mail.is_inited.return_value = False + + # Act: Execute the task + result = send_invite_member_mail_task( + language="en-US", + to="test@example.com", + token="test-token", + inviter_name="Test User", + workspace_name="Test Workspace", + ) + + # Assert: Verify early return + assert result is None + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.assert_not_called() + + def test_send_invite_member_mail_email_service_exception( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test error handling when email service raises an exception. + + This test verifies: + - Exception is caught and logged + - Task completes without raising exception + - Error logging is performed + """ + # Arrange: Setup email service to raise exception + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.side_effect = Exception("Email service failed") + + # Act & Assert: Execute task and verify exception is handled + with patch("tasks.mail_invite_member_task.logger") as mock_logger: + send_invite_member_mail_task( + language="en-US", + to="test@example.com", + token="test-token", + inviter_name="Test User", + workspace_name="Test Workspace", + ) + + # Verify error was logged + mock_logger.exception.assert_called_once() + error_call = mock_logger.exception.call_args[0][0] + assert "Send invite member mail to %s failed" in error_call + + def test_send_invite_member_mail_template_context_validation( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test template context contains all required fields for email rendering. 
+ + This test verifies: + - All required template context fields are present + - Field values match expected data + - URL construction is correct + - No missing or None values in context + """ + # Arrange: Create test data with specific values + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = "test-token-123" + invitee_email = "invitee@example.com" + inviter_name = "John Doe" + workspace_name = "Acme Corp" + + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to=invitee_email, + token=token, + inviter_name=inviter_name, + workspace_name=workspace_name, + ) + + # Assert: Verify template context + mock_email_service = mock_external_service_dependencies["email_service"] + call_args = mock_email_service.send_email.call_args + template_context = call_args[1]["template_context"] + + # Verify all required fields are present + required_fields = ["to", "inviter_name", "workspace_name", "url"] + for field in required_fields: + assert field in template_context + assert template_context[field] is not None + assert template_context[field] != "" + + # Verify specific values + assert template_context["to"] == invitee_email + assert template_context["inviter_name"] == inviter_name + assert template_context["workspace_name"] == workspace_name + assert template_context["url"] == f"https://console.dify.ai/activate?token={token}" + + def test_send_invite_member_mail_integration_with_redis_token( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test integration with Redis token validation. + + This test verifies: + - Task works with real Redis token data + - Token validation can be performed after email sending + - Redis data integrity is maintained + """ + # Arrange: Create test data and store token in Redis + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + # Verify token exists in Redis before sending email + cache_key = f"member_invite:token:{token}" + assert redis_client.exists(cache_key) == 1 + + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to=inviter.email, + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify token still exists after email sending + assert redis_client.exists(cache_key) == 1 + + # Verify token data integrity + token_data = redis_client.get(cache_key) + assert token_data is not None + invitation_data = json.loads(token_data) + assert invitation_data["account_id"] == inviter.id + assert invitation_data["email"] == inviter.email + assert invitation_data["workspace_id"] == tenant.id + + def test_send_invite_member_mail_with_special_characters( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test email sending with special characters in names and workspace names. 
+ + This test verifies: + - Special characters are handled correctly in template context + - Email service receives properly formatted data + - No encoding issues occur + """ + # Arrange: Create test data with special characters + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + special_cases = [ + ("John O'Connor", "Acme & Co."), + ("José María", "Café & Restaurant"), + ("李小明", "北京科技有限公司"), + ("François & Marie", "L'École Internationale"), + ("Александр", "ООО Технологии"), + ("محمد أحمد", "شركة التقنية المتقدمة"), + ] + + for inviter_name, workspace_name in special_cases: + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to="test@example.com", + token=token, + inviter_name=inviter_name, + workspace_name=workspace_name, + ) + + # Assert: Verify special characters are preserved + mock_email_service = mock_external_service_dependencies["email_service"] + call_args = mock_email_service.send_email.call_args + template_context = call_args[1]["template_context"] + + assert template_context["inviter_name"] == inviter_name + assert template_context["workspace_name"] == workspace_name + + def test_send_invite_member_mail_real_database_integration( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test real database integration with actual invitation flow. + + This test verifies: + - Task works with real database entities + - Account and tenant relationships are properly maintained + - Database state is consistent after email sending + - Real invitation data flow is tested + """ + # Arrange: Create real database entities + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + invitee_email = "newmember@example.com" + + # Create a pending account for invitation (simulating real invitation flow) + pending_account = self._create_pending_account_for_invitation(db_session_with_containers, invitee_email, tenant) + + # Create invitation token with real account data + token = self._create_invitation_token(tenant, pending_account) + + # Act: Execute the task with real data + send_invite_member_mail_task( + language="en-US", + to=invitee_email, + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify email service was called with real data + mock_email_service = mock_external_service_dependencies["email_service"] + mock_email_service.send_email.assert_called_once() + + # Verify database state is maintained + db_session_with_containers.refresh(pending_account) + db_session_with_containers.refresh(tenant) + + assert pending_account.status == AccountStatus.PENDING.value + assert pending_account.email == invitee_email + assert tenant.name is not None + + # Verify tenant relationship exists + tenant_join = ( + db_session_with_containers.query(TenantAccountJoin) + .filter_by(tenant_id=tenant.id, account_id=pending_account.id) + .first() + ) + assert tenant_join is not None + assert tenant_join.role == TenantAccountRole.NORMAL.value + + def test_send_invite_member_mail_token_lifecycle_management( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test token lifecycle management and validation. 
+ + This test verifies: + - Token is properly stored in Redis with correct TTL + - Token data structure is correct + - Token can be retrieved and validated after email sending + - Token expiration is handled correctly + """ + # Arrange: Create test data + inviter, tenant = self._create_test_account_and_tenant(db_session_with_containers) + token = self._create_invitation_token(tenant, inviter) + + # Act: Execute the task + send_invite_member_mail_task( + language="en-US", + to=inviter.email, + token=token, + inviter_name=inviter.name, + workspace_name=tenant.name, + ) + + # Assert: Verify token lifecycle + cache_key = f"member_invite:token:{token}" + + # Token should still exist + assert redis_client.exists(cache_key) == 1 + + # Token should have correct TTL (approximately 24 hours) + ttl = redis_client.ttl(cache_key) + assert 23 * 60 * 60 <= ttl <= 24 * 60 * 60 # Allow some tolerance + + # Token data should be valid + token_data = redis_client.get(cache_key) + assert token_data is not None + + invitation_data = json.loads(token_data) + assert invitation_data["account_id"] == inviter.id + assert invitation_data["email"] == inviter.email + assert invitation_data["workspace_id"] == tenant.id From 9cca605bac55d6088566657b94a88cd05df23955 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9D=9E=E6=B3=95=E6=93=8D=E4=BD=9C?= Date: Wed, 8 Oct 2025 19:09:03 +0800 Subject: [PATCH 25/31] chore: improve bool input of start node (#26647) --- .../config-var/config-modal/index.tsx | 32 +++++++++++++++++++ .../base/chat/chat-with-history/hooks.tsx | 6 ++-- .../base/chat/embedded-chatbot/hooks.tsx | 5 ++- .../share/text-generation/result/index.tsx | 4 +-- .../share/text-generation/run-once/index.tsx | 8 +++-- web/utils/model-config.ts | 11 +++++++ 6 files changed, 59 insertions(+), 7 deletions(-) diff --git a/web/app/components/app/configuration/config-var/config-modal/index.tsx b/web/app/components/app/configuration/config-var/config-modal/index.tsx index b0f0ea8779..bedcbfedbd 100644 --- a/web/app/components/app/configuration/config-var/config-modal/index.tsx +++ b/web/app/components/app/configuration/config-var/config-modal/index.tsx @@ -32,6 +32,19 @@ import { TransferMethod } from '@/types/app' import type { FileEntity } from '@/app/components/base/file-uploader/types' const TEXT_MAX_LENGTH = 256 +const CHECKBOX_DEFAULT_TRUE_VALUE = 'true' +const CHECKBOX_DEFAULT_FALSE_VALUE = 'false' + +const getCheckboxDefaultSelectValue = (value: InputVar['default']) => { + if (typeof value === 'boolean') + return value ? CHECKBOX_DEFAULT_TRUE_VALUE : CHECKBOX_DEFAULT_FALSE_VALUE + if (typeof value === 'string') + return value.toLowerCase() === CHECKBOX_DEFAULT_TRUE_VALUE ? CHECKBOX_DEFAULT_TRUE_VALUE : CHECKBOX_DEFAULT_FALSE_VALUE + return CHECKBOX_DEFAULT_FALSE_VALUE +} + +const parseCheckboxSelectValue = (value: string) => + value === CHECKBOX_DEFAULT_TRUE_VALUE export type IConfigModalProps = { isCreate?: boolean @@ -198,6 +211,8 @@ const ConfigModal: FC = ({ handlePayloadChange('variable')(e.target.value) }, [handlePayloadChange, t]) + const checkboxDefaultSelectValue = useMemo(() => getCheckboxDefaultSelectValue(tempPayload.default), [tempPayload.default]) + const handleConfirm = () => { const moreInfo = tempPayload.variable === payload?.variable ? 
undefined @@ -324,6 +339,23 @@ const ConfigModal: FC = ({ )} + {type === InputVarType.checkbox && ( + + handlePayloadChange('default')(parseCheckboxSelectValue(String(item.value)))} + placeholder={t('appDebug.variableConfig.selectDefaultValue')} + allowSearch={false} + /> + + )} + {type === InputVarType.select && ( <> diff --git a/web/app/components/base/chat/chat-with-history/hooks.tsx b/web/app/components/base/chat/chat-with-history/hooks.tsx index fb3e1bb8f3..79b6984bfe 100644 --- a/web/app/components/base/chat/chat-with-history/hooks.tsx +++ b/web/app/components/base/chat/chat-with-history/hooks.tsx @@ -235,13 +235,15 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => { } } - if(item.checkbox) { + if (item.checkbox) { + const preset = initInputs[item.checkbox.variable] === true return { ...item.checkbox, - default: false, + default: preset || item.default || item.checkbox.default, type: 'checkbox', } } + if (item.select) { const isInputInOptions = item.select.options.includes(initInputs[item.select.variable]) return { diff --git a/web/app/components/base/chat/embedded-chatbot/hooks.tsx b/web/app/components/base/chat/embedded-chatbot/hooks.tsx index 14a32860b9..aa7006db25 100644 --- a/web/app/components/base/chat/embedded-chatbot/hooks.tsx +++ b/web/app/components/base/chat/embedded-chatbot/hooks.tsx @@ -195,13 +195,16 @@ export const useEmbeddedChatbot = () => { type: 'number', } } + if (item.checkbox) { + const preset = initInputs[item.checkbox.variable] === true return { ...item.checkbox, - default: false, + default: preset || item.default || item.checkbox.default, type: 'checkbox', } } + if (item.select) { const isInputInOptions = item.select.options.includes(initInputs[item.select.variable]) return { diff --git a/web/app/components/share/text-generation/result/index.tsx b/web/app/components/share/text-generation/result/index.tsx index a7eb7f7591..ddc0d772c3 100644 --- a/web/app/components/share/text-generation/result/index.tsx +++ b/web/app/components/share/text-generation/result/index.tsx @@ -126,8 +126,8 @@ const Result: FC = ({ let hasEmptyInput = '' const requiredVars = prompt_variables?.filter(({ key, name, required, type }) => { - if(type === 'boolean') - return false // boolean input is not required + if(type === 'boolean' || type === 'checkbox') + return false // boolean/checkbox input is not required const res = (!key || !key.trim()) || (!name || !name.trim()) || (required || required === undefined || required === null) return res }) || [] // compatible with old version diff --git a/web/app/components/share/text-generation/run-once/index.tsx b/web/app/components/share/text-generation/run-once/index.tsx index 7896776f35..4f94aa1fe8 100644 --- a/web/app/components/share/text-generation/run-once/index.tsx +++ b/web/app/components/share/text-generation/run-once/index.tsx @@ -51,6 +51,8 @@ const RunOnce: FC = ({ promptConfig.prompt_variables.forEach((item) => { if (item.type === 'string' || item.type === 'paragraph') newInputs[item.key] = '' + else if (item.type === 'checkbox') + newInputs[item.key] = false else newInputs[item.key] = undefined }) @@ -77,6 +79,8 @@ const RunOnce: FC = ({ newInputs[item.key] = item.default || '' else if (item.type === 'number') newInputs[item.key] = item.default + else if (item.type === 'checkbox') + newInputs[item.key] = item.default || false else if (item.type === 'file') newInputs[item.key] = item.default else if (item.type === 'file-list') @@ -96,7 +100,7 @@ const RunOnce: FC = ({ {(inputs === null || inputs === 
undefined || Object.keys(inputs).length === 0) || !isInitialized ? null : promptConfig.prompt_variables.map(item => (
- {item.type !== 'boolean' && ( + {item.type !== 'checkbox' && ( )}
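The thread running through this patch: the config modal's default-value Select can only hold strings, while runtime inputs carry real booleans, so the patch normalizes at both edges. A minimal sketch of that round-trip (names abbreviated from the helpers above):

const TRUE_VALUE = 'true'
const FALSE_VALUE = 'false'

// Persisted defaults may arrive as a boolean or a legacy string; normalize for the Select.
function toSelectValue(v: boolean | string | undefined): string {
  if (typeof v === 'boolean')
    return v ? TRUE_VALUE : FALSE_VALUE
  if (typeof v === 'string')
    return v.toLowerCase() === TRUE_VALUE ? TRUE_VALUE : FALSE_VALUE
  return FALSE_VALUE
}

// And back to a real boolean once the user picks an option.
const toBoolean = (v: string): boolean => v === TRUE_VALUE

// toSelectValue(true) -> 'true'; toBoolean(toSelectValue('TRUE')) -> true

Run-once then seeds checkbox inputs with false rather than undefined (see the hunk above), which is why the required-field check can skip checkbox variables entirely.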
@@ -134,7 +138,7 @@ const RunOnce: FC = ({ onChange={(e: ChangeEvent) => { handleInputsChange({ ...inputsRef.current, [item.key]: e.target.value }) }} /> )} - {item.type === 'boolean' && ( + {item.type === 'checkbox' && ( Date: Thu, 9 Oct 2025 09:22:45 +0800 Subject: [PATCH 26/31] fix: missing LLM node output var description (#26648) --- web/app/components/workflow/nodes/llm/panel.tsx | 5 +++++ web/i18n/de-DE/workflow.ts | 1 + web/i18n/en-US/workflow.ts | 1 + web/i18n/es-ES/workflow.ts | 1 + web/i18n/fa-IR/workflow.ts | 1 + web/i18n/fr-FR/workflow.ts | 1 + web/i18n/hi-IN/workflow.ts | 1 + web/i18n/id-ID/workflow.ts | 1 + web/i18n/it-IT/workflow.ts | 1 + web/i18n/ja-JP/workflow.ts | 1 + web/i18n/ko-KR/workflow.ts | 1 + web/i18n/pl-PL/workflow.ts | 1 + web/i18n/pt-BR/workflow.ts | 1 + web/i18n/ro-RO/workflow.ts | 1 + web/i18n/ru-RU/workflow.ts | 1 + web/i18n/sl-SI/workflow.ts | 1 + web/i18n/th-TH/workflow.ts | 1 + web/i18n/tr-TR/workflow.ts | 1 + web/i18n/uk-UA/workflow.ts | 1 + web/i18n/vi-VN/workflow.ts | 1 + web/i18n/zh-Hans/workflow.ts | 1 + web/i18n/zh-Hant/workflow.ts | 1 + 22 files changed, 26 insertions(+) diff --git a/web/app/components/workflow/nodes/llm/panel.tsx b/web/app/components/workflow/nodes/llm/panel.tsx index 03569f6f7a..cd79b9f3d9 100644 --- a/web/app/components/workflow/nodes/llm/panel.tsx +++ b/web/app/components/workflow/nodes/llm/panel.tsx @@ -293,6 +293,11 @@ const Panel: FC> = ({ type='string' description={t(`${i18nPrefix}.outputVars.output`)} /> + Date: Thu, 9 Oct 2025 10:23:34 +0900 Subject: [PATCH 27/31] fix oxlint warnings (#26634) --- .../goto-anything/command-selector.test.tsx | 2 +- .../overview/tracing/config-button.tsx | 3 +- .../csv-uploader.tsx | 6 ++-- .../components/app/app-publisher/index.tsx | 3 +- .../app/app-publisher/version-info-modal.tsx | 6 ++-- .../app/configuration/base/icons/citation.tsx | 29 +++++++++++++++++++ .../config-var/config-modal/index.tsx | 4 +-- .../components/app/configuration/index.tsx | 2 +- .../prompt-value-panel/index.tsx | 4 +-- .../app/create-from-dsl-modal/uploader.tsx | 6 ++-- web/app/components/app/overview/app-chart.tsx | 3 +- web/app/components/base/audio-btn/audio.ts | 17 +++++------ .../base/chat/chat-with-history/hooks.tsx | 4 +-- .../base/chat/chat/answer/index.tsx | 12 +++++--- .../components/base/chat/chat/question.tsx | 12 +++++--- .../base/date-and-time-picker/utils/dayjs.ts | 2 +- web/app/components/base/drawer/index.tsx | 12 +++++--- .../components/base/emoji-picker/index.tsx | 4 +-- .../base/form/components/field/select.tsx | 5 +++- .../base/image-uploader/image-list.tsx | 4 +-- .../base/image-uploader/image-preview.tsx | 2 +- .../base/markdown-blocks/code-block.tsx | 2 +- web/app/components/base/mermaid/utils.ts | 2 +- web/app/components/base/pagination/hook.ts | 4 +-- web/app/components/base/popover/index.tsx | 9 ++++-- .../components/base/select/locale-signin.tsx | 2 +- web/app/components/base/select/locale.tsx | 2 +- .../components/base/tag-management/panel.tsx | 11 ++++--- web/app/components/base/voice-input/index.tsx | 3 +- .../create-from-dsl-modal/uploader.tsx | 6 ++-- .../datasets/create/file-uploader/index.tsx | 6 ++-- .../datasets/create/step-two/index.tsx | 19 ++++++------ .../website/jina-reader/base/options-wrap.tsx | 0 .../data-source/local-file/index.tsx | 6 ++-- .../website-crawl/base/crawled-result.tsx | 11 ++++--- .../documents/create-from-pipeline/index.tsx | 5 +++- .../detail/batch-modal/csv-uploader.tsx | 6 ++-- .../documents/detail/completed/index.tsx | 9 ++++-- 
.../documents/detail/metadata/index.tsx | 4 +-- .../settings/pipeline-settings/index.tsx | 5 +++- .../datasets/hit-testing/textarea.tsx | 3 +- .../datasets/list/dataset-card/index.tsx | 12 ++++---- web/app/components/datasets/loading.tsx | 3 ++ web/app/components/datasets/preview/index.tsx | 3 ++ .../header/account-setting/collapse/index.tsx | 2 +- .../model-load-balancing-configs.tsx | 2 +- .../app-selector/app-picker.tsx | 4 +-- .../model-selector/index.tsx | 2 +- .../panel/input-field/field-list/hooks.ts | 2 +- .../components/panel/test-run/header.tsx | 3 +- .../hooks/use-nodes-sync-draft.ts | 6 ++-- .../share/text-generation/index.tsx | 3 +- web/app/components/swr-initializer.tsx | 6 ++-- .../hooks/use-nodes-sync-draft.ts | 6 ++-- .../workflow/block-selector/tool/tool.tsx | 2 +- .../workflow/hooks/use-shortcuts.ts | 6 ++-- .../components/workflow/hooks/use-workflow.ts | 2 +- .../nodes/_base/components/retry/utils.ts | 1 + .../variable/var-reference-picker.tsx | 10 +++++-- .../variable/var-reference-vars.tsx | 2 +- .../workflow-panel/last-run/index.tsx | 4 +-- .../knowledge-base/components/option-card.tsx | 3 +- .../json-schema-config.tsx | 6 ++-- .../visual-editor/edit-card/index.tsx | 6 ++-- .../visual-editor/hooks.ts | 9 ++++-- .../nodes/loop/components/condition-value.tsx | 2 +- .../components/variable-modal-trigger.tsx | 6 ++-- .../panel/env-panel/variable-trigger.tsx | 6 ++-- web/app/components/workflow/run/index.tsx | 9 ++++-- .../components/nodes/loop/node.tsx | 2 +- .../components/mail-and-password-auth.tsx | 2 +- web/i18n-config/index.ts | 3 +- web/service/base.ts | 2 +- web/service/knowledge/use-hit-testing.ts | 1 + web/service/share.ts | 6 ++-- web/utils/var.ts | 4 +-- 76 files changed, 261 insertions(+), 144 deletions(-) delete mode 100644 web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx diff --git a/web/__tests__/goto-anything/command-selector.test.tsx b/web/__tests__/goto-anything/command-selector.test.tsx index 1db4be31fb..6d4e045d49 100644 --- a/web/__tests__/goto-anything/command-selector.test.tsx +++ b/web/__tests__/goto-anything/command-selector.test.tsx @@ -16,7 +16,7 @@ jest.mock('cmdk', () => ({ Item: ({ children, onSelect, value, className }: any) => (
onSelect && onSelect()} + onClick={() => onSelect?.()} data-value={value} data-testid={`command-item-${value}`} > diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx index 1ab40e31bf..246a1eb6a3 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-button.tsx @@ -4,6 +4,7 @@ import React, { useCallback, useRef, useState } from 'react' import type { PopupProps } from './config-popup' import ConfigPopup from './config-popup' +import cn from '@/utils/classnames' import { PortalToFollowElem, PortalToFollowElemContent, @@ -45,7 +46,7 @@ const ConfigBtn: FC = ({ offset={12} > -
+
{children}
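The bulk of this oxlint pass is two mechanical rewrites: a short-circuit call becomes an optional call, and a short-circuit statement becomes an explicit guard. Both are behavior-preserving for function-typed props; a minimal sketch:

type OnSelect = (() => void) | undefined

function demo(onSelect: OnSelect, dragging: boolean, setDragging: (v: boolean) => void) {
  // Before: onSelect && onSelect() , an expression used purely for its side effect
  onSelect?.()

  // Before: dragging && setDragging(false) , the same idiom in statement position
  if (dragging)
    setDragging(false)
}

The ?.() call short-circuits to undefined when the callee is null or undefined, so it fires in exactly the same cases as the && form.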
diff --git a/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx b/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx index dfc8d10087..b98eb815f9 100644 --- a/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx +++ b/web/app/components/app/annotation/batch-add-annotation-modal/csv-uploader.tsx @@ -28,7 +28,8 @@ const CSVUploader: FC = ({ const handleDragEnter = (e: DragEvent) => { e.preventDefault() e.stopPropagation() - e.target !== dragRef.current && setDragging(true) + if (e.target !== dragRef.current) + setDragging(true) } const handleDragOver = (e: DragEvent) => { e.preventDefault() @@ -37,7 +38,8 @@ const CSVUploader: FC = ({ const handleDragLeave = (e: DragEvent) => { e.preventDefault() e.stopPropagation() - e.target === dragRef.current && setDragging(false) + if (e.target === dragRef.current) + setDragging(false) } const handleDrop = (e: DragEvent) => { e.preventDefault() diff --git a/web/app/components/app/app-publisher/index.tsx b/web/app/components/app/app-publisher/index.tsx index 53cceb8020..df2618b49c 100644 --- a/web/app/components/app/app-publisher/index.tsx +++ b/web/app/components/app/app-publisher/index.tsx @@ -348,7 +348,8 @@ const AppPublisher = ({ { - publishedAt && handleOpenInExplore() + if (publishedAt) + handleOpenInExplore() }} disabled={!publishedAt || (systemFeatures.webapp_auth.enabled && !userCanAccessApp?.result)} icon={} diff --git a/web/app/components/app/app-publisher/version-info-modal.tsx b/web/app/components/app/app-publisher/version-info-modal.tsx index 4d5d3705c1..263f187736 100644 --- a/web/app/components/app/app-publisher/version-info-modal.tsx +++ b/web/app/components/app/app-publisher/version-info-modal.tsx @@ -40,7 +40,8 @@ const VersionInfoModal: FC = ({ return } else { - titleError && setTitleError(false) + if (titleError) + setTitleError(false) } if (releaseNotes.length > RELEASE_NOTES_MAX_LENGTH) { @@ -52,7 +53,8 @@ const VersionInfoModal: FC = ({ return } else { - releaseNotesError && setReleaseNotesError(false) + if (releaseNotesError) + setReleaseNotesError(false) } onPublish({ title, releaseNotes, id: versionInfo?.id }) diff --git a/web/app/components/app/configuration/base/icons/citation.tsx b/web/app/components/app/configuration/base/icons/citation.tsx index e69de29bb2..3aa6b0f0e1 100644 --- a/web/app/components/app/configuration/base/icons/citation.tsx +++ b/web/app/components/app/configuration/base/icons/citation.tsx @@ -0,0 +1,29 @@ +import type { SVGProps } from 'react' + +const CitationIcon = (props: SVGProps) => ( + +) + +export default CitationIcon diff --git a/web/app/components/app/configuration/config-var/config-modal/index.tsx b/web/app/components/app/configuration/config-var/config-modal/index.tsx index bedcbfedbd..8a02ca8caa 100644 --- a/web/app/components/app/configuration/config-var/config-modal/index.tsx +++ b/web/app/components/app/configuration/config-var/config-modal/index.tsx @@ -79,7 +79,7 @@ const ConfigModal: FC = ({ try { return JSON.stringify(JSON.parse(tempPayload.json_schema).properties, null, 2) } - catch (_e) { + catch { return '' } }, [tempPayload.json_schema]) @@ -123,7 +123,7 @@ const ConfigModal: FC = ({ } handlePayloadChange('json_schema')(JSON.stringify(res, null, 2)) } - catch (_e) { + catch { return null } }, [handlePayloadChange]) diff --git a/web/app/components/app/configuration/index.tsx b/web/app/components/app/configuration/index.tsx index f1f81ebf97..20229c9717 100644 --- 
a/web/app/components/app/configuration/index.tsx +++ b/web/app/components/app/configuration/index.tsx @@ -480,7 +480,7 @@ const Configuration: FC = () => { Toast.notify({ type: 'warning', message: `${t('common.modelProvider.parametersInvalidRemoved')}: ${Object.entries(removedDetails).map(([k, reason]) => `${k} (${reason})`).join(', ')}` }) setCompletionParams(filtered) } - catch (e) { + catch { Toast.notify({ type: 'error', message: t('common.error') }) setCompletionParams({}) } diff --git a/web/app/components/app/configuration/prompt-value-panel/index.tsx b/web/app/components/app/configuration/prompt-value-panel/index.tsx index e88268ba40..43c836132f 100644 --- a/web/app/components/app/configuration/prompt-value-panel/index.tsx +++ b/web/app/components/app/configuration/prompt-value-panel/index.tsx @@ -192,7 +192,7 @@ const PromptValuePanel: FC = ({
)}
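The catch clauses in this patch also drop bindings they never read; optional catch binding (ES2019) is the idiomatic form when the handler only supplies a fallback, as in the JSON schema guards above. A sketch matching that shape:

function safeParseProperties(schema: string): string {
  try {
    return JSON.stringify(JSON.parse(schema).properties, null, 2)
  }
  catch { // no (e) binding: the thrown value is intentionally unused
    return ''
  }
}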
diff --git a/web/app/components/base/emoji-picker/index.tsx b/web/app/components/base/emoji-picker/index.tsx index d3b20bb507..7b91c62797 100644 --- a/web/app/components/base/emoji-picker/index.tsx +++ b/web/app/components/base/emoji-picker/index.tsx @@ -45,7 +45,7 @@ const EmojiPicker: FC = ({
@@ -54,7 +54,7 @@ const EmojiPicker: FC = ({ variant="primary" className='w-full' onClick={() => { - onSelect && onSelect(selectedEmoji, selectedBackground!) + onSelect?.(selectedEmoji, selectedBackground!) }}> {t('app.iconPicker.ok')} diff --git a/web/app/components/base/form/components/field/select.tsx b/web/app/components/base/form/components/field/select.tsx index f12b90335b..dee047e2eb 100644 --- a/web/app/components/base/form/components/field/select.tsx +++ b/web/app/components/base/form/components/field/select.tsx @@ -33,7 +33,10 @@ const SelectField = ({ field.handleChange(value)} + onChange={(value) => { + field.handleChange(value) + onChange?.(value) + }} {...selectProps} />
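The select-field fix above restores a dropped prop: the field must keep the form library's state authoritative and still notify a consumer-supplied onChange. A minimal sketch of that composition, with a hypothetical FieldApi type standing in for the real form API:

type FieldApi = { handleChange: (v: string) => void }

function makeChangeHandler(field: FieldApi, onChange?: (v: string) => void) {
  return (value: string) => {
    field.handleChange(value) // update form state first
    onChange?.(value) // then forward to the consumer, if provided
  }
}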
diff --git a/web/app/components/base/image-uploader/image-list.tsx b/web/app/components/base/image-uploader/image-list.tsx index 758ffe99d5..3b5f6dee9c 100644 --- a/web/app/components/base/image-uploader/image-list.tsx +++ b/web/app/components/base/image-uploader/image-list.tsx @@ -62,7 +62,7 @@ const ImageList: FC = ({ {item.progress === -1 && ( onReUpload && onReUpload(item._id)} + onClick={() => onReUpload?.(item._id)} /> )}
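Among the fixes below, the pagination hook stops rebuilding its pages array on every render. Recomputing Array.from each time yields a fresh array identity, which defeats memoized children and dependency comparisons; useMemo pins the identity to totalPages. Sketch:

import { useMemo } from 'react'

function usePages(totalPages: number): number[] {
  // Recreated only when totalPages changes; evaluates to [1, 2, ..., totalPages]
  return useMemo(
    () => Array.from({ length: totalPages }, (_, i) => i + 1),
    [totalPages],
  )
}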
@@ -122,7 +122,7 @@ const ImageList: FC = ({ 'rounded-2xl shadow-lg hover:bg-state-base-hover', item.progress === -1 ? 'flex' : 'hidden group-hover:flex', )} - onClick={() => onRemove && onRemove(item._id)} + onClick={() => onRemove?.(item._id)} > diff --git a/web/app/components/base/image-uploader/image-preview.tsx b/web/app/components/base/image-uploader/image-preview.tsx index e67edaa3ca..53c22e344f 100644 --- a/web/app/components/base/image-uploader/image-preview.tsx +++ b/web/app/components/base/image-uploader/image-preview.tsx @@ -20,7 +20,7 @@ const isBase64 = (str: string): boolean => { try { return btoa(atob(str)) === str } - catch (err) { + catch { return false } } diff --git a/web/app/components/base/markdown-blocks/code-block.tsx b/web/app/components/base/markdown-blocks/code-block.tsx index 6814659a00..bc41c65fd5 100644 --- a/web/app/components/base/markdown-blocks/code-block.tsx +++ b/web/app/components/base/markdown-blocks/code-block.tsx @@ -127,7 +127,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any // Store event handlers in useMemo to avoid recreating them const echartsEvents = useMemo(() => ({ - finished: (params: EChartsEventParams) => { + finished: (_params: EChartsEventParams) => { // Limit finished event frequency to avoid infinite loops finishedEventCountRef.current++ if (finishedEventCountRef.current > 3) { diff --git a/web/app/components/base/mermaid/utils.ts b/web/app/components/base/mermaid/utils.ts index 9d56494227..7e59869de1 100644 --- a/web/app/components/base/mermaid/utils.ts +++ b/web/app/components/base/mermaid/utils.ts @@ -60,7 +60,7 @@ export function svgToBase64(svgGraph: string): Promise { reader.readAsDataURL(blob) }) } - catch (error) { + catch { return Promise.resolve('') } } diff --git a/web/app/components/base/pagination/hook.ts b/web/app/components/base/pagination/hook.ts index 32a2af8013..9b9d86a4ef 100644 --- a/web/app/components/base/pagination/hook.ts +++ b/web/app/components/base/pagination/hook.ts @@ -10,9 +10,7 @@ const usePagination = ({ edgePageCount, middlePagesSiblingCount, }: IPaginationProps): IUsePagination => { - const pages = new Array(totalPages) - .fill(0) - .map((_, i) => i + 1) + const pages = React.useMemo(() => Array.from({ length: totalPages }, (_, i) => i + 1), [totalPages]) const hasPreviousPage = currentPage > 1 const hasNextPage = currentPage < totalPages diff --git a/web/app/components/base/popover/index.tsx b/web/app/components/base/popover/index.tsx index 0e7c384564..41df06f43a 100644 --- a/web/app/components/base/popover/index.tsx +++ b/web/app/components/base/popover/index.tsx @@ -37,13 +37,16 @@ export default function CustomPopover({ const timeOutRef = useRef(null) const onMouseEnter = (isOpen: boolean) => { - timeOutRef.current && window.clearTimeout(timeOutRef.current) - !isOpen && buttonRef.current?.click() + if (timeOutRef.current != null) + window.clearTimeout(timeOutRef.current) + if (!isOpen) + buttonRef.current?.click() } const onMouseLeave = (isOpen: boolean) => { timeOutRef.current = window.setTimeout(() => { - isOpen && buttonRef.current?.click() + if (isOpen) + buttonRef.current?.click() }, timeoutDuration) } diff --git a/web/app/components/base/select/locale-signin.tsx b/web/app/components/base/select/locale-signin.tsx index 2d487c4be3..2ec491f4bf 100644 --- a/web/app/components/base/select/locale-signin.tsx +++ b/web/app/components/base/select/locale-signin.tsx @@ -43,7 +43,7 @@ export default function LocaleSigninSelect({ className={'group flex w-full 
items-center rounded-lg px-3 py-2 text-sm text-text-secondary data-[active]:bg-state-base-hover'} onClick={(evt) => { evt.preventDefault() - onChange && onChange(item.value) + onChange?.(item.value) }} > {item.name} diff --git a/web/app/components/base/select/locale.tsx b/web/app/components/base/select/locale.tsx index cc5662f53b..2033488435 100644 --- a/web/app/components/base/select/locale.tsx +++ b/web/app/components/base/select/locale.tsx @@ -43,7 +43,7 @@ export default function Select({ className={'group flex w-full items-center rounded-lg px-3 py-2 text-sm text-text-secondary data-[active]:bg-state-base-hover'} onClick={(evt) => { evt.preventDefault() - onChange && onChange(item.value) + onChange?.(item.value) }} > {item.name} diff --git a/web/app/components/base/tag-management/panel.tsx b/web/app/components/base/tag-management/panel.tsx index 79031e9f8a..3cfb68fef0 100644 --- a/web/app/components/base/tag-management/panel.tsx +++ b/web/app/components/base/tag-management/panel.tsx @@ -97,10 +97,13 @@ const Panel = (props: PanelProps) => { const removeTagIDs = value.filter(v => !selectedTagIDs.includes(v)) const selectedTags = tagList.filter(tag => selectedTagIDs.includes(tag.id)) onCacheUpdate(selectedTags) - Promise.all([ - ...(addTagIDs.length ? [bind(addTagIDs)] : []), - ...[removeTagIDs.length ? removeTagIDs.map(tagID => unbind(tagID)) : []], - ]).finally(() => { + const operations: Promise[] = [] + if (addTagIDs.length) + operations.push(bind(addTagIDs)) + if (removeTagIDs.length) + operations.push(...removeTagIDs.map(tagID => unbind(tagID))) + + Promise.all(operations).finally(() => { if (onChange) onChange() }) diff --git a/web/app/components/base/voice-input/index.tsx b/web/app/components/base/voice-input/index.tsx index 5a5400ad30..6587a61217 100644 --- a/web/app/components/base/voice-input/index.tsx +++ b/web/app/components/base/voice-input/index.tsx @@ -81,7 +81,8 @@ const VoiceInput = ({ setStartRecord(false) setStartConvert(true) recorder.current.stop() - drawRecordId.current && cancelAnimationFrame(drawRecordId.current) + if (drawRecordId.current) + cancelAnimationFrame(drawRecordId.current) drawRecordId.current = null const canvas = canvasRef.current! const ctx = ctxRef.current! 
diff --git a/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/uploader.tsx b/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/uploader.tsx
index 433b7851d7..57509b646f 100644
--- a/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/uploader.tsx
+++ b/web/app/components/datasets/create-from-pipeline/create-options/create-from-dsl-modal/uploader.tsx
@@ -34,7 +34,8 @@ const Uploader: FC = ({
   const handleDragEnter = (e: DragEvent) => {
     e.preventDefault()
     e.stopPropagation()
-    e.target !== dragRef.current && setDragging(true)
+    if (e.target !== dragRef.current)
+      setDragging(true)
   }
   const handleDragOver = (e: DragEvent) => {
     e.preventDefault()
@@ -43,7 +44,8 @@ const Uploader: FC = ({
   const handleDragLeave = (e: DragEvent) => {
     e.preventDefault()
     e.stopPropagation()
-    e.target === dragRef.current && setDragging(false)
+    if (e.target === dragRef.current)
+      setDragging(false)
   }
   const handleDrop = (e: DragEvent) => {
     e.preventDefault()
diff --git a/web/app/components/datasets/create/file-uploader/index.tsx b/web/app/components/datasets/create/file-uploader/index.tsx
index 4dfdbc4e96..e2bbad2776 100644
--- a/web/app/components/datasets/create/file-uploader/index.tsx
+++ b/web/app/components/datasets/create/file-uploader/index.tsx
@@ -185,7 +185,8 @@ const FileUploader = ({
   const handleDragEnter = (e: DragEvent) => {
     e.preventDefault()
     e.stopPropagation()
-    e.target !== dragRef.current && setDragging(true)
+    if (e.target !== dragRef.current)
+      setDragging(true)
   }
   const handleDragOver = (e: DragEvent) => {
     e.preventDefault()
@@ -194,7 +195,8 @@ const FileUploader = ({
   const handleDragLeave = (e: DragEvent) => {
     e.preventDefault()
     e.stopPropagation()
-    e.target === dragRef.current && setDragging(false)
+    if (e.target === dragRef.current)
+      setDragging(false)
   }
   type FileWithPath = {
     relativePath?: string
diff --git a/web/app/components/datasets/create/step-two/index.tsx b/web/app/components/datasets/create/step-two/index.tsx
index 650dc9425a..4c2e129cd2 100644
--- a/web/app/components/datasets/create/step-two/index.tsx
+++ b/web/app/components/datasets/create/step-two/index.tsx
@@ -568,9 +568,9 @@ const StepTwo = ({
         params,
         {
           onSuccess(data) {
-            updateIndexingTypeCache && updateIndexingTypeCache(indexType as string)
-            updateResultCache && updateResultCache(data)
-            updateRetrievalMethodCache && updateRetrievalMethodCache(retrievalConfig.search_method as string)
+            updateIndexingTypeCache?.(indexType as string)
+            updateResultCache?.(data)
+            updateRetrievalMethodCache?.(retrievalConfig.search_method as string)
           },
         },
       )
@@ -578,17 +578,18 @@ const StepTwo = ({
     else {
       await createDocumentMutation.mutateAsync(params, {
         onSuccess(data) {
-          updateIndexingTypeCache && updateIndexingTypeCache(indexType as string)
-          updateResultCache && updateResultCache(data)
-          updateRetrievalMethodCache && updateRetrievalMethodCache(retrievalConfig.search_method as string)
+          updateIndexingTypeCache?.(indexType as string)
+          updateResultCache?.(data)
+          updateRetrievalMethodCache?.(retrievalConfig.search_method as string)
         },
       })
     }
     if (mutateDatasetRes)
       mutateDatasetRes()
     invalidDatasetList()
-    onStepChange && onStepChange(+1)
-    isSetting && onSave && onSave()
+    onStepChange?.(+1)
+    if (isSetting)
+      onSave?.()
   }
 
   useEffect(() => {
@@ -1026,7 +1027,7 @@ const StepTwo = ({
         {!isSetting
           ? (
-
diff --git a/web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx b/web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx
index 0692680005..ad05f0729b 100644
--- a/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx
+++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx
@@ -200,7 +200,8 @@ const LocalFile = ({
   const handleDragEnter = (e: DragEvent) => {
     e.preventDefault()
     e.stopPropagation()
-    e.target !== dragRef.current && setDragging(true)
+    if (e.target !== dragRef.current)
+      setDragging(true)
   }
   const handleDragOver = (e: DragEvent) => {
     e.preventDefault()
@@ -209,7 +210,8 @@ const LocalFile = ({
   const handleDragLeave = (e: DragEvent) => {
     e.preventDefault()
     e.stopPropagation()
-    e.target === dragRef.current && setDragging(false)
+    if (e.target === dragRef.current)
+      setDragging(false)
   }
 
   const handleDrop = useCallback((e: DragEvent) => {
diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/base/crawled-result.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/base/crawled-result.tsx
index 28c9ae456e..cd410c4d1e 100644
--- a/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/base/crawled-result.tsx
+++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/website-crawl/base/crawled-result.tsx
@@ -45,10 +45,13 @@ const CrawledResult = ({
   const handleItemCheckChange = useCallback((item: CrawlResultItem) => {
     return (checked: boolean) => {
-      if (checked)
-        isMultipleChoice ? onSelectedChange([...checkedList, item]) : onSelectedChange([item])
-      else
-        onSelectedChange(checkedList.filter(checkedItem => checkedItem.source_url !== item.source_url))
+      if (checked) {
+        if (isMultipleChoice)
+          onSelectedChange([...checkedList, item])
+        else
+          onSelectedChange([item])
+      }
+      else { onSelectedChange(checkedList.filter(checkedItem => checkedItem.source_url !== item.source_url)) }
     }
   }, [checkedList, onSelectedChange, isMultipleChoice])
diff --git a/web/app/components/datasets/documents/create-from-pipeline/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/index.tsx
index 75c31acd1b..77b77700ca 100644
--- a/web/app/components/datasets/documents/create-from-pipeline/index.tsx
+++ b/web/app/components/datasets/documents/create-from-pipeline/index.tsx
@@ -326,7 +326,10 @@ const CreateFormPipeline = () => {
   }, [])
 
   const handleSubmit = useCallback((data: Record) => {
-    isPreview.current ? handlePreviewChunks(data) : handleProcess(data)
+    if (isPreview.current)
+      handlePreviewChunks(data)
+    else
+      handleProcess(data)
   }, [handlePreviewChunks, handleProcess])
 
   const handlePreviewFileChange = useCallback((file: DocumentItem) => {
diff --git a/web/app/components/datasets/documents/detail/batch-modal/csv-uploader.tsx b/web/app/components/datasets/documents/detail/batch-modal/csv-uploader.tsx
index aaf9ed8ffd..7e8749f0bf 100644
--- a/web/app/components/datasets/documents/detail/batch-modal/csv-uploader.tsx
+++ b/web/app/components/datasets/documents/detail/batch-modal/csv-uploader.tsx
@@ -99,7 +99,8 @@ const CSVUploader: FC = ({
   const handleDragEnter = (e: DragEvent) => {
     e.preventDefault()
     e.stopPropagation()
-    e.target !== dragRef.current && setDragging(true)
+    if (e.target !== dragRef.current)
+      setDragging(true)
   }
   const handleDragOver = (e: DragEvent) => {
     e.preventDefault()
@@ -108,7 +109,8 @@ const CSVUploader: FC = ({
   const handleDragLeave = (e: DragEvent) => {
     e.preventDefault()
     e.stopPropagation()
-    e.target === dragRef.current && setDragging(false)
+    if (e.target === dragRef.current)
+      setDragging(false)
   }
   const handleDrop = (e: DragEvent) => {
     e.preventDefault()
diff --git a/web/app/components/datasets/documents/detail/completed/index.tsx b/web/app/components/datasets/documents/detail/completed/index.tsx
index 726be7519a..8fa167f976 100644
--- a/web/app/components/datasets/documents/detail/completed/index.tsx
+++ b/web/app/components/datasets/documents/detail/completed/index.tsx
@@ -284,7 +284,8 @@ const Completed: FC = ({
     onSuccess: () => {
       notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
       resetList()
-      !segId && setSelectedSegmentIds([])
+      if (!segId)
+        setSelectedSegmentIds([])
     },
     onError: () => {
       notify({ type: 'error', message: t('common.actionMsg.modifiedUnsuccessfully') })
@@ -438,7 +439,8 @@ const Completed: FC = ({
     }
     else {
       resetList()
-      currentPage !== totalPages && setCurrentPage(totalPages)
+      if (currentPage !== totalPages)
+        setCurrentPage(totalPages)
     }
   }, [segmentListData, limit, currentPage, resetList])
@@ -491,7 +493,8 @@ const Completed: FC = ({
     }
     else {
       resetChildList()
-      currentPage !== totalPages && setCurrentPage(totalPages)
+      if (currentPage !== totalPages)
+        setCurrentPage(totalPages)
     }
   }, [childChunkListData, limit, currentPage, resetChildList])
diff --git a/web/app/components/datasets/documents/detail/metadata/index.tsx b/web/app/components/datasets/documents/detail/metadata/index.tsx
index 4cb5fe97e9..88c359f8b2 100644
--- a/web/app/components/datasets/documents/detail/metadata/index.tsx
+++ b/web/app/components/datasets/documents/detail/metadata/index.tsx
@@ -66,7 +66,7 @@ export const FieldInfo: FC = ({
     ? displayedValue
     : inputType === 'select'
       ? onUpdate && onUpdate(value as string)}
+        onSelect={({ value }) => onUpdate?.(value as string)}
         items={selectOptions}
         defaultValue={value}
         className={s.select}
@@ -75,7 +75,7 @@
         />
       : inputType === 'textarea'
        ? onUpdate && onUpdate(e.target.value)}
+         onChange={e => onUpdate?.(e.target.value)}
          value={value}
          className={s.textArea}
          placeholder={`${t('datasetDocuments.metadata.placeholder.add')}${label}`}
diff --git a/web/app/components/datasets/documents/detail/settings/pipeline-settings/index.tsx b/web/app/components/datasets/documents/detail/settings/pipeline-settings/index.tsx
index e69481c3ea..1ab47be445 100644
--- a/web/app/components/datasets/documents/detail/settings/pipeline-settings/index.tsx
+++ b/web/app/components/datasets/documents/detail/settings/pipeline-settings/index.tsx
@@ -148,7 +148,10 @@ const PipelineSettings = ({
   }, [])
 
   const handleSubmit = useCallback((data: Record) => {
-    isPreview.current ? handlePreviewChunks(data) : handleProcess(data)
+    if (isPreview.current)
+      handlePreviewChunks(data)
+    else
+      handleProcess(data)
   }, [handlePreviewChunks, handleProcess])
 
   if (isFetchingLastRunData) {
diff --git a/web/app/components/datasets/hit-testing/textarea.tsx b/web/app/components/datasets/hit-testing/textarea.tsx
index a9d66b0cc1..0e9dd16d56 100644
--- a/web/app/components/datasets/hit-testing/textarea.tsx
+++ b/web/app/components/datasets/hit-testing/textarea.tsx
@@ -80,7 +80,8 @@ const TextAreaWithButton = ({
       onUpdateList?.()
     }
     setLoading(false)
-    _onSubmit && _onSubmit()
+    if (_onSubmit)
+      _onSubmit()
   }
 
   const externalRetrievalTestingOnSubmit = async () => {
diff --git a/web/app/components/datasets/list/dataset-card/index.tsx b/web/app/components/datasets/list/dataset-card/index.tsx
index db8ee0226d..b1304e578e 100644
--- a/web/app/components/datasets/list/dataset-card/index.tsx
+++ b/web/app/components/datasets/list/dataset-card/index.tsx
@@ -157,12 +157,12 @@ const DatasetCard = ({
       data-disable-nprogress={true}
       onClick={(e) => {
         e.preventDefault()
-        isExternalProvider
-          ? push(`/datasets/${dataset.id}/hitTesting`)
-          // eslint-disable-next-line sonarjs/no-nested-conditional
-          : isPipelineUnpublished
-            ? push(`/datasets/${dataset.id}/pipeline`)
-            : push(`/datasets/${dataset.id}/documents`)
+        if (isExternalProvider)
+          push(`/datasets/${dataset.id}/hitTesting`)
+        else if (isPipelineUnpublished)
+          push(`/datasets/${dataset.id}/pipeline`)
+        else
+          push(`/datasets/${dataset.id}/documents`)
       }}
     >
       {!dataset.embedding_available && (
diff --git a/web/app/components/datasets/loading.tsx b/web/app/components/datasets/loading.tsx
index e69de29bb2..182c1f91de 100644
--- a/web/app/components/datasets/loading.tsx
+++ b/web/app/components/datasets/loading.tsx
@@ -0,0 +1,3 @@
+const DatasetsLoading = () => null
+
+export default DatasetsLoading
diff --git a/web/app/components/datasets/preview/index.tsx b/web/app/components/datasets/preview/index.tsx
index e69de29bb2..e71c440c20 100644
--- a/web/app/components/datasets/preview/index.tsx
+++ b/web/app/components/datasets/preview/index.tsx
@@ -0,0 +1,3 @@
+const DatasetPreview = () => null
+
+export default DatasetPreview
diff --git a/web/app/components/header/account-setting/collapse/index.tsx b/web/app/components/header/account-setting/collapse/index.tsx
index 2ad4a97cd1..44360df8cd 100644
--- a/web/app/components/header/account-setting/collapse/index.tsx
+++ b/web/app/components/header/account-setting/collapse/index.tsx
@@ -39,7 +39,7 @@ const Collapse = ({
       {
         items.map(item => (
-          onSelect && onSelect(item)}>
+          onSelect?.(item)}>
            {renderItem(item)}
        ))
diff --git a/web/app/components/header/account-setting/model-provider-page/provider-added-card/model-load-balancing-configs.tsx b/web/app/components/header/account-setting/model-provider-page/provider-added-card/model-load-balancing-configs.tsx
index 29da0ffc0c..291ba013f7 100644
--- a/web/app/components/header/account-setting/model-provider-page/provider-added-card/model-load-balancing-configs.tsx
+++ b/web/app/components/header/account-setting/model-provider-page/provider-added-card/model-load-balancing-configs.tsx
@@ -49,7 +49,7 @@ const ModelLoadBalancingConfigs = ({
   provider,
   model,
   configurationMethod,
-  currentCustomConfigurationModelFixedFields,
+  currentCustomConfigurationModelFixedFields: _currentCustomConfigurationModelFixedFields,
   withSwitch = false,
   className,
   modelCredential,
diff --git a/web/app/components/plugins/plugin-detail-panel/app-selector/app-picker.tsx b/web/app/components/plugins/plugin-detail-panel/app-selector/app-picker.tsx
index 3c79acb653..10c28507f7 100644
--- a/web/app/components/plugins/plugin-detail-panel/app-selector/app-picker.tsx
+++ b/web/app/components/plugins/plugin-detail-panel/app-selector/app-picker.tsx
@@ -33,7 +33,7 @@ type Props = {
 }
 
 const AppPicker: FC = ({
-  scope,
+  scope: _scope,
   disabled,
   trigger,
   placement = 'right-start',
@@ -90,7 +90,7 @@ const AppPicker: FC = ({
     }
 
     // Set up MutationObserver to watch DOM changes
-    mutationObserver = new MutationObserver((mutations) => {
+    mutationObserver = new MutationObserver((_mutations) => {
       if (observerTarget.current) {
         setupIntersectionObserver()
         mutationObserver?.disconnect()
diff --git a/web/app/components/plugins/plugin-detail-panel/model-selector/index.tsx b/web/app/components/plugins/plugin-detail-panel/model-selector/index.tsx
index fb4c99e1e4..873f187e8f 100644
--- a/web/app/components/plugins/plugin-detail-panel/model-selector/index.tsx
+++ b/web/app/components/plugins/plugin-detail-panel/model-selector/index.tsx
@@ -148,7 +148,7 @@ const ModelParameterModal: FC = ({
         })
       }
     }
-    catch (e) {
+    catch {
       Toast.notify({ type: 'error', message: t('common.error') })
     }
   }
diff --git a/web/app/components/rag-pipeline/components/panel/input-field/field-list/hooks.ts b/web/app/components/rag-pipeline/components/panel/input-field/field-list/hooks.ts
index d5c0797c9b..376894b1bb 100644
--- a/web/app/components/rag-pipeline/components/panel/input-field/field-list/hooks.ts
+++ b/web/app/components/rag-pipeline/components/panel/input-field/field-list/hooks.ts
@@ -51,7 +51,7 @@ export const useFieldList = ({
 
   const handleListSortChange = useCallback((list: SortableItem[]) => {
     const newInputFields = list.map((item) => {
-      const { id, chosen, selected, ...filed } = item
+      const { id: _id, chosen: _chosen, selected: _selected, ...filed } = item
       return filed
     })
     handleInputFieldsChange(newInputFields)
diff --git a/web/app/components/rag-pipeline/components/panel/test-run/header.tsx b/web/app/components/rag-pipeline/components/panel/test-run/header.tsx
index 16291f868b..a536f66137 100644
--- a/web/app/components/rag-pipeline/components/panel/test-run/header.tsx
+++ b/web/app/components/rag-pipeline/components/panel/test-run/header.tsx
@@ -15,7 +15,8 @@ const Header = () => {
       isPreparingDataSource,
       setIsPreparingDataSource,
     } = workflowStore.getState()
-    isPreparingDataSource && setIsPreparingDataSource?.(false)
+    if (isPreparingDataSource)
+      setIsPreparingDataSource?.(false)
     handleCancelDebugAndPreviewPanel()
   }, [workflowStore])
diff --git a/web/app/components/rag-pipeline/hooks/use-nodes-sync-draft.ts b/web/app/components/rag-pipeline/hooks/use-nodes-sync-draft.ts
index 86e44dced7..ad757f36a7 100644
--- a/web/app/components/rag-pipeline/hooks/use-nodes-sync-draft.ts
+++ b/web/app/components/rag-pipeline/hooks/use-nodes-sync-draft.ts
@@ -104,7 +104,7 @@ export const useNodesSyncDraft = () => {
         const res = await syncWorkflowDraft(postParams)
         setSyncWorkflowDraftHash(res.hash)
         setDraftUpdatedAt(res.updated_at)
-        callback?.onSuccess && callback.onSuccess()
+        callback?.onSuccess?.()
       }
       catch (error: any) {
         if (error && error.json && !error.bodyUsed) {
@@ -113,10 +113,10 @@ export const useNodesSyncDraft = () => {
             handleRefreshWorkflowDraft()
           })
         }
-        callback?.onError && callback.onError()
+        callback?.onError?.()
       }
       finally {
-        callback?.onSettled && callback.onSettled()
+        callback?.onSettled?.()
       }
     }
   }, [getPostParams, getNodesReadOnly, workflowStore, handleRefreshWorkflowDraft])
diff --git a/web/app/components/share/text-generation/index.tsx b/web/app/components/share/text-generation/index.tsx
index da5b09b065..98804c7311 100644
--- a/web/app/components/share/text-generation/index.tsx
+++ b/web/app/components/share/text-generation/index.tsx
@@ -363,7 +363,8 @@ const TextGeneration: FC = ({
     (async () => {
       if (!appData || !appParams)
         return
-      !isWorkflow && fetchSavedMessage()
+      if (!isWorkflow)
+        fetchSavedMessage()
       const { app_id: appId, site: siteInfo, custom_config } = appData
       setAppId(appId)
       setSiteInfo(siteInfo as SiteInfo)
diff --git a/web/app/components/swr-initializer.tsx b/web/app/components/swr-initializer.tsx
index 0a873400d6..fd9432fdd8 100644
--- a/web/app/components/swr-initializer.tsx
+++ b/web/app/components/swr-initializer.tsx
@@ -62,8 +62,10 @@ const SwrInitializer = ({
       return
     }
     if (searchParams.has('access_token') || searchParams.has('refresh_token')) {
-      consoleToken && localStorage.setItem('console_token', consoleToken)
-      refreshToken && localStorage.setItem('refresh_token', refreshToken)
+      if (consoleToken)
+        localStorage.setItem('console_token', consoleToken)
+      if (refreshToken)
+        localStorage.setItem('refresh_token', refreshToken)
       const redirectUrl = resolvePostLoginRedirect(searchParams)
       if (redirectUrl)
         location.replace(redirectUrl)
diff --git a/web/app/components/workflow-app/hooks/use-nodes-sync-draft.ts b/web/app/components/workflow-app/hooks/use-nodes-sync-draft.ts
index 654977fb15..5705deb0c0 100644
--- a/web/app/components/workflow-app/hooks/use-nodes-sync-draft.ts
+++ b/web/app/components/workflow-app/hooks/use-nodes-sync-draft.ts
@@ -124,7 +124,7 @@ export const useNodesSyncDraft = () => {
         const res = await syncWorkflowDraft(postParams)
         setSyncWorkflowDraftHash(res.hash)
         setDraftUpdatedAt(res.updated_at)
-        callback?.onSuccess && callback.onSuccess()
+        callback?.onSuccess?.()
       }
       catch (error: any) {
         if (error && error.json && !error.bodyUsed) {
@@ -133,10 +133,10 @@ export const useNodesSyncDraft = () => {
             handleRefreshWorkflowDraft()
           })
         }
-        callback?.onError && callback.onError()
+        callback?.onError?.()
      }
      finally {
-        callback?.onSettled && callback.onSettled()
+        callback?.onSettled?.()
      }
    }
  }, [workflowStore, getPostParams, getNodesReadOnly, handleRefreshWorkflowDraft])
diff --git a/web/app/components/workflow/block-selector/tool/tool.tsx b/web/app/components/workflow/block-selector/tool/tool.tsx
index 6004bb119a..30d3e218d2 100644
--- a/web/app/components/workflow/block-selector/tool/tool.tsx
+++ b/web/app/components/workflow/block-selector/tool/tool.tsx
@@ -73,7 +73,7 @@ const Tool: FC = ({
     if (isHovering && !isAllSelected) {
       return (
 {
+          onClick={() => {
            onSelectMultiple?.(BlockEnum.Tool, actions.filter(action => !getIsDisabled(action)).map((tool) => {
              const params: Record = {}
              if (tool.parameters) {
diff --git a/web/app/components/workflow/hooks/use-shortcuts.ts b/web/app/components/workflow/hooks/use-shortcuts.ts
index b2d71555d7..a744fefd50 100644
--- a/web/app/components/workflow/hooks/use-shortcuts.ts
+++ b/web/app/components/workflow/hooks/use-shortcuts.ts
@@ -107,7 +107,8 @@ export const useShortcuts = (): void => {
       const { showDebugAndPreviewPanel } = workflowStore.getState()
       if (shouldHandleShortcut(e) && !showDebugAndPreviewPanel) {
         e.preventDefault()
-        workflowHistoryShortcutsEnabled && handleHistoryBack()
+        if (workflowHistoryShortcutsEnabled)
+          handleHistoryBack()
       }
     }, { exactMatch: true, useCapture: true })
@@ -116,7 +117,8 @@ export const useShortcuts = (): void => {
     (e) => {
       if (shouldHandleShortcut(e)) {
         e.preventDefault()
-        workflowHistoryShortcutsEnabled && handleHistoryForward()
+        if (workflowHistoryShortcutsEnabled)
+          handleHistoryForward()
       }
     },
     { exactMatch: true, useCapture: true },
diff --git a/web/app/components/workflow/hooks/use-workflow.ts b/web/app/components/workflow/hooks/use-workflow.ts
index 02a2f09d63..3f9f8106cf 100644
--- a/web/app/components/workflow/hooks/use-workflow.ts
+++ b/web/app/components/workflow/hooks/use-workflow.ts
@@ -354,7 +354,7 @@ export const useWorkflow = () => {
     return startNodes
   }, [nodesMap, getRootNodesById])
 
-  const isValidConnection = useCallback(({ source, sourceHandle, target }: Connection) => {
+  const isValidConnection = useCallback(({ source, sourceHandle: _sourceHandle, target }: Connection) => {
     const {
       edges,
       getNodes,
diff --git a/web/app/components/workflow/nodes/_base/components/retry/utils.ts b/web/app/components/workflow/nodes/_base/components/retry/utils.ts
index e69de29bb2..336ce12bb9 100644
--- a/web/app/components/workflow/nodes/_base/components/retry/utils.ts
+++ b/web/app/components/workflow/nodes/_base/components/retry/utils.ts
@@ -0,0 +1 @@
+export {}
diff --git a/web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx b/web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx
index 273e100f8e..ba27d023e7 100644
--- a/web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx
+++ b/web/app/components/workflow/nodes/_base/components/variable/var-reference-picker.tsx
@@ -407,7 +407,10 @@ const VarReferencePicker: FC = ({
 {
           if (readonly)
             return
-          !isConstant ? setOpen(!open) : setControlFocus(Date.now())
+          if (!isConstant)
+            setOpen(!open)
+          else
+            setControlFocus(Date.now())
         }} className='group/picker-trigger-wrap relative !flex'>
         <>
           {isAddBtnTrigger
@@ -457,7 +460,10 @@ const VarReferencePicker: FC = ({
           onClick={() => {
             if (readonly)
               return
-            !isConstant ? setOpen(!open) : setControlFocus(Date.now())
+            if (!isConstant)
+              setOpen(!open)
+            else
+              setControlFocus(Date.now())
           }}
           className='h-full grow'
         >
diff --git a/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx b/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx
index 067dbf8652..e70cfed97c 100644
--- a/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx
+++ b/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx
@@ -137,7 +137,7 @@ const Item: FC = ({
   const isHovering = isItemHovering || isChildrenHovering
   const open = (isObj || isStructureOutput) && isHovering
   useEffect(() => {
-    onHovering && onHovering(isHovering)
+    onHovering?.(isHovering)
   }, [isHovering])
   const handleChosen = (e: React.MouseEvent) => {
     e.stopPropagation()
diff --git a/web/app/components/workflow/nodes/_base/components/workflow-panel/last-run/index.tsx b/web/app/components/workflow/nodes/_base/components/workflow-panel/last-run/index.tsx
index 06962389c9..b26dd74714 100644
--- a/web/app/components/workflow/nodes/_base/components/workflow-panel/last-run/index.tsx
+++ b/web/app/components/workflow/nodes/_base/components/workflow-panel/last-run/index.tsx
@@ -25,12 +25,12 @@ type Props = {
 } & Partial
 
 const LastRun: FC = ({
-  appId,
+  appId: _appId,
   nodeId,
   canSingleRun,
   isRunAfterSingleRun,
   updateNodeRunningStatus,
-  nodeInfo,
+  nodeInfo: _nodeInfo,
   runningStatus: oneStepRunRunningStatus,
   onSingleRunClicked,
   singleRunResult,
diff --git a/web/app/components/workflow/nodes/knowledge-base/components/option-card.tsx b/web/app/components/workflow/nodes/knowledge-base/components/option-card.tsx
index 789e24835f..99ee0d25b5 100644
--- a/web/app/components/workflow/nodes/knowledge-base/components/option-card.tsx
+++ b/web/app/components/workflow/nodes/knowledge-base/components/option-card.tsx
@@ -88,7 +88,8 @@ const OptionCard = memo(({
       )}
       onClick={(e) => {
         e.stopPropagation()
-        !readonly && enableSelect && id && onClick?.(id)
+        if (!readonly && enableSelect && id)
+          onClick?.(id)
       }}
     >
 = ({
   const handleResetDefaults = useCallback(() => {
     if (currentTab === SchemaView.VisualEditor) {
       setHoveringProperty(null)
-      advancedEditing && setAdvancedEditing(false)
-      isAddingNewField && setIsAddingNewField(false)
+      if (advancedEditing)
+        setAdvancedEditing(false)
+      if (isAddingNewField)
+        setIsAddingNewField(false)
     }
     setJsonSchema(DEFAULT_SCHEMA)
     setJson(JSON.stringify(DEFAULT_SCHEMA, null, 2))
diff --git a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/edit-card/index.tsx b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/edit-card/index.tsx
index ae72d494d1..1db31cd470 100644
--- a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/edit-card/index.tsx
+++ b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/edit-card/index.tsx
@@ -87,8 +87,10 @@ const EditCard: FC = ({
   })
 
   useSubscribe('fieldChangeSuccess', () => {
-    isAddingNewField && setIsAddingNewField(false)
-    advancedEditing && setAdvancedEditing(false)
+    if (isAddingNewField)
+      setIsAddingNewField(false)
+    if (advancedEditing)
+      setAdvancedEditing(false)
   })
 
   const emitPropertyNameChange = useCallback(() => {
diff --git a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/hooks.ts b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/hooks.ts
index 8256a3c862..4f7e1e6f00 100644
--- a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/hooks.ts
+++ b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/visual-editor/hooks.ts
@@ -45,8 +45,10 @@ export const useSchemaNodeOperations = (props: VisualEditorProps) => {
       onChange(backupSchema)
       setBackupSchema(null)
     }
-    isAddingNewField && setIsAddingNewField(false)
-    advancedEditing && setAdvancedEditing(false)
+    if (isAddingNewField)
+      setIsAddingNewField(false)
+    if (advancedEditing)
+      setAdvancedEditing(false)
     setHoveringProperty(null)
   })
@@ -221,7 +223,8 @@ export const useSchemaNodeOperations = (props: VisualEditorProps) => {
   })
 
   useSubscribe('addField', (params) => {
-    advancedEditing && setAdvancedEditing(false)
+    if (advancedEditing)
+      setAdvancedEditing(false)
     setBackupSchema(jsonSchema)
     const { path } = params as AddEventParams
     setIsAddingNewField(true)
diff --git a/web/app/components/workflow/nodes/loop/components/condition-value.tsx b/web/app/components/workflow/nodes/loop/components/condition-value.tsx
index 922cff4a61..2f011f870a 100644
--- a/web/app/components/workflow/nodes/loop/components/condition-value.tsx
+++ b/web/app/components/workflow/nodes/loop/components/condition-value.tsx
@@ -22,7 +22,7 @@ type ConditionValueProps = {
 }
 const ConditionValue = ({
   variableSelector,
-  labelName,
+  labelName: _labelName,
   operator,
   value,
 }: ConditionValueProps) => {
diff --git a/web/app/components/workflow/panel/chat-variable-panel/components/variable-modal-trigger.tsx b/web/app/components/workflow/panel/chat-variable-panel/components/variable-modal-trigger.tsx
index 07eb79a269..9d19b61093 100644
--- a/web/app/components/workflow/panel/chat-variable-panel/components/variable-modal-trigger.tsx
+++ b/web/app/components/workflow/panel/chat-variable-panel/components/variable-modal-trigger.tsx
@@ -35,7 +35,8 @@ const VariableModalTrigger = ({
       open={open}
       onOpenChange={() => {
         setOpen(v => !v)
-        open && onClose()
+        if (open)
+          onClose()
       }}
       placement='left-start'
       offset={{
@@ -45,7 +46,8 @@ const VariableModalTrigger = ({
     >
 {
         setOpen(v => !v)
-        open && onClose()
+        if (open)
+          onClose()
       }}>