From 044f96bd936d7d018d94070ff931410f6a9e9bc7 Mon Sep 17 00:00:00 2001 From: 17hz <0x149527@gmail.com> Date: Tue, 2 Sep 2025 11:59:31 +0800 Subject: [PATCH 01/21] feat: LLM prompt Jinja2 template now support more variables (#24944) --- web/app/components/workflow/nodes/llm/panel.tsx | 2 +- web/app/components/workflow/nodes/llm/use-config.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/web/app/components/workflow/nodes/llm/panel.tsx b/web/app/components/workflow/nodes/llm/panel.tsx index 1206e58734..52bbf48b74 100644 --- a/web/app/components/workflow/nodes/llm/panel.tsx +++ b/web/app/components/workflow/nodes/llm/panel.tsx @@ -140,7 +140,7 @@ const Panel: FC> = ({ { }, []) const filterJinja2InputVar = useCallback((varPayload: Var) => { - return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber].includes(varPayload.type) + return [VarType.number, VarType.string, VarType.secret, VarType.arrayString, VarType.arrayNumber, VarType.arrayBoolean, VarType.arrayObject, VarType.object, VarType.array, VarType.boolean].includes(varPayload.type) }, []) const filterMemoryPromptVar = useCallback((varPayload: Var) => { From 067b0d07c4d1c1d6d8de2a4cf67234c71ef44d97 Mon Sep 17 00:00:00 2001 From: Yongtao Huang Date: Tue, 2 Sep 2025 11:59:38 +0800 Subject: [PATCH 02/21] Fix: ensure InstalledApp deletion uses model instances instead of Row (#24942) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/controllers/console/admin.py | 18 +++++++++++------- .../setting/build-in/config-credentials.tsx | 2 +- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py index 7e5c28200a..cae2d7cbe3 100644 --- a/api/controllers/console/admin.py +++ b/api/controllers/console/admin.py @@ -130,15 +130,19 @@ class InsertExploreAppApi(Resource): app.is_public = False with Session(db.engine) as session: - installed_apps = session.execute( - select(InstalledApp).where( - InstalledApp.app_id == recommended_app.app_id, - InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id, + installed_apps = ( + session.execute( + select(InstalledApp).where( + InstalledApp.app_id == recommended_app.app_id, + InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id, + ) ) - ).all() + .scalars() + .all() + ) - for installed_app in installed_apps: - db.session.delete(installed_app) + for installed_app in installed_apps: + session.delete(installed_app) db.session.delete(recommended_app) db.session.commit() diff --git a/web/app/components/tools/setting/build-in/config-credentials.tsx b/web/app/components/tools/setting/build-in/config-credentials.tsx index 3ce3f62639..f6b9c05c44 100644 --- a/web/app/components/tools/setting/build-in/config-credentials.tsx +++ b/web/app/components/tools/setting/build-in/config-credentials.tsx @@ -111,7 +111,7 @@ const ConfigCredential: FC = ({ ) } - < div className='flex space-x-2'> +
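The root cause patch 02 addresses is a SQLAlchemy 2.0 API subtlety: session.execute(select(Model)).all() returns Row tuples, not mapped instances, and Session.delete() accepts only mapped instances. Below is a minimal, self-contained sketch of that distinction; it uses a stand-in InstalledApp model with an in-memory SQLite engine, not Dify's real mapping.

from sqlalchemy import create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class InstalledApp(Base):  # stand-in for the real model, for illustration only
    __tablename__ = "installed_apps"
    id: Mapped[int] = mapped_column(primary_key=True)
    app_id: Mapped[str] = mapped_column()

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(InstalledApp(app_id="demo"))
    session.commit()

    # Without .scalars(): each result is a Row (a tuple wrapper), and passing
    # it to session.delete() raises UnmappedInstanceError.
    rows = session.execute(select(InstalledApp)).all()
    print(type(rows[0]).__name__)  # Row

    # With .scalars(): each result is a mapped InstalledApp instance, which
    # session.delete() accepts -- exactly the change the patch makes.
    for app in session.execute(select(InstalledApp)).scalars().all():
        session.delete(app)
    session.commit()

The patch also switches the per-row deletes from db.session to the same session that issued the select, so the lookup and the deletes share one unit of work.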
From a32dde5428ce52aa86f9ba48431853912031da2d Mon Sep 17 00:00:00 2001 From: -LAN- Date: Tue, 2 Sep 2025 14:18:29 +0800 Subject: [PATCH 03/21] Fix: Resolve workflow_node_execution primary key conflicts with UUID v7 (#24643) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- ...hemy_workflow_node_execution_repository.py | 84 ++++++- api/core/workflow/workflow_cycle_manager.py | 6 +- ...rkflow_node_execution_conflict_handling.py | 210 ++++++++++++++++++ .../test_sqlalchemy_repository.py | 45 +++- 4 files changed, 325 insertions(+), 20 deletions(-) create mode 100644 api/tests/unit_tests/core/repositories/test_workflow_node_execution_conflict_handling.py diff --git a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py index f4532d7f29..e5e5626b26 100644 --- a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py +++ b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py @@ -7,9 +7,12 @@ import logging from collections.abc import Sequence from typing import Optional, Union +import psycopg2.errors from sqlalchemy import UnaryExpression, asc, desc, select from sqlalchemy.engine import Engine +from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import sessionmaker +from tenacity import before_sleep_log, retry, retry_if_exception, stop_after_attempt from core.model_runtime.utils.encoders import jsonable_encoder from core.workflow.entities.workflow_node_execution import ( @@ -21,6 +24,7 @@ from core.workflow.nodes.enums import NodeType from core.workflow.repositories.workflow_node_execution_repository import OrderConfig, WorkflowNodeExecutionRepository from core.workflow.workflow_type_encoder import WorkflowRuntimeTypeConverter from libs.helper import extract_tenant_id +from libs.uuid_utils import uuidv7 from models import ( Account, CreatorUserRole, @@ -186,18 +190,31 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) db_model.finished_at = domain_model.finished_at return db_model + def _is_duplicate_key_error(self, exception: BaseException) -> bool: + """Check if the exception is a duplicate key constraint violation.""" + return isinstance(exception, IntegrityError) and isinstance(exception.orig, psycopg2.errors.UniqueViolation) + + def _regenerate_id_on_duplicate( + self, execution: WorkflowNodeExecution, db_model: WorkflowNodeExecutionModel + ) -> None: + """Regenerate UUID v7 for both domain and database models when duplicate key detected.""" + new_id = str(uuidv7()) + logger.warning( + "Duplicate key conflict for workflow node execution ID %s, generating new UUID v7: %s", db_model.id, new_id + ) + db_model.id = new_id + execution.id = new_id + def save(self, execution: WorkflowNodeExecution) -> None: """ Save or update a NodeExecution domain entity to the database. This method serves as a domain-to-database adapter that: 1. Converts the domain entity to its database representation - 2. Persists the database model using SQLAlchemy's merge operation + 2. Checks for existing records and updates or inserts accordingly 3. Maintains proper multi-tenancy by including tenant context during conversion 4. Updates the in-memory cache for faster subsequent lookups - - The method handles both creating new records and updating existing ones through - SQLAlchemy's merge operation. + 5. 
Handles duplicate key conflicts by retrying with a new UUID v7 Args: execution: The NodeExecution domain entity to persist @@ -205,19 +222,62 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) # Convert domain model to database model using tenant context and other attributes db_model = self.to_db_model(execution) - # Create a new database session - with self._session_factory() as session: - # SQLAlchemy merge intelligently handles both insert and update operations - # based on the presence of the primary key - session.merge(db_model) - session.commit() + # Use tenacity for retry logic with duplicate key handling + @retry( + stop=stop_after_attempt(3), + retry=retry_if_exception(self._is_duplicate_key_error), + before_sleep=before_sleep_log(logger, logging.WARNING), + reraise=True, + ) + def _save_with_retry(): + try: + self._persist_to_database(db_model) + except IntegrityError as e: + if self._is_duplicate_key_error(e): + # Generate new UUID and retry + self._regenerate_id_on_duplicate(execution, db_model) + raise # Let tenacity handle the retry + else: + # Different integrity error, don't retry + logger.exception("Non-duplicate key integrity error while saving workflow node execution") + raise - # Update the in-memory cache for faster subsequent lookups - # Only cache if we have a node_execution_id to use as the cache key + try: + _save_with_retry() + + # Update the in-memory cache after successful save if db_model.node_execution_id: logger.debug("Updating cache for node_execution_id: %s", db_model.node_execution_id) self._node_execution_cache[db_model.node_execution_id] = db_model + except Exception as e: + logger.exception("Failed to save workflow node execution after all retries") + raise + + def _persist_to_database(self, db_model: WorkflowNodeExecutionModel) -> None: + """ + Persist the database model to the database. + + Checks if a record with the same ID exists and either updates it or creates a new one. 
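+ The lookup uses session.get() on the primary key, so an existing row is updated in place and a new row is inserted otherwise.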
+ + Args: + db_model: The database model to persist + """ + with self._session_factory() as session: + # Check if record already exists + existing = session.get(WorkflowNodeExecutionModel, db_model.id) + + if existing: + # Update existing record by copying all non-private attributes + for key, value in db_model.__dict__.items(): + if not key.startswith("_"): + setattr(existing, key, value) + else: + # Add new record + session.add(db_model) + + session.commit() + def get_db_models_by_workflow_run( self, workflow_run_id: str, diff --git a/api/core/workflow/workflow_cycle_manager.py b/api/core/workflow/workflow_cycle_manager.py index 03f670707e..3c264e782d 100644 --- a/api/core/workflow/workflow_cycle_manager.py +++ b/api/core/workflow/workflow_cycle_manager.py @@ -2,7 +2,6 @@ from collections.abc import Mapping from dataclasses import dataclass from datetime import datetime from typing import Any, Optional, Union -from uuid import uuid4 from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity from core.app.entities.queue_entities import ( @@ -29,6 +28,7 @@ from core.workflow.repositories.workflow_node_execution_repository import Workfl from core.workflow.system_variable import SystemVariable from core.workflow.workflow_entry import WorkflowEntry from libs.datetime_utils import naive_utc_now +from libs.uuid_utils import uuidv7 @dataclass @@ -266,7 +266,7 @@ class WorkflowCycleManager: """Get execution ID from system variables or generate a new one.""" if self._workflow_system_variables and self._workflow_system_variables.workflow_execution_id: return str(self._workflow_system_variables.workflow_execution_id) - return str(uuid4()) + return str(uuidv7()) def _save_and_cache_workflow_execution(self, execution: WorkflowExecution) -> WorkflowExecution: """Save workflow execution to repository and cache it.""" @@ -371,7 +371,7 @@ class WorkflowCycleManager: } domain_execution = WorkflowNodeExecution( - id=str(uuid4()), + id=str(uuidv7()), workflow_id=workflow_execution.workflow_id, workflow_execution_id=workflow_execution.id_, predecessor_node_id=event.predecessor_node_id, diff --git a/api/tests/unit_tests/core/repositories/test_workflow_node_execution_conflict_handling.py b/api/tests/unit_tests/core/repositories/test_workflow_node_execution_conflict_handling.py new file mode 100644 index 0000000000..84484fe223 --- /dev/null +++ b/api/tests/unit_tests/core/repositories/test_workflow_node_execution_conflict_handling.py @@ -0,0 +1,210 @@ +"""Unit tests for workflow node execution conflict handling.""" + +from datetime import datetime +from unittest.mock import MagicMock, Mock + +import psycopg2.errors +import pytest +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import sessionmaker + +from core.repositories.sqlalchemy_workflow_node_execution_repository import ( + SQLAlchemyWorkflowNodeExecutionRepository, +) +from core.workflow.entities.workflow_node_execution import ( + WorkflowNodeExecution, + WorkflowNodeExecutionStatus, +) +from core.workflow.nodes.enums import NodeType +from models import Account, WorkflowNodeExecutionTriggeredFrom + + +class TestWorkflowNodeExecutionConflictHandling: + """Test cases for handling duplicate key conflicts in workflow node execution.""" + + def setup_method(self): + """Set up test fixtures.""" + # Create a mock user with tenant_id + self.mock_user = Mock(spec=Account) + self.mock_user.id = "test-user-id" + self.mock_user.current_tenant_id = "test-tenant-id" + + # Create mock session factory + 
self.mock_session_factory = Mock(spec=sessionmaker) + + # Create repository instance + self.repository = SQLAlchemyWorkflowNodeExecutionRepository( + session_factory=self.mock_session_factory, + user=self.mock_user, + app_id="test-app-id", + triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN, + ) + + def test_save_with_duplicate_key_retries_with_new_uuid(self): + """Test that save retries with a new UUID v7 when encountering duplicate key error.""" + # Create a mock session + mock_session = MagicMock() + mock_session.__enter__ = Mock(return_value=mock_session) + mock_session.__exit__ = Mock(return_value=None) + self.mock_session_factory.return_value = mock_session + + # Mock session.get to return None (no existing record) + mock_session.get.return_value = None + + # Create IntegrityError for duplicate key with proper psycopg2.errors.UniqueViolation + mock_unique_violation = Mock(spec=psycopg2.errors.UniqueViolation) + duplicate_error = IntegrityError( + "duplicate key value violates unique constraint", + params=None, + orig=mock_unique_violation, + ) + + # First call to session.add raises IntegrityError, second succeeds + mock_session.add.side_effect = [duplicate_error, None] + mock_session.commit.side_effect = [None, None] + + # Create test execution + execution = WorkflowNodeExecution( + id="original-id", + workflow_id="test-workflow-id", + workflow_execution_id="test-workflow-execution-id", + node_execution_id="test-node-execution-id", + node_id="test-node-id", + node_type=NodeType.START, + title="Test Node", + index=1, + status=WorkflowNodeExecutionStatus.RUNNING, + created_at=datetime.utcnow(), + ) + + original_id = execution.id + + # Save should succeed after retry + self.repository.save(execution) + + # Verify that session.add was called twice (initial attempt + retry) + assert mock_session.add.call_count == 2 + + # Verify that the ID was changed (new UUID v7 generated) + assert execution.id != original_id + + def test_save_with_existing_record_updates_instead_of_insert(self): + """Test that save updates existing record instead of inserting duplicate.""" + # Create a mock session + mock_session = MagicMock() + mock_session.__enter__ = Mock(return_value=mock_session) + mock_session.__exit__ = Mock(return_value=None) + self.mock_session_factory.return_value = mock_session + + # Mock existing record + mock_existing = MagicMock() + mock_session.get.return_value = mock_existing + mock_session.commit.return_value = None + + # Create test execution + execution = WorkflowNodeExecution( + id="existing-id", + workflow_id="test-workflow-id", + workflow_execution_id="test-workflow-execution-id", + node_execution_id="test-node-execution-id", + node_id="test-node-id", + node_type=NodeType.START, + title="Test Node", + index=1, + status=WorkflowNodeExecutionStatus.SUCCEEDED, + created_at=datetime.utcnow(), + ) + + # Save should update existing record + self.repository.save(execution) + + # Verify that session.add was not called (update path) + mock_session.add.assert_not_called() + + # Verify that session.commit was called + mock_session.commit.assert_called_once() + + def test_save_exceeds_max_retries_raises_error(self): + """Test that save raises error after exceeding max retries.""" + # Create a mock session + mock_session = MagicMock() + mock_session.__enter__ = Mock(return_value=mock_session) + mock_session.__exit__ = Mock(return_value=None) + self.mock_session_factory.return_value = mock_session + + # Mock session.get to return None (no existing record) + 
mock_session.get.return_value = None + + # Create IntegrityError for duplicate key with proper psycopg2.errors.UniqueViolation + mock_unique_violation = Mock(spec=psycopg2.errors.UniqueViolation) + duplicate_error = IntegrityError( + "duplicate key value violates unique constraint", + params=None, + orig=mock_unique_violation, + ) + + # All attempts fail with duplicate error + mock_session.add.side_effect = duplicate_error + + # Create test execution + execution = WorkflowNodeExecution( + id="test-id", + workflow_id="test-workflow-id", + workflow_execution_id="test-workflow-execution-id", + node_execution_id="test-node-execution-id", + node_id="test-node-id", + node_type=NodeType.START, + title="Test Node", + index=1, + status=WorkflowNodeExecutionStatus.RUNNING, + created_at=datetime.utcnow(), + ) + + # Save should raise IntegrityError after max retries + with pytest.raises(IntegrityError): + self.repository.save(execution) + + # Verify that session.add was called 3 times (max_retries) + assert mock_session.add.call_count == 3 + + def test_save_non_duplicate_integrity_error_raises_immediately(self): + """Test that non-duplicate IntegrityErrors are raised immediately without retry.""" + # Create a mock session + mock_session = MagicMock() + mock_session.__enter__ = Mock(return_value=mock_session) + mock_session.__exit__ = Mock(return_value=None) + self.mock_session_factory.return_value = mock_session + + # Mock session.get to return None (no existing record) + mock_session.get.return_value = None + + # Create IntegrityError for non-duplicate constraint + other_error = IntegrityError( + "null value in column violates not-null constraint", + params=None, + orig=None, + ) + + # First call raises non-duplicate error + mock_session.add.side_effect = other_error + + # Create test execution + execution = WorkflowNodeExecution( + id="test-id", + workflow_id="test-workflow-id", + workflow_execution_id="test-workflow-execution-id", + node_execution_id="test-node-execution-id", + node_id="test-node-id", + node_type=NodeType.START, + title="Test Node", + index=1, + status=WorkflowNodeExecutionStatus.RUNNING, + created_at=datetime.utcnow(), + ) + + # Save should raise error immediately + with pytest.raises(IntegrityError): + self.repository.save(execution) + + # Verify that session.add was called only once (no retry) + assert mock_session.add.call_count == 1 diff --git a/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py b/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py index c60800c493..b81d55cf5e 100644 --- a/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py +++ b/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py @@ -86,6 +86,8 @@ def test_save(repository, session): session_obj, _ = session # Create a mock execution execution = MagicMock(spec=WorkflowNodeExecutionModel) + execution.id = "test-id" + execution.node_execution_id = "test-node-execution-id" execution.tenant_id = None execution.app_id = None execution.inputs = None @@ -95,7 +97,13 @@ def test_save(repository, session): # Mock the to_db_model method to return the execution itself # This simulates the behavior of setting tenant_id and app_id - repository.to_db_model = MagicMock(return_value=execution) + db_model = MagicMock(spec=WorkflowNodeExecutionModel) + db_model.id = "test-id" + db_model.node_execution_id = "test-node-execution-id" + repository.to_db_model = 
MagicMock(return_value=db_model) + + # Mock session.get to return None (no existing record) + session_obj.get.return_value = None # Call save method repository.save(execution) @@ -103,8 +111,14 @@ def test_save(repository, session): # Assert to_db_model was called with the execution repository.to_db_model.assert_called_once_with(execution) - # Assert session.merge was called (now using merge for both save and update) - session_obj.merge.assert_called_once_with(execution) + # Assert session.get was called to check for existing record + session_obj.get.assert_called_once_with(WorkflowNodeExecutionModel, db_model.id) + + # Assert session.add was called for new record + session_obj.add.assert_called_once_with(db_model) + + # Assert session.commit was called + session_obj.commit.assert_called_once() def test_save_with_existing_tenant_id(repository, session): @@ -112,6 +126,8 @@ def test_save_with_existing_tenant_id(repository, session): session_obj, _ = session # Create a mock execution with existing tenant_id execution = MagicMock(spec=WorkflowNodeExecutionModel) + execution.id = "existing-id" + execution.node_execution_id = "existing-node-execution-id" execution.tenant_id = "existing-tenant" execution.app_id = None execution.inputs = None @@ -121,20 +137,39 @@ def test_save_with_existing_tenant_id(repository, session): # Create a modified execution that will be returned by _to_db_model modified_execution = MagicMock(spec=WorkflowNodeExecutionModel) + modified_execution.id = "existing-id" + modified_execution.node_execution_id = "existing-node-execution-id" modified_execution.tenant_id = "existing-tenant" # Tenant ID should not change modified_execution.app_id = repository._app_id # App ID should be set + # Create a dictionary to simulate __dict__ for updating attributes + modified_execution.__dict__ = { + "id": "existing-id", + "node_execution_id": "existing-node-execution-id", + "tenant_id": "existing-tenant", + "app_id": repository._app_id, + } # Mock the to_db_model method to return the modified execution repository.to_db_model = MagicMock(return_value=modified_execution) + # Mock session.get to return an existing record + existing_model = MagicMock(spec=WorkflowNodeExecutionModel) + session_obj.get.return_value = existing_model + # Call save method repository.save(execution) # Assert to_db_model was called with the execution repository.to_db_model.assert_called_once_with(execution) - # Assert session.merge was called with the modified execution (now using merge for both save and update) - session_obj.merge.assert_called_once_with(modified_execution) + # Assert session.get was called to check for existing record + session_obj.get.assert_called_once_with(WorkflowNodeExecutionModel, modified_execution.id) + + # Assert session.add was NOT called since we're updating existing + session_obj.add.assert_not_called() + + # Assert session.commit was called + session_obj.commit.assert_called_once() def test_get_by_workflow_run(repository, session, mocker: MockerFixture): From 0caa94bd1c653d056e1452bbcf3edef4f3af753c Mon Sep 17 00:00:00 2001 From: lyzno1 <92089059+lyzno1@users.noreply.github.com> Date: Tue, 2 Sep 2025 14:44:59 +0800 Subject: [PATCH 04/21] fix: add Indonesian (id-ID) language support and improve language selector (#24951) --- api/constants/languages.py | 1 + .../components/base/select/locale-signin.tsx | 2 +- web/i18n-config/README.md | 8 +- web/i18n-config/language.ts | 22 +- web/i18n-config/languages.json | 20 +- web/i18n/id-ID/app-annotation.ts | 97 ++ web/i18n/id-ID/app-api.ts | 
85 ++ web/i18n/id-ID/app-debug.ts | 533 ++++++++++ web/i18n/id-ID/app-log.ts | 96 ++ web/i18n/id-ID/app-overview.ts | 171 ++++ web/i18n/id-ID/app.ts | 305 ++++++ web/i18n/id-ID/billing.ts | 178 ++++ web/i18n/id-ID/common.ts | 722 +++++++++++++ web/i18n/id-ID/custom.ts | 32 + web/i18n/id-ID/dataset-creation.ts | 209 ++++ web/i18n/id-ID/dataset-documents.ts | 398 ++++++++ web/i18n/id-ID/dataset-hit-testing.ts | 32 + web/i18n/id-ID/dataset-settings.ts | 43 + web/i18n/id-ID/dataset.ts | 215 ++++ web/i18n/id-ID/education.ts | 75 ++ web/i18n/id-ID/explore.ts | 43 + web/i18n/id-ID/layout.ts | 4 + web/i18n/id-ID/login.ts | 115 +++ web/i18n/id-ID/oauth.ts | 27 + web/i18n/id-ID/plugin-tags.ts | 25 + web/i18n/id-ID/plugin.ts | 281 +++++ web/i18n/id-ID/register.ts | 4 + web/i18n/id-ID/run-log.ts | 31 + web/i18n/id-ID/share.ts | 76 ++ web/i18n/id-ID/time.ts | 44 + web/i18n/id-ID/tools.ts | 235 +++++ web/i18n/id-ID/workflow.ts | 966 ++++++++++++++++++ 32 files changed, 5074 insertions(+), 21 deletions(-) create mode 100644 web/i18n/id-ID/app-annotation.ts create mode 100644 web/i18n/id-ID/app-api.ts create mode 100644 web/i18n/id-ID/app-debug.ts create mode 100644 web/i18n/id-ID/app-log.ts create mode 100644 web/i18n/id-ID/app-overview.ts create mode 100644 web/i18n/id-ID/app.ts create mode 100644 web/i18n/id-ID/billing.ts create mode 100644 web/i18n/id-ID/common.ts create mode 100644 web/i18n/id-ID/custom.ts create mode 100644 web/i18n/id-ID/dataset-creation.ts create mode 100644 web/i18n/id-ID/dataset-documents.ts create mode 100644 web/i18n/id-ID/dataset-hit-testing.ts create mode 100644 web/i18n/id-ID/dataset-settings.ts create mode 100644 web/i18n/id-ID/dataset.ts create mode 100644 web/i18n/id-ID/education.ts create mode 100644 web/i18n/id-ID/explore.ts create mode 100644 web/i18n/id-ID/layout.ts create mode 100644 web/i18n/id-ID/login.ts create mode 100644 web/i18n/id-ID/oauth.ts create mode 100644 web/i18n/id-ID/plugin-tags.ts create mode 100644 web/i18n/id-ID/plugin.ts create mode 100644 web/i18n/id-ID/register.ts create mode 100644 web/i18n/id-ID/run-log.ts create mode 100644 web/i18n/id-ID/share.ts create mode 100644 web/i18n/id-ID/time.ts create mode 100644 web/i18n/id-ID/tools.ts create mode 100644 web/i18n/id-ID/workflow.ts diff --git a/api/constants/languages.py b/api/constants/languages.py index ab19392c59..a509ddcf5d 100644 --- a/api/constants/languages.py +++ b/api/constants/languages.py @@ -19,6 +19,7 @@ language_timezone_mapping = { "fa-IR": "Asia/Tehran", "sl-SI": "Europe/Ljubljana", "th-TH": "Asia/Bangkok", + "id-ID": "Asia/Jakarta", } languages = list(language_timezone_mapping.keys()) diff --git a/web/app/components/base/select/locale-signin.tsx b/web/app/components/base/select/locale-signin.tsx index 48dbee1ca3..4ce6025edd 100644 --- a/web/app/components/base/select/locale-signin.tsx +++ b/web/app/components/base/select/locale-signin.tsx @@ -36,7 +36,7 @@ export default function LocaleSigninSelect({ leaveTo="transform opacity-0 scale-95" > -
+
{items.map((item) => { return
handleFormChange(variable, val === 1)} + value={value[variable]} + onChange={val => handleFormChange(variable, val)} > - True - False + True + False
{fieldMoreInfo?.(formSchema)} diff --git a/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/parameter-item.tsx b/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/parameter-item.tsx index 719817152d..f7f1268212 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/parameter-item.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/parameter-item.tsx @@ -91,8 +91,8 @@ const ParameterItem: FC = ({ numberInputRef.current!.value = `${num}` } - const handleRadioChange = (v: number) => { - handleInputChange(v === 1) + const handleRadioChange = (v: boolean) => { + handleInputChange(v) } const handleStringInputChange = (e: React.ChangeEvent) => { @@ -187,11 +187,11 @@ const ParameterItem: FC = ({ return ( - True - False + True + False ) } From c373b734bc4832bdff4101fb0f55c9eee5685935 Mon Sep 17 00:00:00 2001 From: 17hz <0x149527@gmail.com> Date: Tue, 2 Sep 2025 16:04:12 +0800 Subject: [PATCH 08/21] feat: make secretInput type field prevent browser auto-fill (#24971) --- web/app/components/base/form/components/base/base-field.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/web/app/components/base/form/components/base/base-field.tsx b/web/app/components/base/form/components/base/base-field.tsx index c909eedc87..f25dfb069d 100644 --- a/web/app/components/base/form/components/base/base-field.tsx +++ b/web/app/components/base/form/components/base/base-field.tsx @@ -143,6 +143,7 @@ const BaseField = ({ onBlur={field.handleBlur} disabled={disabled} placeholder={memorizedPlaceholder} + autoComplete={'new-password'} /> ) } From 7b379e2a61f2b3dda573336f03272d8e44b13af7 Mon Sep 17 00:00:00 2001 From: Bowen Liang Date: Tue, 2 Sep 2025 16:05:13 +0800 Subject: [PATCH 09/21] chore: apply ty checks on api code with script and ci action (#24653) --- .github/workflows/api-tests.yml | 6 +- .github/workflows/style.yml | 3 + .../nacos/http_request.py | 2 +- api/controllers/console/apikey.py | 2 +- .../console/auth/data_source_oauth.py | 4 +- api/controllers/console/auth/oauth.py | 2 +- api/controllers/service_api/app/audio.py | 2 +- .../generate_response_converter.py | 2 +- .../workflow/generate_response_converter.py | 2 +- .../app/features/rate_limiting/rate_limit.py | 6 +- .../based_generate_task_pipeline.py | 2 +- .../api_based_extension_requestor.py | 4 +- api/core/helper/module_import_helper.py | 2 +- api/core/llm_generator/llm_generator.py | 78 ++++++------------- .../aliyun_trace/data_exporter/traceclient.py | 2 +- api/core/plugin/impl/base.py | 2 +- .../analyticdb/analyticdb_vector_openapi.py | 12 +-- .../vdb/clickzetta/clickzetta_vector.py | 2 +- .../vdb/couchbase/couchbase_vector.py | 2 +- .../vdb/elasticsearch/elasticsearch_vector.py | 2 +- .../datasource/vdb/milvus/milvus_vector.py | 7 +- .../vdb/vikingdb/vikingdb_vector.py | 6 +- .../vdb/weaviate/weaviate_vector.py | 10 +-- .../multi_dataset_function_call_router.py | 15 ++-- .../router/multi_dataset_react_route.py | 17 ++-- api/core/tools/builtin_tool/provider.py | 2 +- api/core/tools/tool_label_manager.py | 6 +- .../dataset_retriever_base_tool.py | 3 +- .../nodes/answer/answer_stream_processor.py | 6 +- .../workflow/nodes/if_else/if_else_node.py | 2 +- .../nodes/iteration/iteration_node.py | 4 +- api/core/workflow/nodes/loop/loop_node.py | 4 +- api/extensions/ext_otel.py | 2 +- api/extensions/ext_redis.py | 3 +- api/libs/external_api.py | 2 +- api/libs/gmpy2_pkcs10aep_cipher.py | 4 +- 
api/libs/passport.py | 8 +- api/libs/sendgrid.py | 6 +- api/pyproject.toml | 1 + api/services/dataset_service.py | 10 ++- api/services/external_knowledge_service.py | 2 +- api/services/model_load_balancing_service.py | 4 +- .../tools/mcp_tools_manage_service.py | 2 +- api/ty.toml | 16 ++++ api/uv.lock | 27 +++++++ dev/reformat | 3 + dev/ty-check | 10 +++ web/.husky/pre-commit | 9 +++ 48 files changed, 188 insertions(+), 142 deletions(-) create mode 100644 api/ty.toml create mode 100755 dev/ty-check diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index 4debc33229..b76e33242a 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -42,11 +42,7 @@ jobs: - name: Run Unit tests run: | uv run --project api bash dev/pytest/pytest_unit_tests.sh - - name: Run ty check - run: | - cd api - uv add --dev ty - uv run ty check || true + - name: Run pyrefly check run: | cd api diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index 9c79dbc57e..d5de70ffba 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -44,6 +44,9 @@ jobs: if: steps.changed-files.outputs.any_changed == 'true' run: uv sync --project api --dev + - name: Run ty check + run: dev/ty-check + - name: Dotenv check if: steps.changed-files.outputs.any_changed == 'true' run: uv run --project api dotenv-linter ./api/.env.example ./web/.env.example diff --git a/api/configs/remote_settings_sources/nacos/http_request.py b/api/configs/remote_settings_sources/nacos/http_request.py index 9b3359c6ad..910c948b57 100644 --- a/api/configs/remote_settings_sources/nacos/http_request.py +++ b/api/configs/remote_settings_sources/nacos/http_request.py @@ -27,7 +27,7 @@ class NacosHttpClient: response = requests.request(method, url="http://" + self.server + url, headers=headers, params=params) response.raise_for_status() return response.text - except requests.exceptions.RequestException as e: + except requests.RequestException as e: return f"Request to Nacos failed: {e}" def _inject_auth_info(self, headers, params, module="config"): diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py index 401e88709a..758b574d1a 100644 --- a/api/controllers/console/apikey.py +++ b/api/controllers/console/apikey.py @@ -84,7 +84,7 @@ class BaseApiKeyListResource(Resource): flask_restx.abort( 400, message=f"Cannot create more than {self.max_keys} API keys for this resource type.", - code="max_keys_exceeded", + custom="max_keys_exceeded", ) key = ApiToken.generate_api_key(self.token_prefix, 24) diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py index 35a91a52ea..8f57b3d03e 100644 --- a/api/controllers/console/auth/data_source_oauth.py +++ b/api/controllers/console/auth/data_source_oauth.py @@ -81,7 +81,7 @@ class OAuthDataSourceBinding(Resource): return {"error": "Invalid code"}, 400 try: oauth_provider.get_access_token(code) - except requests.exceptions.HTTPError as e: + except requests.HTTPError as e: logger.exception( "An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text ) @@ -104,7 +104,7 @@ class OAuthDataSourceSync(Resource): return {"error": "Invalid provider"}, 400 try: oauth_provider.sync_data_source(binding_id) - except requests.exceptions.HTTPError as e: + except requests.HTTPError as e: logger.exception( "An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text ) diff --git 
a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index 40c62f1f3e..332a98c474 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -80,7 +80,7 @@ class OAuthCallback(Resource): try: token = oauth_provider.get_access_token(code) user_info = oauth_provider.get_user_info(token) - except requests.exceptions.RequestException as e: + except requests.RequestException as e: error_text = e.response.text if e.response else str(e) logger.exception("An error occurred during the OAuth process with %s: %s", provider, error_text) return {"error": "OAuth process failed"}, 400 diff --git a/api/controllers/service_api/app/audio.py b/api/controllers/service_api/app/audio.py index 8148fa8ccc..33035123d7 100644 --- a/api/controllers/service_api/app/audio.py +++ b/api/controllers/service_api/app/audio.py @@ -55,7 +55,7 @@ class AudioApi(Resource): file = request.files["file"] try: - response = AudioService.transcript_asr(app_model=app_model, file=file, end_user=end_user) + response = AudioService.transcript_asr(app_model=app_model, file=file, end_user=end_user.id) return response except services.errors.app_model_config.AppModelConfigBrokenError: diff --git a/api/core/app/apps/advanced_chat/generate_response_converter.py b/api/core/app/apps/advanced_chat/generate_response_converter.py index b2bff43208..627f6b47ce 100644 --- a/api/core/app/apps/advanced_chat/generate_response_converter.py +++ b/api/core/app/apps/advanced_chat/generate_response_converter.py @@ -118,7 +118,7 @@ class AdvancedChatAppGenerateResponseConverter(AppGenerateResponseConverter): data = cls._error_to_stream_response(sub_stream_response.err) response_chunk.update(data) elif isinstance(sub_stream_response, NodeStartStreamResponse | NodeFinishStreamResponse): - response_chunk.update(sub_stream_response.to_ignore_detail_dict()) + response_chunk.update(sub_stream_response.to_ignore_detail_dict()) # ty: ignore [unresolved-attribute] else: response_chunk.update(sub_stream_response.to_dict()) diff --git a/api/core/app/apps/workflow/generate_response_converter.py b/api/core/app/apps/workflow/generate_response_converter.py index 10ec73a7d2..917ede6173 100644 --- a/api/core/app/apps/workflow/generate_response_converter.py +++ b/api/core/app/apps/workflow/generate_response_converter.py @@ -89,7 +89,7 @@ class WorkflowAppGenerateResponseConverter(AppGenerateResponseConverter): data = cls._error_to_stream_response(sub_stream_response.err) response_chunk.update(data) elif isinstance(sub_stream_response, NodeStartStreamResponse | NodeFinishStreamResponse): - response_chunk.update(sub_stream_response.to_ignore_detail_dict()) + response_chunk.update(sub_stream_response.to_ignore_detail_dict()) # ty: ignore [unresolved-attribute] else: response_chunk.update(sub_stream_response.to_dict()) yield response_chunk diff --git a/api/core/app/features/rate_limiting/rate_limit.py b/api/core/app/features/rate_limiting/rate_limit.py index 632f35d106..f526d2a16a 100644 --- a/api/core/app/features/rate_limiting/rate_limit.py +++ b/api/core/app/features/rate_limiting/rate_limit.py @@ -96,7 +96,11 @@ class RateLimit: if isinstance(generator, Mapping): return generator else: - return RateLimitGenerator(rate_limit=self, generator=generator, request_id=request_id) + return RateLimitGenerator( + rate_limit=self, + generator=generator, # ty: ignore [invalid-argument-type] + request_id=request_id, + ) class RateLimitGenerator: diff --git a/api/core/app/task_pipeline/based_generate_task_pipeline.py 
b/api/core/app/task_pipeline/based_generate_task_pipeline.py index 8c0a442158..d04855e992 100644 --- a/api/core/app/task_pipeline/based_generate_task_pipeline.py +++ b/api/core/app/task_pipeline/based_generate_task_pipeline.py @@ -50,7 +50,7 @@ class BasedGenerateTaskPipeline: if isinstance(e, InvokeAuthorizationError): err = InvokeAuthorizationError("Incorrect API key provided") elif isinstance(e, InvokeError | ValueError): - err = e + err = e # ty: ignore [invalid-assignment] else: description = getattr(e, "description", None) err = Exception(description if description is not None else str(e)) diff --git a/api/core/extension/api_based_extension_requestor.py b/api/core/extension/api_based_extension_requestor.py index accccd8c40..4423299f70 100644 --- a/api/core/extension/api_based_extension_requestor.py +++ b/api/core/extension/api_based_extension_requestor.py @@ -43,9 +43,9 @@ class APIBasedExtensionRequestor: timeout=self.timeout, proxies=proxies, ) - except requests.exceptions.Timeout: + except requests.Timeout: raise ValueError("request timeout") - except requests.exceptions.ConnectionError: + except requests.ConnectionError: raise ValueError("request connection error") if response.status_code != 200: diff --git a/api/core/helper/module_import_helper.py b/api/core/helper/module_import_helper.py index 159c5d23fa..6a2f27b8ba 100644 --- a/api/core/helper/module_import_helper.py +++ b/api/core/helper/module_import_helper.py @@ -47,7 +47,7 @@ def get_subclasses_from_module(mod: ModuleType, parent_type: type) -> list[type] def load_single_subclass_from_source( - *, module_name: str, script_path: AnyStr, parent_type: type, use_lazy_loader: bool = False + *, module_name: str, script_path: str, parent_type: type, use_lazy_loader: bool = False ) -> type: """ Load a single subclass from the source diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index c5c10f096d..8324eb8277 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -56,11 +56,8 @@ class LLMGenerator: prompts = [UserPromptMessage(content=prompt)] with measure_time() as timer: - response = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=list(prompts), model_parameters={"max_tokens": 500, "temperature": 1}, stream=False - ), + response: LLMResult = model_instance.invoke_llm( + prompt_messages=list(prompts), model_parameters={"max_tokens": 500, "temperature": 1}, stream=False ) answer = cast(str, response.message.content) cleaned_answer = re.sub(r"^.*(\{.*\}).*$", r"\1", answer, flags=re.DOTALL) @@ -113,13 +110,10 @@ class LLMGenerator: prompt_messages = [UserPromptMessage(content=prompt)] try: - response = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=list(prompt_messages), - model_parameters={"max_tokens": 256, "temperature": 0}, - stream=False, - ), + response: LLMResult = model_instance.invoke_llm( + prompt_messages=list(prompt_messages), + model_parameters={"max_tokens": 256, "temperature": 0}, + stream=False, ) text_content = response.message.get_text_content() @@ -162,11 +156,8 @@ class LLMGenerator: ) try: - response = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False - ), + response: LLMResult = model_instance.invoke_llm( + prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False ) rule_config["prompt"] = cast(str, response.message.content) @@ -212,11 +203,8 @@ class LLMGenerator: 
try: try: # the first step to generate the task prompt - prompt_content = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False - ), + prompt_content: LLMResult = model_instance.invoke_llm( + prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False ) except InvokeError as e: error = str(e) @@ -248,11 +236,8 @@ class LLMGenerator: statement_messages = [UserPromptMessage(content=statement_generate_prompt)] try: - parameter_content = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=list(parameter_messages), model_parameters=model_parameters, stream=False - ), + parameter_content: LLMResult = model_instance.invoke_llm( + prompt_messages=list(parameter_messages), model_parameters=model_parameters, stream=False ) rule_config["variables"] = re.findall(r'"\s*([^"]+)\s*"', cast(str, parameter_content.message.content)) except InvokeError as e: @@ -260,11 +245,8 @@ class LLMGenerator: error_step = "generate variables" try: - statement_content = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=list(statement_messages), model_parameters=model_parameters, stream=False - ), + statement_content: LLMResult = model_instance.invoke_llm( + prompt_messages=list(statement_messages), model_parameters=model_parameters, stream=False ) rule_config["opening_statement"] = cast(str, statement_content.message.content) except InvokeError as e: @@ -307,11 +289,8 @@ class LLMGenerator: prompt_messages = [UserPromptMessage(content=prompt)] model_parameters = model_config.get("completion_params", {}) try: - response = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False - ), + response: LLMResult = model_instance.invoke_llm( + prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False ) generated_code = cast(str, response.message.content) @@ -338,13 +317,10 @@ class LLMGenerator: prompt_messages = [SystemPromptMessage(content=prompt), UserPromptMessage(content=query)] - response = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=prompt_messages, - model_parameters={"temperature": 0.01, "max_tokens": 2000}, - stream=False, - ), + response: LLMResult = model_instance.invoke_llm( + prompt_messages=prompt_messages, + model_parameters={"temperature": 0.01, "max_tokens": 2000}, + stream=False, ) answer = cast(str, response.message.content) @@ -367,11 +343,8 @@ class LLMGenerator: model_parameters = model_config.get("model_parameters", {}) try: - response = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False - ), + response: LLMResult = model_instance.invoke_llm( + prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False ) raw_content = response.message.content @@ -555,11 +528,8 @@ class LLMGenerator: model_parameters = {"temperature": 0.4} try: - response = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False - ), + response: LLMResult = model_instance.invoke_llm( + prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False ) generated_raw = cast(str, response.message.content) diff --git a/api/core/ops/aliyun_trace/data_exporter/traceclient.py b/api/core/ops/aliyun_trace/data_exporter/traceclient.py index 3eb7c30d55..881ec2141c 100644 
--- a/api/core/ops/aliyun_trace/data_exporter/traceclient.py +++ b/api/core/ops/aliyun_trace/data_exporter/traceclient.py @@ -72,7 +72,7 @@ class TraceClient: else: logger.debug("AliyunTrace API check failed: Unexpected status code: %s", response.status_code) return False - except requests.exceptions.RequestException as e: + except requests.RequestException as e: logger.debug("AliyunTrace API check failed: %s", str(e)) raise ValueError(f"AliyunTrace API check failed: {str(e)}") diff --git a/api/core/plugin/impl/base.py b/api/core/plugin/impl/base.py index 6c65bdb0fd..8e3df4da2c 100644 --- a/api/core/plugin/impl/base.py +++ b/api/core/plugin/impl/base.py @@ -64,7 +64,7 @@ class BasePluginClient: response = requests.request( method=method, url=str(url), headers=headers, data=data, params=params, stream=stream, files=files ) - except requests.exceptions.ConnectionError: + except requests.ConnectionError: logger.exception("Request to Plugin Daemon Service failed") raise PluginDaemonInnerError(code=-500, message="Request to Plugin Daemon Service failed") diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py index aa0204ba70..48e3f20e38 100644 --- a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py +++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py @@ -192,8 +192,8 @@ class AnalyticdbVectorOpenAPI: collection=self._collection_name, metrics=self.config.metrics, include_values=True, - vector=None, - content=None, + vector=None, # ty: ignore [invalid-argument-type] + content=None, # ty: ignore [invalid-argument-type] top_k=1, filter=f"ref_doc_id='{id}'", ) @@ -211,7 +211,7 @@ class AnalyticdbVectorOpenAPI: namespace=self.config.namespace, namespace_password=self.config.namespace_password, collection=self._collection_name, - collection_data=None, + collection_data=None, # ty: ignore [invalid-argument-type] collection_data_filter=f"ref_doc_id IN {ids_str}", ) self._client.delete_collection_data(request) @@ -225,7 +225,7 @@ class AnalyticdbVectorOpenAPI: namespace=self.config.namespace, namespace_password=self.config.namespace_password, collection=self._collection_name, - collection_data=None, + collection_data=None, # ty: ignore [invalid-argument-type] collection_data_filter=f"metadata_ ->> '{key}' = '{value}'", ) self._client.delete_collection_data(request) @@ -249,7 +249,7 @@ class AnalyticdbVectorOpenAPI: include_values=kwargs.pop("include_values", True), metrics=self.config.metrics, vector=query_vector, - content=None, + content=None, # ty: ignore [invalid-argument-type] top_k=kwargs.get("top_k", 4), filter=where_clause, ) @@ -285,7 +285,7 @@ class AnalyticdbVectorOpenAPI: collection=self._collection_name, include_values=kwargs.pop("include_values", True), metrics=self.config.metrics, - vector=None, + vector=None, # ty: ignore [invalid-argument-type] content=query, top_k=kwargs.get("top_k", 4), filter=where_clause, diff --git a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py index 6e8077ffd9..b2102ed2e0 100644 --- a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py +++ b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py @@ -12,7 +12,7 @@ import clickzetta # type: ignore from pydantic import BaseModel, model_validator if TYPE_CHECKING: - from clickzetta import Connection + from clickzetta.connector.v0.connection import Connection # type: ignore from configs import 
dify_config from core.rag.datasource.vdb.field import Field diff --git a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py index d22a7e4fd4..72b42b6e3c 100644 --- a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py +++ b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py @@ -306,7 +306,7 @@ class CouchbaseVector(BaseVector): def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: top_k = kwargs.get("top_k", 4) try: - CBrequest = search.SearchRequest.create(search.QueryStringQuery("text:" + query)) + CBrequest = search.SearchRequest.create(search.QueryStringQuery("text:" + query)) # ty: ignore [too-many-positional-arguments] search_iter = self._scope.search( self._collection_name + "_search", CBrequest, SearchOptions(limit=top_k, fields=["*"]) ) diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py index cbad0e67de..4e288ccc08 100644 --- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py +++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py @@ -138,7 +138,7 @@ class ElasticSearchVector(BaseVector): if not client.ping(): raise ConnectionError("Failed to connect to Elasticsearch") - except requests.exceptions.ConnectionError as e: + except requests.ConnectionError as e: raise ConnectionError(f"Vector database connection error: {str(e)}") except Exception as e: raise ConnectionError(f"Elasticsearch client initialization failed: {str(e)}") diff --git a/api/core/rag/datasource/vdb/milvus/milvus_vector.py b/api/core/rag/datasource/vdb/milvus/milvus_vector.py index 8ae616fa77..4ad0fada15 100644 --- a/api/core/rag/datasource/vdb/milvus/milvus_vector.py +++ b/api/core/rag/datasource/vdb/milvus/milvus_vector.py @@ -376,7 +376,12 @@ class MilvusVector(BaseVector): if config.token: client = MilvusClient(uri=config.uri, token=config.token, db_name=config.database) else: - client = MilvusClient(uri=config.uri, user=config.user, password=config.password, db_name=config.database) + client = MilvusClient( + uri=config.uri, + user=config.user or "", + password=config.password or "", + db_name=config.database, + ) return client diff --git a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py index a0a2e47d19..33267741c2 100644 --- a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py +++ b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py @@ -32,9 +32,9 @@ class VikingDBConfig(BaseModel): scheme: str connection_timeout: int socket_timeout: int - index_type: str = IndexType.HNSW - distance: str = DistanceType.L2 - quant: str = QuantType.Float + index_type: str = str(IndexType.HNSW) + distance: str = str(DistanceType.L2) + quant: str = str(QuantType.Float) class VikingDBVector(BaseVector): diff --git a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py index a7e0789a92..b3fe013e70 100644 --- a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py +++ b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py @@ -37,22 +37,22 @@ class WeaviateVector(BaseVector): self._attributes = attributes def _init_client(self, config: WeaviateConfig) -> weaviate.Client: - auth_config = weaviate.auth.AuthApiKey(api_key=config.api_key) + auth_config = weaviate.AuthApiKey(api_key=config.api_key or "") - weaviate.connect.connection.has_grpc = False + 
weaviate.connect.connection.has_grpc = False # ty: ignore [unresolved-attribute] # Fix to minimize the performance impact of the deprecation check in weaviate-client 3.24.0, # by changing the connection timeout to pypi.org from 1 second to 0.001 seconds. # TODO: This can be removed once weaviate-client is updated to 3.26.7 or higher, # which does not contain the deprecation check. - if hasattr(weaviate.connect.connection, "PYPI_TIMEOUT"): - weaviate.connect.connection.PYPI_TIMEOUT = 0.001 + if hasattr(weaviate.connect.connection, "PYPI_TIMEOUT"): # ty: ignore [unresolved-attribute] + weaviate.connect.connection.PYPI_TIMEOUT = 0.001 # ty: ignore [unresolved-attribute] try: client = weaviate.Client( url=config.endpoint, auth_client_secret=auth_config, timeout_config=(5, 60), startup_period=None ) - except requests.exceptions.ConnectionError: + except requests.ConnectionError: raise ConnectionError("Vector database connection error") client.batch.configure( diff --git a/api/core/rag/retrieval/router/multi_dataset_function_call_router.py b/api/core/rag/retrieval/router/multi_dataset_function_call_router.py index b008d0df9c..e6dab1d090 100644 --- a/api/core/rag/retrieval/router/multi_dataset_function_call_router.py +++ b/api/core/rag/retrieval/router/multi_dataset_function_call_router.py @@ -1,4 +1,4 @@ -from typing import Union, cast +from typing import Union from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity from core.model_manager import ModelInstance @@ -28,14 +28,11 @@ class FunctionCallMultiDatasetRouter: SystemPromptMessage(content="You are a helpful AI assistant."), UserPromptMessage(content=query), ] - result = cast( - LLMResult, - model_instance.invoke_llm( - prompt_messages=prompt_messages, - tools=dataset_tools, - stream=False, - model_parameters={"temperature": 0.2, "top_p": 0.3, "max_tokens": 1500}, - ), + result: LLMResult = model_instance.invoke_llm( + prompt_messages=prompt_messages, + tools=dataset_tools, + stream=False, + model_parameters={"temperature": 0.2, "top_p": 0.3, "max_tokens": 1500}, ) if result.message.tool_calls: # get retrieval model config diff --git a/api/core/rag/retrieval/router/multi_dataset_react_route.py b/api/core/rag/retrieval/router/multi_dataset_react_route.py index 33a283771d..8ffde26f8e 100644 --- a/api/core/rag/retrieval/router/multi_dataset_react_route.py +++ b/api/core/rag/retrieval/router/multi_dataset_react_route.py @@ -1,5 +1,5 @@ from collections.abc import Generator, Sequence -from typing import Union, cast +from typing import Union from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity from core.model_manager import ModelInstance @@ -150,15 +150,12 @@ class ReactMultiDatasetRouter: :param stop: stop :return: """ - invoke_result = cast( - Generator[LLMResult, None, None], - model_instance.invoke_llm( - prompt_messages=prompt_messages, - model_parameters=completion_param, - stop=stop, - stream=True, - user=user_id, - ), + invoke_result: Generator[LLMResult, None, None] = model_instance.invoke_llm( + prompt_messages=prompt_messages, + model_parameters=completion_param, + stop=stop, + stream=True, + user=user_id, ) # handle invoke result diff --git a/api/core/tools/builtin_tool/provider.py b/api/core/tools/builtin_tool/provider.py index a70ded9efd..375a32f39d 100644 --- a/api/core/tools/builtin_tool/provider.py +++ b/api/core/tools/builtin_tool/provider.py @@ -74,7 +74,7 @@ class BuiltinToolProviderController(ToolProviderController): tool = load_yaml_file(path.join(tool_path, 
tool_file), ignore_error=False) # get tool class, import the module - assistant_tool_class: type[BuiltinTool] = load_single_subclass_from_source( + assistant_tool_class: type = load_single_subclass_from_source( module_name=f"core.tools.builtin_tool.providers.{provider}.tools.{tool_name}", script_path=path.join( path.dirname(path.realpath(__file__)), diff --git a/api/core/tools/tool_label_manager.py b/api/core/tools/tool_label_manager.py index 90c09a4441..84b874975a 100644 --- a/api/core/tools/tool_label_manager.py +++ b/api/core/tools/tool_label_manager.py @@ -26,7 +26,7 @@ class ToolLabelManager: labels = cls.filter_tool_labels(labels) if isinstance(controller, ApiToolProviderController | WorkflowToolProviderController): - provider_id = controller.provider_id + provider_id = controller.provider_id # ty: ignore [unresolved-attribute] else: raise ValueError("Unsupported tool type") @@ -51,7 +51,7 @@ class ToolLabelManager: Get tool labels """ if isinstance(controller, ApiToolProviderController | WorkflowToolProviderController): - provider_id = controller.provider_id + provider_id = controller.provider_id # ty: ignore [unresolved-attribute] elif isinstance(controller, BuiltinToolProviderController): return controller.tool_labels else: @@ -85,7 +85,7 @@ class ToolLabelManager: provider_ids = [] for controller in tool_providers: assert isinstance(controller, ApiToolProviderController | WorkflowToolProviderController) - provider_ids.append(controller.provider_id) + provider_ids.append(controller.provider_id) # ty: ignore [unresolved-attribute] labels: list[ToolLabelBinding] = ( db.session.query(ToolLabelBinding).where(ToolLabelBinding.tool_id.in_(provider_ids)).all() diff --git a/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py index 4f489e00f4..2e572099b3 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_base_tool.py @@ -1,7 +1,6 @@ -from abc import abstractmethod +from abc import ABC, abstractmethod from typing import Optional -from msal_extensions.persistence import ABC # type: ignore from pydantic import BaseModel, ConfigDict from core.callback_handler.index_tool_callback_handler import DatasetIndexToolCallbackHandler diff --git a/api/core/workflow/nodes/answer/answer_stream_processor.py b/api/core/workflow/nodes/answer/answer_stream_processor.py index 4a75c9edd4..77b0a78c8a 100644 --- a/api/core/workflow/nodes/answer/answer_stream_processor.py +++ b/api/core/workflow/nodes/answer/answer_stream_processor.py @@ -52,12 +52,12 @@ class AnswerStreamProcessor(StreamProcessor): yield event elif isinstance(event, NodeRunSucceededEvent | NodeRunExceptionEvent): yield event - if event.route_node_state.node_id in self.current_stream_chunk_generating_node_ids: + if event.route_node_state.node_id in self.current_stream_chunk_generating_node_ids: # ty: ignore [unresolved-attribute] # update self.route_position after all stream event finished - for answer_node_id in self.current_stream_chunk_generating_node_ids[event.route_node_state.node_id]: + for answer_node_id in self.current_stream_chunk_generating_node_ids[event.route_node_state.node_id]: # ty: ignore [unresolved-attribute] self.route_position[answer_node_id] += 1 - del self.current_stream_chunk_generating_node_ids[event.route_node_state.node_id] + del self.current_stream_chunk_generating_node_ids[event.route_node_state.node_id] # ty: ignore 
[unresolved-attribute] self._remove_unreachable_nodes(event) diff --git a/api/core/workflow/nodes/if_else/if_else_node.py b/api/core/workflow/nodes/if_else/if_else_node.py index 2c83ea3d4f..c2bed870b0 100644 --- a/api/core/workflow/nodes/if_else/if_else_node.py +++ b/api/core/workflow/nodes/if_else/if_else_node.py @@ -83,7 +83,7 @@ class IfElseNode(BaseNode): else: # TODO: Update database then remove this # Fallback to old structure if cases are not defined - input_conditions, group_result, final_result = _should_not_use_old_function( + input_conditions, group_result, final_result = _should_not_use_old_function( # ty: ignore [deprecated] condition_processor=condition_processor, variable_pool=self.graph_runtime_state.variable_pool, conditions=self._node_data.conditions or [], diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index 7f591a3ea9..9deac1748a 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -441,8 +441,8 @@ class IterationNode(BaseNode): iteration_run_id = parallel_mode_run_id if parallel_mode_run_id is not None else f"{current_index}" next_index = int(current_index) + 1 for event in rst: - if isinstance(event, (BaseNodeEvent | BaseParallelBranchEvent)) and not event.in_iteration_id: - event.in_iteration_id = self.node_id + if isinstance(event, (BaseNodeEvent | BaseParallelBranchEvent)) and not event.in_iteration_id: # ty: ignore [unresolved-attribute] + event.in_iteration_id = self.node_id # ty: ignore [unresolved-attribute] if ( isinstance(event, BaseNodeEvent) diff --git a/api/core/workflow/nodes/loop/loop_node.py b/api/core/workflow/nodes/loop/loop_node.py index 64296dc046..adde0837e0 100644 --- a/api/core/workflow/nodes/loop/loop_node.py +++ b/api/core/workflow/nodes/loop/loop_node.py @@ -299,8 +299,8 @@ class LoopNode(BaseNode): check_break_result = False for event in rst: - if isinstance(event, (BaseNodeEvent | BaseParallelBranchEvent)) and not event.in_loop_id: - event.in_loop_id = self.node_id + if isinstance(event, (BaseNodeEvent | BaseParallelBranchEvent)) and not event.in_loop_id: # ty: ignore [unresolved-attribute] + event.in_loop_id = self.node_id # ty: ignore [unresolved-attribute] if ( isinstance(event, BaseNodeEvent) diff --git a/api/extensions/ext_otel.py b/api/extensions/ext_otel.py index 12bb185535..b0059693e2 100644 --- a/api/extensions/ext_otel.py +++ b/api/extensions/ext_otel.py @@ -103,7 +103,7 @@ def init_app(app: DifyApp): def shutdown_tracer(): provider = trace.get_tracer_provider() if hasattr(provider, "force_flush"): - provider.force_flush() + provider.force_flush() # ty: ignore [call-non-callable] class ExceptionLoggingHandler(logging.Handler): """Custom logging handler that creates spans for logging.exception() calls""" diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py index 1b22886fc1..61b26b5b95 100644 --- a/api/extensions/ext_redis.py +++ b/api/extensions/ext_redis.py @@ -260,7 +260,8 @@ def redis_fallback(default_return: Optional[Any] = None): try: return func(*args, **kwargs) except RedisError as e: - logger.warning("Redis operation failed in %s: %s", func.__name__, str(e), exc_info=True) + func_name = getattr(func, "__name__", "Unknown") + logger.warning("Redis operation failed in %s: %s", func_name, str(e), exc_info=True) return default_return return wrapper diff --git a/api/libs/external_api.py b/api/libs/external_api.py index a630a97fd6..d5409c4b4c 100644 --- 
a/api/libs/external_api.py +++ b/api/libs/external_api.py @@ -101,7 +101,7 @@ def register_external_error_handlers(api: Api) -> None: exc_info: Any = sys.exc_info() if exc_info[1] is None: exc_info = None - current_app.log_exception(exc_info) + current_app.log_exception(exc_info) # ty: ignore [invalid-argument-type] return data, status_code diff --git a/api/libs/gmpy2_pkcs10aep_cipher.py b/api/libs/gmpy2_pkcs10aep_cipher.py index 2dae87e171..9759156c0f 100644 --- a/api/libs/gmpy2_pkcs10aep_cipher.py +++ b/api/libs/gmpy2_pkcs10aep_cipher.py @@ -136,7 +136,7 @@ class PKCS1OAepCipher: # Step 3a (OS2IP) em_int = bytes_to_long(em) # Step 3b (RSAEP) - m_int = gmpy2.powmod(em_int, self._key.e, self._key.n) + m_int = gmpy2.powmod(em_int, self._key.e, self._key.n) # ty: ignore [unresolved-attribute] # Step 3c (I2OSP) c = long_to_bytes(m_int, k) return c @@ -169,7 +169,7 @@ class PKCS1OAepCipher: ct_int = bytes_to_long(ciphertext) # Step 2b (RSADP) # m_int = self._key._decrypt(ct_int) - m_int = gmpy2.powmod(ct_int, self._key.d, self._key.n) + m_int = gmpy2.powmod(ct_int, self._key.d, self._key.n) # ty: ignore [unresolved-attribute] # Complete step 2c (I2OSP) em = long_to_bytes(m_int, k) # Step 3a diff --git a/api/libs/passport.py b/api/libs/passport.py index fe8fc33b5f..22dd20b73b 100644 --- a/api/libs/passport.py +++ b/api/libs/passport.py @@ -14,11 +14,11 @@ class PassportService: def verify(self, token): try: return jwt.decode(token, self.sk, algorithms=["HS256"]) - except jwt.exceptions.ExpiredSignatureError: + except jwt.ExpiredSignatureError: raise Unauthorized("Token has expired.") - except jwt.exceptions.InvalidSignatureError: + except jwt.InvalidSignatureError: raise Unauthorized("Invalid token signature.") - except jwt.exceptions.DecodeError: + except jwt.DecodeError: raise Unauthorized("Invalid token.") - except jwt.exceptions.PyJWTError: # Catch-all for other JWT errors + except jwt.PyJWTError: # Catch-all for other JWT errors raise Unauthorized("Invalid token.") diff --git a/api/libs/sendgrid.py b/api/libs/sendgrid.py index 5f7d31d47d..9a4861f1e7 100644 --- a/api/libs/sendgrid.py +++ b/api/libs/sendgrid.py @@ -26,9 +26,9 @@ class SendGridClient: to_email = To(_to) subject = mail["subject"] content = Content("text/html", mail["html"]) - mail = Mail(from_email, to_email, subject, content) - mail_json = mail.get() # type: ignore - response = sg.client.mail.send.post(request_body=mail_json) + sg_mail = Mail(from_email, to_email, subject, content) + mail_json = sg_mail.get() + response = sg.client.mail.send.post(request_body=mail_json) # ty: ignore [call-non-callable] logger.debug(response.status_code) logger.debug(response.body) logger.debug(response.headers) diff --git a/api/pyproject.toml b/api/pyproject.toml index 3078202498..8304d8e46a 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -110,6 +110,7 @@ dev = [ "dotenv-linter~=0.5.0", "faker~=32.1.0", "lxml-stubs~=0.5.1", + "ty~=0.0.1a19", "mypy~=1.17.1", "ruff~=0.12.3", "pytest~=8.3.2", diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index d3a98bf0a8..2e057b81c2 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -133,7 +133,11 @@ class DatasetService: # Check if tag_ids is not empty to avoid WHERE false condition if tag_ids and len(tag_ids) > 0: - target_ids = TagService.get_target_ids_by_tag_ids("knowledge", tenant_id, tag_ids) + target_ids = TagService.get_target_ids_by_tag_ids( + "knowledge", + tenant_id, # ty: ignore [invalid-argument-type] + tag_ids, + ) if 
target_ids and len(target_ids) > 0:
                 query = query.where(Dataset.id.in_(target_ids))
             else:
@@ -2361,7 +2365,9 @@ class SegmentService:
         index_node_ids = [seg.index_node_id for seg in segments]
         total_words = sum(seg.word_count for seg in segments)
 
-        document.word_count -= total_words
+        document.word_count = (
+            document.word_count - total_words if document.word_count and document.word_count > total_words else 0
+        )
         db.session.add(document)
 
         delete_segment_from_index_task.delay(index_node_ids, dataset.id, document.id)
diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py
index fcf57070ee..4488519f4a 100644
--- a/api/services/external_knowledge_service.py
+++ b/api/services/external_knowledge_service.py
@@ -229,7 +229,7 @@ class ExternalDatasetService:
 
     @staticmethod
     def get_external_knowledge_api_settings(settings: dict) -> ExternalKnowledgeApiSetting:
-        return ExternalKnowledgeApiSetting.parse_obj(settings)
+        return ExternalKnowledgeApiSetting.model_validate(settings)
 
     @staticmethod
     def create_external_dataset(tenant_id: str, user_id: str, args: dict) -> Dataset:
diff --git a/api/services/model_load_balancing_service.py b/api/services/model_load_balancing_service.py
index 2145b4cdd5..d830034f11 100644
--- a/api/services/model_load_balancing_service.py
+++ b/api/services/model_load_balancing_service.py
@@ -170,7 +170,9 @@ class ModelLoadBalancingService:
                 if variable in credentials:
                     try:
                         credentials[variable] = encrypter.decrypt_token_with_decoding(
-                            credentials.get(variable), decoding_rsa_key, decoding_cipher_rsa
+                            credentials.get(variable),  # ty: ignore [invalid-argument-type]
+                            decoding_rsa_key,
+                            decoding_cipher_rsa,
                         )
                     except ValueError:
                         pass
diff --git a/api/services/tools/mcp_tools_manage_service.py b/api/services/tools/mcp_tools_manage_service.py
index f45c931768..665ef27d66 100644
--- a/api/services/tools/mcp_tools_manage_service.py
+++ b/api/services/tools/mcp_tools_manage_service.py
@@ -229,7 +229,7 @@ class MCPToolManageService:
         provider_controller = MCPToolProviderController._from_db(mcp_provider)
         tool_configuration = ProviderConfigEncrypter(
             tenant_id=mcp_provider.tenant_id,
-            config=list(provider_controller.get_credentials_schema()),
+            config=list(provider_controller.get_credentials_schema()),  # ty: ignore [invalid-argument-type]
             provider_config_cache=NoOpProviderCredentialCache(),
         )
         credentials = tool_configuration.encrypt(credentials)
diff --git a/api/ty.toml b/api/ty.toml
new file mode 100644
index 0000000000..bb4ff5bbcf
--- /dev/null
+++ b/api/ty.toml
@@ -0,0 +1,16 @@
+[src]
+exclude = [
+    # TODO: enable when violations fixed
+    "core/app/apps/workflow_app_runner.py",
+    "controllers/console/app",
+    "controllers/console/explore",
+    "controllers/console/datasets",
+    "controllers/console/workspace",
+    # non-production or generated code
+    "migrations",
+    "tests",
+]
+
+[rules]
+missing-argument = "ignore" # TODO: restore when **args for constructor is supported properly
+possibly-unbound-attribute = "ignore"
diff --git a/api/uv.lock b/api/uv.lock
index 6818fcf019..9f234e7edb 100644
--- a/api/uv.lock
+++ b/api/uv.lock
@@ -1353,6 +1353,7 @@ dev = [
     { name = "ruff" },
     { name = "scipy-stubs" },
     { name = "testcontainers" },
+    { name = "ty" },
     { name = "types-aiofiles" },
     { name = "types-beautifulsoup4" },
     { name = "types-cachetools" },
@@ -1542,6 +1543,7 @@ dev = [
     { name = "ruff", specifier = "~=0.12.3" },
     { name = "scipy-stubs", specifier = ">=1.15.3.0" },
     { name = "testcontainers", specifier = "~=4.10.0" },
+    { name = "ty", specifier =
"~=0.0.1a19" }, { name = "types-aiofiles", specifier = "~=24.1.0" }, { name = "types-beautifulsoup4", specifier = "~=4.12.0" }, { name = "types-cachetools", specifier = "~=5.5.0" }, @@ -5782,6 +5784,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/b1/d7520cc5cb69c825599042eb3a7c986fa9baa8a8d2dea9acd78e152c81e2/transformers-4.53.3-py3-none-any.whl", hash = "sha256:5aba81c92095806b6baf12df35d756cf23b66c356975fb2a7fa9e536138d7c75", size = 10826382 }, ] +[[package]] +name = "ty" +version = "0.0.1a19" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/04/281c1a3c9c53dae5826b9d01a3412de653e3caf1ca50ce1265da66e06d73/ty-0.0.1a19.tar.gz", hash = "sha256:894f6a13a43989c8ef891ae079b3b60a0c0eae00244abbfbbe498a3840a235ac", size = 4098412, upload-time = "2025-08-19T13:29:58.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/65/a61cfcc7248b0257a3110bf98d3d910a4729c1063abdbfdcd1cad9012323/ty-0.0.1a19-py3-none-linux_armv6l.whl", hash = "sha256:e0e7762f040f4bab1b37c57cb1b43cc3bc5afb703fa5d916dfcafa2ef885190e", size = 8143744, upload-time = "2025-08-19T13:29:13.88Z" }, + { url = "https://files.pythonhosted.org/packages/02/d9/232afef97d9afa2274d23a4c49a3ad690282ca9696e1b6bbb6e4e9a1b072/ty-0.0.1a19-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cd0a67ac875f49f34d9a0b42dcabf4724194558a5dd36867209d5695c67768f7", size = 8305799, upload-time = "2025-08-19T13:29:17.322Z" }, + { url = "https://files.pythonhosted.org/packages/20/14/099d268da7a9cccc6ba38dfc124f6742a1d669bc91f2c61a3465672b4f71/ty-0.0.1a19-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ff8b1c0b85137333c39eccd96c42603af8ba7234d6e2ed0877f66a4a26750dd4", size = 7901431, upload-time = "2025-08-19T13:29:21.635Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cd/3f1ca6e1d7f77cc4d08910a3fc4826313c031c0aae72286ae859e737670c/ty-0.0.1a19-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fef34a29f4b97d78aa30e60adbbb12137cf52b8b2b0f1a408dd0feb0466908a", size = 8051501, upload-time = "2025-08-19T13:29:23.741Z" }, + { url = "https://files.pythonhosted.org/packages/47/72/ddbec39f48ce3f5f6a3fa1f905c8fff2873e59d2030f738814032bd783e3/ty-0.0.1a19-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b0f219cb43c0c50fc1091f8ebd5548d3ef31ee57866517b9521d5174978af9fd", size = 7981234, upload-time = "2025-08-19T13:29:25.839Z" }, + { url = "https://files.pythonhosted.org/packages/f2/0f/58e76b8d4634df066c790d362e8e73b25852279cd6f817f099b42a555a66/ty-0.0.1a19-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22abb6c1f14c65c1a2fafd38e25dd3c87994b3ab88cb0b323235b51dbad082d9", size = 8916394, upload-time = "2025-08-19T13:29:27.932Z" }, + { url = "https://files.pythonhosted.org/packages/70/30/01bfd93ccde11540b503e2539e55f6a1fc6e12433a229191e248946eb753/ty-0.0.1a19-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5b49225c349a3866e38dd297cb023a92d084aec0e895ed30ca124704bff600e6", size = 9412024, upload-time = "2025-08-19T13:29:30.942Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a2/2216d752f5f22c5c0995f9b13f18337301220f2a7d952c972b33e6a63583/ty-0.0.1a19-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:88f41728b3b07402e0861e3c34412ca963268e55f6ab1690208f25d37cb9d63c", size = 9032657, upload-time = "2025-08-19T13:29:33.933Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/c7/e6650b0569be1b69a03869503d07420c9fb3e90c9109b09726c44366ce63/ty-0.0.1a19-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33814a1197ec3e930fcfba6fb80969fe7353957087b42b88059f27a173f7510b", size = 8812775, upload-time = "2025-08-19T13:29:36.505Z" }, + { url = "https://files.pythonhosted.org/packages/35/c6/b8a20e06b97fe8203059d56d8f91cec4f9633e7ba65f413d80f16aa0be04/ty-0.0.1a19-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d71b7f2b674a287258f628acafeecd87691b169522945ff6192cd8a69af15857", size = 8631417, upload-time = "2025-08-19T13:29:38.837Z" }, + { url = "https://files.pythonhosted.org/packages/be/99/821ca1581dcf3d58ffb7bbe1cde7e1644dbdf53db34603a16a459a0b302c/ty-0.0.1a19-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3a7f8ef9ac4c38e8651c18c7380649c5a3fa9adb1a6012c721c11f4bbdc0ce24", size = 7928900, upload-time = "2025-08-19T13:29:41.08Z" }, + { url = "https://files.pythonhosted.org/packages/08/cb/59f74a0522e57565fef99e2287b2bc803ee47ff7dac250af26960636939f/ty-0.0.1a19-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:60f40e72f0fbf4e54aa83d9a6cb1959f551f83de73af96abbb94711c1546bd60", size = 8003310, upload-time = "2025-08-19T13:29:43.165Z" }, + { url = "https://files.pythonhosted.org/packages/4c/b3/1209b9acb5af00a2755114042e48fb0f71decc20d9d77a987bf5b3d1a102/ty-0.0.1a19-py3-none-musllinux_1_2_i686.whl", hash = "sha256:64971e4d3e3f83dc79deb606cc438255146cab1ab74f783f7507f49f9346d89d", size = 8496463, upload-time = "2025-08-19T13:29:46.136Z" }, + { url = "https://files.pythonhosted.org/packages/a2/d6/a4b6ba552d347a08196d83a4d60cb23460404a053dd3596e23a922bce544/ty-0.0.1a19-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9aadbff487e2e1486e83543b4f4c2165557f17432369f419be9ba48dc47625ca", size = 8700633, upload-time = "2025-08-19T13:29:49.351Z" }, + { url = "https://files.pythonhosted.org/packages/96/c5/258f318d68b95685c8d98fb654a38882c9d01ce5d9426bed06124f690f04/ty-0.0.1a19-py3-none-win32.whl", hash = "sha256:00b75b446357ee22bcdeb837cb019dc3bc1dc5e5013ff0f46a22dfe6ce498fe2", size = 7811441, upload-time = "2025-08-19T13:29:52.077Z" }, + { url = "https://files.pythonhosted.org/packages/fb/bb/039227eee3c0c0cddc25f45031eea0f7f10440713f12d333f2f29cf8e934/ty-0.0.1a19-py3-none-win_amd64.whl", hash = "sha256:aaef76b2f44f6379c47adfe58286f0c56041cb2e374fd8462ae8368788634469", size = 8441186, upload-time = "2025-08-19T13:29:54.53Z" }, + { url = "https://files.pythonhosted.org/packages/74/5f/bceb29009670ae6f759340f9cb434121bc5ed84ad0f07bdc6179eaaa3204/ty-0.0.1a19-py3-none-win_arm64.whl", hash = "sha256:893755bb35f30653deb28865707e3b16907375c830546def2741f6ff9a764710", size = 8000810, upload-time = "2025-08-19T13:29:56.796Z" }, +] + [[package]] name = "typer" version = "0.16.0" diff --git a/dev/reformat b/dev/reformat index 71cb6abb1e..9e4f5d2a59 100755 --- a/dev/reformat +++ b/dev/reformat @@ -14,5 +14,8 @@ uv run --directory api --dev ruff format ./ # run dotenv-linter linter uv run --project api --dev dotenv-linter ./api/.env.example ./web/.env.example +# run ty check +dev/ty-check + # run mypy check dev/mypy-check diff --git a/dev/ty-check b/dev/ty-check new file mode 100755 index 0000000000..c6ad827bc8 --- /dev/null +++ b/dev/ty-check @@ -0,0 +1,10 @@ +#!/bin/bash + +set -x + +SCRIPT_DIR="$(dirname "$(realpath "$0")")" +cd "$SCRIPT_DIR/.." 
+
+# run ty checks
+uv run --directory api --dev \
+    ty check
diff --git a/web/.husky/pre-commit b/web/.husky/pre-commit
index 2ad3922e99..55a8124938 100644
--- a/web/.husky/pre-commit
+++ b/web/.husky/pre-commit
@@ -41,6 +41,15 @@ if $api_modified; then
     echo "Please run 'dev/reformat' to fix the fixable linting errors."
     exit 1
   fi
+
+  # run ty checks
+  uv run --directory api --dev ty check || status=$?
+  status=${status:-0}
+  if [ $status -ne 0 ]; then
+    echo "ty type checker on api module error, exit code: $status"
+    echo "Please run 'dev/ty-check' to check the type errors."
+    exit 1
+  fi
 fi
 
 if $web_modified; then

From 68c75f221b788f76e5bf10d205e276da8d382ded Mon Sep 17 00:00:00 2001
From: Novice
Date: Tue, 2 Sep 2025 16:24:03 +0800
Subject: [PATCH 10/21] fix: workflow log status filter add partial success status (#24977)

---
 api/controllers/console/app/workflow_app_log.py | 4 +++-
 web/app/components/app/workflow-log/filter.tsx  | 1 +
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py
index 8d8cdc93cf..76f02041ef 100644
--- a/api/controllers/console/app/workflow_app_log.py
+++ b/api/controllers/console/app/workflow_app_log.py
@@ -27,7 +27,9 @@ class WorkflowAppLogApi(Resource):
         """
         parser = reqparse.RequestParser()
         parser.add_argument("keyword", type=str, location="args")
-        parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args")
+        parser.add_argument(
+            "status", type=str, choices=["succeeded", "failed", "stopped", "partial-succeeded"], location="args"
+        )
         parser.add_argument(
             "created_at__before", type=str, location="args", help="Filter logs created before this timestamp"
         )
diff --git a/web/app/components/app/workflow-log/filter.tsx b/web/app/components/app/workflow-log/filter.tsx
index f60e1f9da2..1ef1bd7a29 100644
--- a/web/app/components/app/workflow-log/filter.tsx
+++ b/web/app/components/app/workflow-log/filter.tsx
@@ -43,6 +43,7 @@ const Filter: FC = ({ queryParams, setQueryParams }: IFilterProps) {
         { value: 'succeeded', name: 'Success' },
         { value: 'failed', name: 'Fail' },
         { value: 'stopped', name: 'Stop' },
+        { value: 'partial-succeeded', name: 'Partial Success' },
       ]}
     />
Date: Tue, 2 Sep 2025 16:24:21 +0800
Subject: [PATCH 11/21] feat: add development environment setup commands to Makefile (#24976)

---
 Makefile | 60 +++++++++++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 59 insertions(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index ff61a00313..388c367fdf 100644
--- a/Makefile
+++ b/Makefile
@@ -4,6 +4,48 @@ WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web
 API_IMAGE=$(DOCKER_REGISTRY)/dify-api
 VERSION=latest
 
+# Backend Development Environment Setup
+.PHONY: dev-setup prepare-docker prepare-web prepare-api
+
+# Default dev setup target
+dev-setup: prepare-docker prepare-web prepare-api
+	@echo "✅ Backend development environment setup complete!"
+
+# Step 1: Prepare Docker middleware
+prepare-docker:
+	@echo "🐳 Setting up Docker middleware..."
+	@cp -n docker/middleware.env.example docker/middleware.env 2>/dev/null || echo "Docker middleware.env already exists"
+	@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev up -d
+	@echo "✅ Docker middleware started"
+
+# Step 2: Prepare web environment
+prepare-web:
+	@echo "🌐 Setting up web environment..."
+	@cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists"
+	@cd web && pnpm install
+	@cd web && pnpm build
+	@echo "✅ Web environment prepared (not started)"
+
+# Step 3: Prepare API environment
+prepare-api:
+	@echo "🔧 Setting up API environment..."
+	@cp -n api/.env.example api/.env 2>/dev/null || echo "API .env already exists"
+	@cd api && uv sync --dev --extra all
+	@cd api && uv run flask db upgrade
+	@echo "✅ API environment prepared (not started)"
+
+# Clean dev environment
+dev-clean:
+	@echo "⚠️ Stopping Docker containers..."
+	@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev down
+	@echo "🗑️ Removing volumes..."
+	@rm -rf docker/volumes/db
+	@rm -rf docker/volumes/redis
+	@rm -rf docker/volumes/plugin_daemon
+	@rm -rf docker/volumes/weaviate
+	@rm -rf api/storage
+	@echo "✅ Cleanup complete"
+
 # Build Docker images
 build-web:
 	@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
@@ -39,5 +81,21 @@ build-push-web: build-web push-web
 build-push-all: build-all push-all
 	@echo "All Docker images have been built and pushed."
 
+# Help target
+help:
+	@echo "Development Setup Targets:"
+	@echo "  make dev-setup      - Run all setup steps for backend dev environment"
+	@echo "  make prepare-docker - Set up Docker middleware"
+	@echo "  make prepare-web    - Set up web environment"
+	@echo "  make prepare-api    - Set up API environment"
+	@echo "  make dev-clean      - Stop Docker middleware containers"
+	@echo ""
+	@echo "Docker Build Targets:"
+	@echo "  make build-web      - Build web Docker image"
+	@echo "  make build-api      - Build API Docker image"
+	@echo "  make build-all      - Build all Docker images"
+	@echo "  make push-all       - Push all Docker images"
+	@echo "  make build-push-all - Build and push all Docker images"
+
 # Phony targets
-.PHONY: build-web build-api push-web push-api build-all push-all build-push-all
+.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help

From af88266212f74bc56b25e0ede6589cc8967df555 Mon Sep 17 00:00:00 2001
From: Bowen Liang
Date: Tue, 2 Sep 2025 16:59:11 +0800
Subject: [PATCH 12/21] chore: run ty check CI action only when api code changed (#24986)

---
 .github/workflows/style.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml
index d5de70ffba..a3643c9931 100644
--- a/.github/workflows/style.yml
+++ b/.github/workflows/style.yml
@@ -45,6 +45,7 @@ jobs:
         run: uv sync --project api --dev
 
       - name: Run ty check
+        if: steps.changed-files.outputs.any_changed == 'true'
         run: dev/ty-check
 
       - name: Dotenv check

From af351b17232ce7ff945e01d725e591fbf3f23271 Mon Sep 17 00:00:00 2001
From: 17hz <0x149527@gmail.com>
Date: Tue, 2 Sep 2025 17:06:10 +0800
Subject: [PATCH 13/21] fix: ensure the modal is closed by level (#24984)

---
 .../model-provider-page/model-modal/index.tsx | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx b/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx
index e9050e4837..d754d24d90 100644
--- a/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx
+++ b/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx
@@ -2,6 +2,7 @@ import type { FC } from 'react'
 import {
   memo,
   useCallback,
+  useEffect,
   useMemo,
   useRef,
 } from 'react'
@@ -188,6 +189,20 @@ const
ModelModal: FC = ({ return null }, [model, provider]) + useEffect(() => { + const handleKeyDown = (event: KeyboardEvent) => { + if (event.key === 'Escape') { + event.stopPropagation() + onCancel() + } + } + + document.addEventListener('keydown', handleKeyDown, true) + return () => { + document.removeEventListener('keydown', handleKeyDown, true) + } + }, [onCancel]) + return ( From 32972b45dbc19276ecf01bcf81e3479d667ebd8f Mon Sep 17 00:00:00 2001 From: 17hz <0x149527@gmail.com> Date: Tue, 2 Sep 2025 18:57:24 +0800 Subject: [PATCH 14/21] fix: remove unnecessary modal visibility toggle on error in name save (#25001) --- web/app/account/(commonLayout)/account-page/index.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/web/app/account/(commonLayout)/account-page/index.tsx b/web/app/account/(commonLayout)/account-page/index.tsx index 47b8f045d2..2cddc01876 100644 --- a/web/app/account/(commonLayout)/account-page/index.tsx +++ b/web/app/account/(commonLayout)/account-page/index.tsx @@ -69,7 +69,6 @@ export default function AccountPage() { } catch (e) { notify({ type: 'error', message: (e as Error).message }) - setEditNameModalVisible(false) setEditing(false) } } From 5aa8c9c8dfb21e6e41686b742cef3e8533b3e934 Mon Sep 17 00:00:00 2001 From: 17hz <0x149527@gmail.com> Date: Tue, 2 Sep 2025 18:57:35 +0800 Subject: [PATCH 15/21] fix: refresh UI after user profile change (#24998) --- web/context/app-context.tsx | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/web/context/app-context.tsx b/web/context/app-context.tsx index c033e1dcfa..ae1bc1269c 100644 --- a/web/context/app-context.tsx +++ b/web/context/app-context.tsx @@ -86,10 +86,9 @@ export const AppContextProvider: FC = ({ children }) => const isCurrentWorkspaceEditor = useMemo(() => ['owner', 'admin', 'editor'].includes(currentWorkspace.role), [currentWorkspace.role]) const isCurrentWorkspaceDatasetOperator = useMemo(() => currentWorkspace.role === 'dataset_operator', [currentWorkspace.role]) const updateUserProfileAndVersion = useCallback(async () => { - if (userProfileResponse) { + if (userProfileResponse && !userProfileResponse.bodyUsed) { try { - const clonedResponse = (userProfileResponse as Response).clone() - const result = await clonedResponse.json() + const result = await userProfileResponse.json() setUserProfile(result) const current_version = userProfileResponse.headers.get('x-version') const current_env = process.env.NODE_ENV === 'development' ? 'DEVELOPMENT' : userProfileResponse.headers.get('x-env') From a418c43d32b066331d2b75be095a01b3e233d412 Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Tue, 2 Sep 2025 20:13:43 +0900 Subject: [PATCH 16/21] example add more type check (#24999) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/models/base.py | 10 +++++++++- api/models/tools.py | 6 +++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/api/models/base.py b/api/models/base.py index bd120f5487..76848825fe 100644 --- a/api/models/base.py +++ b/api/models/base.py @@ -1,7 +1,15 @@ -from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import DeclarativeBase, MappedAsDataclass from models.engine import metadata class Base(DeclarativeBase): metadata = metadata + + +class TypeBase(MappedAsDataclass, DeclarativeBase): + """ + This is for adding type, after all finished, rename to Base. 
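+
+    A minimal sketch of a model on this base (hypothetical Example class, assuming
+    the usual sqlalchemy imports; columns with server-generated values must opt out
+    of the generated __init__ via init=False, as ToolLabelBinding does below):
+
+        class Example(TypeBase):
+            __tablename__ = "examples"
+
+            id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
+            name: Mapped[str] = mapped_column(String(64), nullable=False)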
+ """ + + metadata = metadata diff --git a/api/models/tools.py b/api/models/tools.py index d88d817374..08219ebd2f 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -14,7 +14,7 @@ from core.mcp.types import Tool from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration -from models.base import Base +from models.base import Base, TypeBase from .engine import db from .model import Account, App, Tenant @@ -160,7 +160,7 @@ class ApiToolProvider(Base): return db.session.query(Tenant).where(Tenant.id == self.tenant_id).first() -class ToolLabelBinding(Base): +class ToolLabelBinding(TypeBase): """ The table stores the labels for tools. """ @@ -171,7 +171,7 @@ class ToolLabelBinding(Base): sa.UniqueConstraint("tool_id", "label_name", name="unique_tool_label_bind"), ) - id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False) # tool id tool_id: Mapped[str] = mapped_column(String(64), nullable=False) # tool type From ed5ed0306e01da97088d1847088c152212480f1f Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Tue, 2 Sep 2025 19:14:30 +0800 Subject: [PATCH 17/21] minor fix: fix the check of subscription capacity limit (#24991) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/tasks/duplicate_document_indexing_task.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/api/tasks/duplicate_document_indexing_task.py b/api/tasks/duplicate_document_indexing_task.py index 955e898ec1..d93f30ba37 100644 --- a/api/tasks/duplicate_document_indexing_task.py +++ b/api/tasks/duplicate_document_indexing_task.py @@ -45,9 +45,11 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): batch_upload_limit = int(dify_config.BATCH_UPLOAD_LIMIT) if count > batch_upload_limit: raise ValueError(f"You have reached the batch upload limit of {batch_upload_limit}.") - if 0 < vector_space.limit <= vector_space.size: + current = int(getattr(vector_space, "size", 0) or 0) + limit = int(getattr(vector_space, "limit", 0) or 0) + if limit > 0 and (current + count) > limit: raise ValueError( - "Your total number of documents plus the number of uploads have over the limit of " + "Your total number of documents plus the number of uploads have exceeded the limit of " "your subscription." 
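+                    # raised when limit > 0 and the current size plus this batch exceeds the quota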
) except Exception as e: From 8fcc864fb764b8c86c7789c5fbbb6ae47c481527 Mon Sep 17 00:00:00 2001 From: Yongtao Huang Date: Tue, 2 Sep 2025 20:59:08 +0800 Subject: [PATCH 18/21] Post fix of #23224 (#25007) --- api/core/rag/retrieval/dataset_retrieval.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index 11010c9d60..1c1019e18c 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -534,7 +534,6 @@ class DatasetRetrieval: synchronize_session=False, ) ) - db.session.commit() else: query = db.session.query(DocumentSegment).where( DocumentSegment.index_node_id == document.metadata["doc_id"] @@ -596,7 +595,7 @@ class DatasetRetrieval: with flask_app.app_context(): with Session(db.engine) as session: dataset_stmt = select(Dataset).where(Dataset.id == dataset_id) - dataset = db.session.scalar(dataset_stmt) + dataset = session.scalar(dataset_stmt) if not dataset: return [] From 25a11bfafc91df9dee53e79c51d6f73408e40a66 Mon Sep 17 00:00:00 2001 From: GuanMu Date: Tue, 2 Sep 2025 21:36:52 +0800 Subject: [PATCH 19/21] Export DSL from history (#24939) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/controllers/console/app/app.py | 7 +- api/services/app_dsl_service.py | 10 ++- api/services/workflow_service.py | 8 +- .../services/test_app_dsl_service.py | 82 ++++++++++++++++++- .../hooks/use-workflow-interactions.ts | 3 +- .../context-menu/use-context-menu.ts | 4 + .../panel/version-history-panel/index.tsx | 8 +- web/app/components/workflow/types.ts | 1 + web/service/apps.ts | 9 +- 9 files changed, 119 insertions(+), 13 deletions(-) diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index a6eb86122d..10753d2f95 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -237,9 +237,14 @@ class AppExportApi(Resource): # Add include_secret params parser = reqparse.RequestParser() parser.add_argument("include_secret", type=inputs.boolean, default=False, location="args") + parser.add_argument("workflow_id", type=str, location="args") args = parser.parse_args() - return {"data": AppDslService.export_dsl(app_model=app_model, include_secret=args["include_secret"])} + return { + "data": AppDslService.export_dsl( + app_model=app_model, include_secret=args["include_secret"], workflow_id=args.get("workflow_id") + ) + } class AppNameApi(Resource): diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py index 18c72ebde2..2663cb3805 100644 --- a/api/services/app_dsl_service.py +++ b/api/services/app_dsl_service.py @@ -532,7 +532,7 @@ class AppDslService: return app @classmethod - def export_dsl(cls, app_model: App, include_secret: bool = False) -> str: + def export_dsl(cls, app_model: App, include_secret: bool = False, workflow_id: Optional[str] = None) -> str: """ Export app :param app_model: App instance @@ -556,7 +556,7 @@ class AppDslService: if app_mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}: cls._append_workflow_export_data( - export_data=export_data, app_model=app_model, include_secret=include_secret + export_data=export_data, app_model=app_model, include_secret=include_secret, workflow_id=workflow_id ) else: cls._append_model_config_export_data(export_data, app_model) @@ -564,14 +564,16 @@ class AppDslService: return yaml.dump(export_data, allow_unicode=True) # type: ignore @classmethod - def 
_append_workflow_export_data(cls, *, export_data: dict, app_model: App, include_secret: bool) -> None: + def _append_workflow_export_data( + cls, *, export_data: dict, app_model: App, include_secret: bool, workflow_id: Optional[str] = None + ) -> None: """ Append workflow export data :param export_data: export data :param app_model: App instance """ workflow_service = WorkflowService() - workflow = workflow_service.get_draft_workflow(app_model) + workflow = workflow_service.get_draft_workflow(app_model, workflow_id) if not workflow: raise ValueError("Missing draft workflow configuration, please check.") diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 3a68379789..3f54f6624f 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -96,10 +96,12 @@ class WorkflowService: ) return db.session.execute(stmt).scalar_one() - def get_draft_workflow(self, app_model: App) -> Optional[Workflow]: + def get_draft_workflow(self, app_model: App, workflow_id: Optional[str] = None) -> Optional[Workflow]: """ Get draft workflow """ + if workflow_id: + return self.get_published_workflow_by_id(app_model, workflow_id) # fetch draft workflow by app_model workflow = ( db.session.query(Workflow) @@ -115,7 +117,9 @@ class WorkflowService: return workflow def get_published_workflow_by_id(self, app_model: App, workflow_id: str) -> Optional[Workflow]: - # fetch published workflow by workflow_id + """ + fetch published workflow by workflow_id + """ workflow = ( db.session.query(Workflow) .where( diff --git a/api/tests/test_containers_integration_tests/services/test_app_dsl_service.py b/api/tests/test_containers_integration_tests/services/test_app_dsl_service.py index d83983d0ff..119f92d772 100644 --- a/api/tests/test_containers_integration_tests/services/test_app_dsl_service.py +++ b/api/tests/test_containers_integration_tests/services/test_app_dsl_service.py @@ -322,7 +322,87 @@ class TestAppDslService: # Verify workflow service was called mock_external_service_dependencies["workflow_service"].return_value.get_draft_workflow.assert_called_once_with( - app + app, None + ) + + def test_export_dsl_with_workflow_id_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful DSL export with specific workflow ID. 
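+        The workflow service is mocked to return a workflow only for "specific-workflow-id",
+        and the call is asserted to pass that id through to get_draft_workflow.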
+ """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Update app to workflow mode + app.mode = "workflow" + db_session_with_containers.commit() + + # Mock workflow service to return a workflow when specific workflow_id is provided + mock_workflow = MagicMock() + mock_workflow.to_dict.return_value = { + "graph": {"nodes": [{"id": "start", "type": "start", "data": {"type": "start"}}], "edges": []}, + "features": {}, + "environment_variables": [], + "conversation_variables": [], + } + + # Mock the get_draft_workflow method to return different workflows based on workflow_id + def mock_get_draft_workflow(app_model, workflow_id=None): + if workflow_id == "specific-workflow-id": + return mock_workflow + return None + + mock_external_service_dependencies[ + "workflow_service" + ].return_value.get_draft_workflow.side_effect = mock_get_draft_workflow + + # Export DSL with specific workflow ID + exported_dsl = AppDslService.export_dsl(app, include_secret=False, workflow_id="specific-workflow-id") + + # Parse exported YAML + exported_data = yaml.safe_load(exported_dsl) + + # Verify exported data structure + assert exported_data["kind"] == "app" + assert exported_data["app"]["name"] == app.name + assert exported_data["app"]["mode"] == "workflow" + + # Verify workflow was exported + assert "workflow" in exported_data + assert "graph" in exported_data["workflow"] + assert "nodes" in exported_data["workflow"]["graph"] + + # Verify dependencies were exported + assert "dependencies" in exported_data + assert isinstance(exported_data["dependencies"], list) + + # Verify workflow service was called with specific workflow ID + mock_external_service_dependencies["workflow_service"].return_value.get_draft_workflow.assert_called_once_with( + app, "specific-workflow-id" + ) + + def test_export_dsl_with_invalid_workflow_id_raises_error( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test that export_dsl raises error when invalid workflow ID is provided. 
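+        The mocked workflow service returns None for the unknown id, so export_dsl is
+        expected to raise ValueError.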
+ """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Update app to workflow mode + app.mode = "workflow" + db_session_with_containers.commit() + + # Mock workflow service to return None when invalid workflow ID is provided + mock_external_service_dependencies["workflow_service"].return_value.get_draft_workflow.return_value = None + + # Export DSL with invalid workflow ID should raise ValueError + with pytest.raises(ValueError, match="Missing draft workflow configuration, please check."): + AppDslService.export_dsl(app, include_secret=False, workflow_id="invalid-workflow-id") + + # Verify workflow service was called with the invalid workflow ID + mock_external_service_dependencies["workflow_service"].return_value.get_draft_workflow.assert_called_once_with( + app, "invalid-workflow-id" ) def test_check_dependencies_success(self, db_session_with_containers, mock_external_service_dependencies): diff --git a/web/app/components/workflow/hooks/use-workflow-interactions.ts b/web/app/components/workflow/hooks/use-workflow-interactions.ts index d8653a5942..009e4d878a 100644 --- a/web/app/components/workflow/hooks/use-workflow-interactions.ts +++ b/web/app/components/workflow/hooks/use-workflow-interactions.ts @@ -346,7 +346,7 @@ export const useDSL = () => { const appDetail = useAppStore(s => s.appDetail) - const handleExportDSL = useCallback(async (include = false) => { + const handleExportDSL = useCallback(async (include = false, workflowId?: string) => { if (!appDetail) return @@ -358,6 +358,7 @@ export const useDSL = () => { await doSyncWorkflowDraft() const { data } = await exportAppConfig({ appID: appDetail.id, + workflowID: workflowId, include, }) const a = document.createElement('a') diff --git a/web/app/components/workflow/panel/version-history-panel/context-menu/use-context-menu.ts b/web/app/components/workflow/panel/version-history-panel/context-menu/use-context-menu.ts index 62043713e8..c56d286f83 100644 --- a/web/app/components/workflow/panel/version-history-panel/context-menu/use-context-menu.ts +++ b/web/app/components/workflow/panel/version-history-panel/context-menu/use-context-menu.ts @@ -29,6 +29,10 @@ const useContextMenu = (props: ContextMenuProps) => { key: VersionHistoryContextMenuOptions.edit, name: t('workflow.versionHistory.nameThisVersion'), }, + { + key: VersionHistoryContextMenuOptions.exportDSL, + name: t('app.export'), + }, { key: VersionHistoryContextMenuOptions.copyId, name: t('workflow.versionHistory.copyId'), diff --git a/web/app/components/workflow/panel/version-history-panel/index.tsx b/web/app/components/workflow/panel/version-history-panel/index.tsx index 70acca7597..5a1bfe01e9 100644 --- a/web/app/components/workflow/panel/version-history-panel/index.tsx +++ b/web/app/components/workflow/panel/version-history-panel/index.tsx @@ -3,7 +3,7 @@ import React, { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' import { RiArrowDownDoubleLine, RiCloseLine, RiLoader2Line } from '@remixicon/react' import copy from 'copy-to-clipboard' -import { useNodesSyncDraft, useWorkflowRun } from '../../hooks' +import { useDSL, useNodesSyncDraft, useWorkflowRun } from '../../hooks' import { useStore, useWorkflowStore } from '../../store' import { VersionHistoryContextMenuOptions, WorkflowVersionFilterOptions } from '../../types' import VersionHistoryItem from './version-history-item' @@ -33,6 +33,7 @@ const VersionHistoryPanel = () => { const workflowStore = 
useWorkflowStore() const { handleSyncWorkflowDraft } = useNodesSyncDraft() const { handleRestoreFromPublishedWorkflow, handleLoadBackupDraft } = useWorkflowRun() + const { handleExportDSL } = useDSL() const appDetail = useAppStore.getState().appDetail const setShowWorkflowVersionHistoryPanel = useStore(s => s.setShowWorkflowVersionHistoryPanel) const currentVersion = useStore(s => s.currentVersion) @@ -107,8 +108,11 @@ const VersionHistoryPanel = () => { message: t('workflow.versionHistory.action.copyIdSuccess'), }) break + case VersionHistoryContextMenuOptions.exportDSL: + handleExportDSL(false, item.id) + break } - }, [t]) + }, [t, handleExportDSL]) const handleCancel = useCallback((operation: VersionHistoryContextMenuOptions) => { switch (operation) { diff --git a/web/app/components/workflow/types.ts b/web/app/components/workflow/types.ts index 5cad042cf6..30c00c7efe 100644 --- a/web/app/components/workflow/types.ts +++ b/web/app/components/workflow/types.ts @@ -452,6 +452,7 @@ export enum VersionHistoryContextMenuOptions { restore = 'restore', edit = 'edit', delete = 'delete', + exportDSL = 'exportDSL', copyId = 'copyId', } diff --git a/web/service/apps.ts b/web/service/apps.ts index 1d7b0bccdb..5602f75791 100644 --- a/web/service/apps.ts +++ b/web/service/apps.ts @@ -35,8 +35,13 @@ export const copyApp: Fetcher(`apps/${appID}/copy`, { body: { name, icon_type, icon, icon_background, mode, description } }) } -export const exportAppConfig: Fetcher<{ data: string }, { appID: string; include?: boolean }> = ({ appID, include = false }) => { - return get<{ data: string }>(`apps/${appID}/export?include_secret=${include}`) +export const exportAppConfig: Fetcher<{ data: string }, { appID: string; include?: boolean; workflowID?: string }> = ({ appID, include = false, workflowID }) => { + const params = new URLSearchParams({ + include_secret: include.toString(), + }) + if (workflowID) + params.append('workflow_id', workflowID) + return get<{ data: string }>(`apps/${appID}/export?${params.toString()}`) } // TODO: delete From b5216df4feffb396a2e9917dc3c9ca33f6674a5f Mon Sep 17 00:00:00 2001 From: Will Date: Tue, 2 Sep 2025 21:37:06 +0800 Subject: [PATCH 20/21] fix: xxx is not bound to a Session (#24966) --- api/core/app/apps/advanced_chat/app_generator.py | 2 +- api/services/account_service.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 74e282fdcd..561af7bacf 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -453,7 +453,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): # release database connection, because the following new thread operations may take a long time db.session.refresh(workflow) db.session.refresh(message) - db.session.refresh(user) + # db.session.refresh(user) db.session.close() # return response or stream generator diff --git a/api/services/account_service.py b/api/services/account_service.py index 50ce171ded..b12d4623bb 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -145,7 +145,10 @@ class AccountService: if naive_utc_now() - account.last_active_at > timedelta(minutes=10): account.last_active_at = naive_utc_now() db.session.commit() - + # NOTE: make sure account is accessible outside of a db session + # This ensures that it will work correctly after upgrading to Flask version 3.1.2 + db.session.refresh(account) + db.session.close() 
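+        # refresh() eagerly loads all attributes first, so the detached instance stays readable after close()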
return account @staticmethod From d33dfee8a3b1fa7399428a3c4c9bf9a220a98ab8 Mon Sep 17 00:00:00 2001 From: Will Date: Tue, 2 Sep 2025 21:37:21 +0800 Subject: [PATCH 21/21] fix: EndUser is not bound to a Session (#25010) --- api/controllers/service_api/wraps.py | 39 ++++++++++++++-------------- api/controllers/web/wraps.py | 26 ++++++++++--------- 2 files changed, 34 insertions(+), 31 deletions(-) diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index cc4b5f65bd..67d48319d4 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -291,27 +291,28 @@ def create_or_update_end_user_for_user_id(app_model: App, user_id: Optional[str] if not user_id: user_id = "DEFAULT-USER" - end_user = ( - db.session.query(EndUser) - .where( - EndUser.tenant_id == app_model.tenant_id, - EndUser.app_id == app_model.id, - EndUser.session_id == user_id, - EndUser.type == "service_api", + with Session(db.engine, expire_on_commit=False) as session: + end_user = ( + session.query(EndUser) + .where( + EndUser.tenant_id == app_model.tenant_id, + EndUser.app_id == app_model.id, + EndUser.session_id == user_id, + EndUser.type == "service_api", + ) + .first() ) - .first() - ) - if end_user is None: - end_user = EndUser( - tenant_id=app_model.tenant_id, - app_id=app_model.id, - type="service_api", - is_anonymous=user_id == "DEFAULT-USER", - session_id=user_id, - ) - db.session.add(end_user) - db.session.commit() + if end_user is None: + end_user = EndUser( + tenant_id=app_model.tenant_id, + app_id=app_model.id, + type="service_api", + is_anonymous=user_id == "DEFAULT-USER", + session_id=user_id, + ) + session.add(end_user) + session.commit() return end_user diff --git a/api/controllers/web/wraps.py b/api/controllers/web/wraps.py index 94fa5d5626..1fc8916cab 100644 --- a/api/controllers/web/wraps.py +++ b/api/controllers/web/wraps.py @@ -4,6 +4,7 @@ from functools import wraps from flask import request from flask_restx import Resource from sqlalchemy import select +from sqlalchemy.orm import Session from werkzeug.exceptions import BadRequest, NotFound, Unauthorized from controllers.web.error import WebAppAuthAccessDeniedError, WebAppAuthRequiredError @@ -49,18 +50,19 @@ def decode_jwt_token(): decoded = PassportService().verify(tk) app_code = decoded.get("app_code") app_id = decoded.get("app_id") - app_model = db.session.scalar(select(App).where(App.id == app_id)) - site = db.session.scalar(select(Site).where(Site.code == app_code)) - if not app_model: - raise NotFound() - if not app_code or not site: - raise BadRequest("Site URL is no longer valid.") - if app_model.enable_site is False: - raise BadRequest("Site is disabled.") - end_user_id = decoded.get("end_user_id") - end_user = db.session.scalar(select(EndUser).where(EndUser.id == end_user_id)) - if not end_user: - raise NotFound() + with Session(db.engine, expire_on_commit=False) as session: + app_model = session.scalar(select(App).where(App.id == app_id)) + site = session.scalar(select(Site).where(Site.code == app_code)) + if not app_model: + raise NotFound() + if not app_code or not site: + raise BadRequest("Site URL is no longer valid.") + if app_model.enable_site is False: + raise BadRequest("Site is disabled.") + end_user_id = decoded.get("end_user_id") + end_user = session.scalar(select(EndUser).where(EndUser.id == end_user_id)) + if not end_user: + raise NotFound() # for enterprise webapp auth app_web_auth_enabled = False