From ea320ce05508c6a6bd8707e8882bbd77273bde23 Mon Sep 17 00:00:00 2001 From: aka James4u Date: Fri, 21 Nov 2025 21:38:35 -0800 Subject: [PATCH 01/22] feat: add comprehensive unit tests for dataset service creation methods (#28522) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- .../broadcast_channel/redis/_subscription.py | 32 +- .../test_dataset_service_create_dataset.py | 819 ++++++++++++++++++ 2 files changed, 846 insertions(+), 5 deletions(-) create mode 100644 api/tests/unit_tests/services/test_dataset_service_create_dataset.py diff --git a/api/libs/broadcast_channel/redis/_subscription.py b/api/libs/broadcast_channel/redis/_subscription.py index 571ad87468..7d4b8e63ca 100644 --- a/api/libs/broadcast_channel/redis/_subscription.py +++ b/api/libs/broadcast_channel/redis/_subscription.py @@ -63,7 +63,19 @@ class RedisSubscriptionBase(Subscription): pubsub = self._pubsub assert pubsub is not None, "PubSub should not be None while starting listening." 
while not self._closed.is_set(): - raw_message = self._get_message() + try: + raw_message = self._get_message() + except Exception as e: + # Log the exception and exit the listener thread gracefully + # This handles Redis connection errors and other exceptions + _logger.error( + "Error getting message from Redis %s subscription, topic=%s: %s", + self._get_subscription_type(), + self._topic, + e, + exc_info=True, + ) + break if raw_message is None: continue @@ -98,10 +110,20 @@ class RedisSubscriptionBase(Subscription): self._enqueue_message(payload_bytes) _logger.debug("%s listener thread stopped for channel %s", self._get_subscription_type().title(), self._topic) - self._unsubscribe() - pubsub.close() - _logger.debug("%s PubSub closed for topic %s", self._get_subscription_type().title(), self._topic) - self._pubsub = None + try: + self._unsubscribe() + pubsub.close() + _logger.debug("%s PubSub closed for topic %s", self._get_subscription_type().title(), self._topic) + except Exception as e: + _logger.error( + "Error during cleanup of Redis %s subscription, topic=%s: %s", + self._get_subscription_type(), + self._topic, + e, + exc_info=True, + ) + finally: + self._pubsub = None def _enqueue_message(self, payload: bytes) -> None: """Enqueue a message to the internal queue with dropping behavior.""" diff --git a/api/tests/unit_tests/services/test_dataset_service_create_dataset.py b/api/tests/unit_tests/services/test_dataset_service_create_dataset.py new file mode 100644 index 0000000000..4d63c5f911 --- /dev/null +++ b/api/tests/unit_tests/services/test_dataset_service_create_dataset.py @@ -0,0 +1,819 @@ +""" +Comprehensive unit tests for DatasetService creation methods. 
+ +This test suite covers: +- create_empty_dataset for internal datasets +- create_empty_dataset for external datasets +- create_empty_rag_pipeline_dataset +- Error conditions and edge cases +""" + +from unittest.mock import Mock, create_autospec, patch +from uuid import uuid4 + +import pytest + +from core.model_runtime.entities.model_entities import ModelType +from models.account import Account +from models.dataset import Dataset, Pipeline +from services.dataset_service import DatasetService +from services.entities.knowledge_entities.knowledge_entities import RetrievalModel +from services.entities.knowledge_entities.rag_pipeline_entities import ( + IconInfo, + RagPipelineDatasetCreateEntity, +) +from services.errors.dataset import DatasetNameDuplicateError + + +class DatasetCreateTestDataFactory: + """Factory class for creating test data and mock objects for dataset creation tests.""" + + @staticmethod + def create_account_mock( + account_id: str = "account-123", + tenant_id: str = "tenant-123", + **kwargs, + ) -> Mock: + """Create a mock account.""" + account = create_autospec(Account, instance=True) + account.id = account_id + account.current_tenant_id = tenant_id + for key, value in kwargs.items(): + setattr(account, key, value) + return account + + @staticmethod + def create_embedding_model_mock(model: str = "text-embedding-ada-002", provider: str = "openai") -> Mock: + """Create a mock embedding model.""" + embedding_model = Mock() + embedding_model.model = model + embedding_model.provider = provider + return embedding_model + + @staticmethod + def create_retrieval_model_mock() -> Mock: + """Create a mock retrieval model.""" + retrieval_model = Mock(spec=RetrievalModel) + retrieval_model.model_dump.return_value = { + "search_method": "semantic_search", + "top_k": 2, + "score_threshold": 0.0, + } + retrieval_model.reranking_model = None + return retrieval_model + + @staticmethod + def create_external_knowledge_api_mock(api_id: str = "api-123", **kwargs) -> 
Mock: + """Create a mock external knowledge API.""" + api = Mock() + api.id = api_id + for key, value in kwargs.items(): + setattr(api, key, value) + return api + + @staticmethod + def create_dataset_mock( + dataset_id: str = "dataset-123", + name: str = "Test Dataset", + tenant_id: str = "tenant-123", + **kwargs, + ) -> Mock: + """Create a mock dataset.""" + dataset = create_autospec(Dataset, instance=True) + dataset.id = dataset_id + dataset.name = name + dataset.tenant_id = tenant_id + for key, value in kwargs.items(): + setattr(dataset, key, value) + return dataset + + @staticmethod + def create_pipeline_mock( + pipeline_id: str = "pipeline-123", + name: str = "Test Pipeline", + **kwargs, + ) -> Mock: + """Create a mock pipeline.""" + pipeline = Mock(spec=Pipeline) + pipeline.id = pipeline_id + pipeline.name = name + for key, value in kwargs.items(): + setattr(pipeline, key, value) + return pipeline + + +class TestDatasetServiceCreateEmptyDataset: + """ + Comprehensive unit tests for DatasetService.create_empty_dataset method. 
+ + This test suite covers: + - Internal dataset creation (vendor provider) + - External dataset creation + - High quality indexing technique with embedding models + - Economy indexing technique + - Retrieval model configuration + - Error conditions (duplicate names, missing external knowledge IDs) + """ + + @pytest.fixture + def mock_dataset_service_dependencies(self): + """Common mock setup for dataset service dependencies.""" + with ( + patch("services.dataset_service.db.session") as mock_db, + patch("services.dataset_service.ModelManager") as mock_model_manager, + patch("services.dataset_service.DatasetService.check_embedding_model_setting") as mock_check_embedding, + patch("services.dataset_service.DatasetService.check_reranking_model_setting") as mock_check_reranking, + patch("services.dataset_service.ExternalDatasetService") as mock_external_service, + ): + yield { + "db_session": mock_db, + "model_manager": mock_model_manager, + "check_embedding": mock_check_embedding, + "check_reranking": mock_check_reranking, + "external_service": mock_external_service, + } + + # ==================== Internal Dataset Creation Tests ==================== + + def test_create_internal_dataset_basic_success(self, mock_dataset_service_dependencies): + """Test successful creation of basic internal dataset.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = "Test Dataset" + description = "Test description" + + # Mock database query to return None (no duplicate name) + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + # Mock database session operations + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Act + result = DatasetService.create_empty_dataset( + tenant_id=tenant_id, + name=name, + 
description=description, + indexing_technique=None, + account=account, + ) + + # Assert + assert result is not None + assert result.name == name + assert result.description == description + assert result.tenant_id == tenant_id + assert result.created_by == account.id + assert result.updated_by == account.id + assert result.provider == "vendor" + assert result.permission == "only_me" + mock_db.add.assert_called_once() + mock_db.commit.assert_called_once() + + def test_create_internal_dataset_with_economy_indexing(self, mock_dataset_service_dependencies): + """Test successful creation of internal dataset with economy indexing.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = "Economy Dataset" + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Act + result = DatasetService.create_empty_dataset( + tenant_id=tenant_id, + name=name, + description=None, + indexing_technique="economy", + account=account, + ) + + # Assert + assert result.indexing_technique == "economy" + assert result.embedding_model_provider is None + assert result.embedding_model is None + mock_db.commit.assert_called_once() + + def test_create_internal_dataset_with_high_quality_indexing_default_embedding( + self, mock_dataset_service_dependencies + ): + """Test creation with high_quality indexing using default embedding model.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = "High Quality Dataset" + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + 
mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + # Mock model manager + embedding_model = DatasetCreateTestDataFactory.create_embedding_model_mock() + mock_model_manager_instance = Mock() + mock_model_manager_instance.get_default_model_instance.return_value = embedding_model + mock_dataset_service_dependencies["model_manager"].return_value = mock_model_manager_instance + + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Act + result = DatasetService.create_empty_dataset( + tenant_id=tenant_id, + name=name, + description=None, + indexing_technique="high_quality", + account=account, + ) + + # Assert + assert result.indexing_technique == "high_quality" + assert result.embedding_model_provider == embedding_model.provider + assert result.embedding_model == embedding_model.model + mock_model_manager_instance.get_default_model_instance.assert_called_once_with( + tenant_id=tenant_id, model_type=ModelType.TEXT_EMBEDDING + ) + mock_db.commit.assert_called_once() + + def test_create_internal_dataset_with_high_quality_indexing_custom_embedding( + self, mock_dataset_service_dependencies + ): + """Test creation with high_quality indexing using custom embedding model.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = "Custom Embedding Dataset" + embedding_provider = "openai" + embedding_model_name = "text-embedding-3-small" + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + # Mock model manager + embedding_model = DatasetCreateTestDataFactory.create_embedding_model_mock( + model=embedding_model_name, provider=embedding_provider + ) + mock_model_manager_instance = Mock() + mock_model_manager_instance.get_model_instance.return_value = 
embedding_model + mock_dataset_service_dependencies["model_manager"].return_value = mock_model_manager_instance + + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Act + result = DatasetService.create_empty_dataset( + tenant_id=tenant_id, + name=name, + description=None, + indexing_technique="high_quality", + account=account, + embedding_model_provider=embedding_provider, + embedding_model_name=embedding_model_name, + ) + + # Assert + assert result.indexing_technique == "high_quality" + assert result.embedding_model_provider == embedding_provider + assert result.embedding_model == embedding_model_name + mock_dataset_service_dependencies["check_embedding"].assert_called_once_with( + tenant_id, embedding_provider, embedding_model_name + ) + mock_model_manager_instance.get_model_instance.assert_called_once_with( + tenant_id=tenant_id, + provider=embedding_provider, + model_type=ModelType.TEXT_EMBEDDING, + model=embedding_model_name, + ) + mock_db.commit.assert_called_once() + + def test_create_internal_dataset_with_retrieval_model(self, mock_dataset_service_dependencies): + """Test creation with retrieval model configuration.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = "Retrieval Model Dataset" + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + # Mock retrieval model + retrieval_model = DatasetCreateTestDataFactory.create_retrieval_model_mock() + retrieval_model_dict = {"search_method": "semantic_search", "top_k": 2, "score_threshold": 0.0} + + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Act + result = DatasetService.create_empty_dataset( + 
tenant_id=tenant_id, + name=name, + description=None, + indexing_technique=None, + account=account, + retrieval_model=retrieval_model, + ) + + # Assert + assert result.retrieval_model == retrieval_model_dict + retrieval_model.model_dump.assert_called_once() + mock_db.commit.assert_called_once() + + def test_create_internal_dataset_with_retrieval_model_reranking(self, mock_dataset_service_dependencies): + """Test creation with retrieval model that includes reranking.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = "Reranking Dataset" + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + # Mock model manager + embedding_model = DatasetCreateTestDataFactory.create_embedding_model_mock() + mock_model_manager_instance = Mock() + mock_model_manager_instance.get_default_model_instance.return_value = embedding_model + mock_dataset_service_dependencies["model_manager"].return_value = mock_model_manager_instance + + # Mock retrieval model with reranking + reranking_model = Mock() + reranking_model.reranking_provider_name = "cohere" + reranking_model.reranking_model_name = "rerank-english-v3.0" + + retrieval_model = DatasetCreateTestDataFactory.create_retrieval_model_mock() + retrieval_model.reranking_model = reranking_model + + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Act + result = DatasetService.create_empty_dataset( + tenant_id=tenant_id, + name=name, + description=None, + indexing_technique="high_quality", + account=account, + retrieval_model=retrieval_model, + ) + + # Assert + mock_dataset_service_dependencies["check_reranking"].assert_called_once_with( + tenant_id, "cohere", "rerank-english-v3.0" + ) + mock_db.commit.assert_called_once() + + def 
test_create_internal_dataset_with_custom_permission(self, mock_dataset_service_dependencies): + """Test creation with custom permission setting.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = "Custom Permission Dataset" + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Act + result = DatasetService.create_empty_dataset( + tenant_id=tenant_id, + name=name, + description=None, + indexing_technique=None, + account=account, + permission="all_team_members", + ) + + # Assert + assert result.permission == "all_team_members" + mock_db.commit.assert_called_once() + + # ==================== External Dataset Creation Tests ==================== + + def test_create_external_dataset_success(self, mock_dataset_service_dependencies): + """Test successful creation of external dataset.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = "External Dataset" + external_api_id = "external-api-123" + external_knowledge_id = "external-knowledge-456" + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + # Mock external knowledge API + external_api = DatasetCreateTestDataFactory.create_external_knowledge_api_mock(api_id=external_api_id) + mock_dataset_service_dependencies["external_service"].get_external_knowledge_api.return_value = external_api + + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Act + result = 
DatasetService.create_empty_dataset( + tenant_id=tenant_id, + name=name, + description=None, + indexing_technique=None, + account=account, + provider="external", + external_knowledge_api_id=external_api_id, + external_knowledge_id=external_knowledge_id, + ) + + # Assert + assert result.provider == "external" + assert mock_db.add.call_count == 2 # Dataset + ExternalKnowledgeBindings + mock_dataset_service_dependencies["external_service"].get_external_knowledge_api.assert_called_once_with( + external_api_id + ) + mock_db.commit.assert_called_once() + + def test_create_external_dataset_missing_api_id_error(self, mock_dataset_service_dependencies): + """Test error when external knowledge API is not found.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = "External Dataset" + external_api_id = "non-existent-api" + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + # Mock external knowledge API not found + mock_dataset_service_dependencies["external_service"].get_external_knowledge_api.return_value = None + + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + + # Act & Assert + with pytest.raises(ValueError, match="External API template not found"): + DatasetService.create_empty_dataset( + tenant_id=tenant_id, + name=name, + description=None, + indexing_technique=None, + account=account, + provider="external", + external_knowledge_api_id=external_api_id, + external_knowledge_id="knowledge-123", + ) + + def test_create_external_dataset_missing_knowledge_id_error(self, mock_dataset_service_dependencies): + """Test error when external knowledge ID is missing.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = 
"External Dataset" + external_api_id = "external-api-123" + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + # Mock external knowledge API + external_api = DatasetCreateTestDataFactory.create_external_knowledge_api_mock(api_id=external_api_id) + mock_dataset_service_dependencies["external_service"].get_external_knowledge_api.return_value = external_api + + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + + # Act & Assert + with pytest.raises(ValueError, match="external_knowledge_id is required"): + DatasetService.create_empty_dataset( + tenant_id=tenant_id, + name=name, + description=None, + indexing_technique=None, + account=account, + provider="external", + external_knowledge_api_id=external_api_id, + external_knowledge_id=None, + ) + + # ==================== Error Handling Tests ==================== + + def test_create_dataset_duplicate_name_error(self, mock_dataset_service_dependencies): + """Test error when dataset name already exists.""" + # Arrange + tenant_id = str(uuid4()) + account = DatasetCreateTestDataFactory.create_account_mock(tenant_id=tenant_id) + name = "Duplicate Dataset" + + # Mock database query to return existing dataset + existing_dataset = DatasetCreateTestDataFactory.create_dataset_mock(name=name) + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = existing_dataset + mock_dataset_service_dependencies["db_session"].query.return_value = mock_query + + # Act & Assert + with pytest.raises(DatasetNameDuplicateError, match=f"Dataset with name {name} already exists"): + DatasetService.create_empty_dataset( + tenant_id=tenant_id, + name=name, + description=None, + indexing_technique=None, + account=account, + ) + + +class TestDatasetServiceCreateEmptyRagPipelineDataset: + """ + Comprehensive unit tests for 
DatasetService.create_empty_rag_pipeline_dataset method. + + This test suite covers: + - RAG pipeline dataset creation with provided name + - RAG pipeline dataset creation with auto-generated name + - Pipeline creation + - Error conditions (duplicate names, missing current user) + """ + + @pytest.fixture + def mock_rag_pipeline_dependencies(self): + """Common mock setup for RAG pipeline dataset creation.""" + with ( + patch("services.dataset_service.db.session") as mock_db, + patch("services.dataset_service.current_user") as mock_current_user, + patch("services.dataset_service.generate_incremental_name") as mock_generate_name, + ): + # Configure mock_current_user to behave like a Flask-Login proxy + # Default: no user (falsy) + mock_current_user.id = None + yield { + "db_session": mock_db, + "current_user_mock": mock_current_user, + "generate_name": mock_generate_name, + } + + def test_create_rag_pipeline_dataset_with_name_success(self, mock_rag_pipeline_dependencies): + """Test successful creation of RAG pipeline dataset with provided name.""" + # Arrange + tenant_id = str(uuid4()) + user_id = str(uuid4()) + name = "RAG Pipeline Dataset" + description = "RAG Pipeline Description" + + # Mock current user - set up the mock to have id attribute accessible directly + mock_rag_pipeline_dependencies["current_user_mock"].id = user_id + + # Mock database query (no duplicate name) + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_rag_pipeline_dependencies["db_session"].query.return_value = mock_query + + # Mock database operations + mock_db = mock_rag_pipeline_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Create entity + icon_info = IconInfo(icon="📙", icon_background="#FFF4ED", icon_type="emoji") + entity = RagPipelineDatasetCreateEntity( + name=name, + description=description, + icon_info=icon_info, + permission="only_me", + ) + + # Act + result = 
DatasetService.create_empty_rag_pipeline_dataset( + tenant_id=tenant_id, rag_pipeline_dataset_create_entity=entity + ) + + # Assert + assert result is not None + assert result.name == name + assert result.description == description + assert result.tenant_id == tenant_id + assert result.created_by == user_id + assert result.provider == "vendor" + assert result.runtime_mode == "rag_pipeline" + assert result.permission == "only_me" + assert mock_db.add.call_count == 2 # Pipeline + Dataset + mock_db.commit.assert_called_once() + + def test_create_rag_pipeline_dataset_with_auto_generated_name(self, mock_rag_pipeline_dependencies): + """Test creation of RAG pipeline dataset with auto-generated name.""" + # Arrange + tenant_id = str(uuid4()) + user_id = str(uuid4()) + auto_name = "Untitled 1" + + # Mock current user - set up the mock to have id attribute accessible directly + mock_rag_pipeline_dependencies["current_user_mock"].id = user_id + + # Mock database query (empty name, need to generate) + mock_query = Mock() + mock_query.filter_by.return_value.all.return_value = [] + mock_rag_pipeline_dependencies["db_session"].query.return_value = mock_query + + # Mock name generation + mock_rag_pipeline_dependencies["generate_name"].return_value = auto_name + + # Mock database operations + mock_db = mock_rag_pipeline_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Create entity with empty name + icon_info = IconInfo(icon="📙", icon_background="#FFF4ED", icon_type="emoji") + entity = RagPipelineDatasetCreateEntity( + name="", + description="", + icon_info=icon_info, + permission="only_me", + ) + + # Act + result = DatasetService.create_empty_rag_pipeline_dataset( + tenant_id=tenant_id, rag_pipeline_dataset_create_entity=entity + ) + + # Assert + assert result.name == auto_name + mock_rag_pipeline_dependencies["generate_name"].assert_called_once() + mock_db.commit.assert_called_once() + + def 
test_create_rag_pipeline_dataset_duplicate_name_error(self, mock_rag_pipeline_dependencies): + """Test error when RAG pipeline dataset name already exists.""" + # Arrange + tenant_id = str(uuid4()) + user_id = str(uuid4()) + name = "Duplicate RAG Dataset" + + # Mock current user - set up the mock to have id attribute accessible directly + mock_rag_pipeline_dependencies["current_user_mock"].id = user_id + + # Mock database query to return existing dataset + existing_dataset = DatasetCreateTestDataFactory.create_dataset_mock(name=name) + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = existing_dataset + mock_rag_pipeline_dependencies["db_session"].query.return_value = mock_query + + # Create entity + icon_info = IconInfo(icon="📙", icon_background="#FFF4ED", icon_type="emoji") + entity = RagPipelineDatasetCreateEntity( + name=name, + description="", + icon_info=icon_info, + permission="only_me", + ) + + # Act & Assert + with pytest.raises(DatasetNameDuplicateError, match=f"Dataset with name {name} already exists"): + DatasetService.create_empty_rag_pipeline_dataset( + tenant_id=tenant_id, rag_pipeline_dataset_create_entity=entity + ) + + def test_create_rag_pipeline_dataset_missing_current_user_error(self, mock_rag_pipeline_dependencies): + """Test error when current user is not available.""" + # Arrange + tenant_id = str(uuid4()) + + # Mock current user as None - set id to None so the check fails + mock_rag_pipeline_dependencies["current_user_mock"].id = None + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_rag_pipeline_dependencies["db_session"].query.return_value = mock_query + + # Create entity + icon_info = IconInfo(icon="📙", icon_background="#FFF4ED", icon_type="emoji") + entity = RagPipelineDatasetCreateEntity( + name="Test Dataset", + description="", + icon_info=icon_info, + permission="only_me", + ) + + # Act & Assert + with pytest.raises(ValueError, 
match="Current user or current user id not found"): + DatasetService.create_empty_rag_pipeline_dataset( + tenant_id=tenant_id, rag_pipeline_dataset_create_entity=entity + ) + + def test_create_rag_pipeline_dataset_with_custom_permission(self, mock_rag_pipeline_dependencies): + """Test creation with custom permission setting.""" + # Arrange + tenant_id = str(uuid4()) + user_id = str(uuid4()) + name = "Custom Permission RAG Dataset" + + # Mock current user - set up the mock to have id attribute accessible directly + mock_rag_pipeline_dependencies["current_user_mock"].id = user_id + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_rag_pipeline_dependencies["db_session"].query.return_value = mock_query + + # Mock database operations + mock_db = mock_rag_pipeline_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Create entity + icon_info = IconInfo(icon="📙", icon_background="#FFF4ED", icon_type="emoji") + entity = RagPipelineDatasetCreateEntity( + name=name, + description="", + icon_info=icon_info, + permission="all_team", + ) + + # Act + result = DatasetService.create_empty_rag_pipeline_dataset( + tenant_id=tenant_id, rag_pipeline_dataset_create_entity=entity + ) + + # Assert + assert result.permission == "all_team" + mock_db.commit.assert_called_once() + + def test_create_rag_pipeline_dataset_with_icon_info(self, mock_rag_pipeline_dependencies): + """Test creation with icon info configuration.""" + # Arrange + tenant_id = str(uuid4()) + user_id = str(uuid4()) + name = "Icon Info RAG Dataset" + + # Mock current user - set up the mock to have id attribute accessible directly + mock_rag_pipeline_dependencies["current_user_mock"].id = user_id + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_rag_pipeline_dependencies["db_session"].query.return_value = mock_query + + # Mock database 
operations + mock_db = mock_rag_pipeline_dependencies["db_session"] + mock_db.add = Mock() + mock_db.flush = Mock() + mock_db.commit = Mock() + + # Create entity with icon info + icon_info = IconInfo( + icon="📚", + icon_background="#E8F5E9", + icon_type="emoji", + icon_url="https://example.com/icon.png", + ) + entity = RagPipelineDatasetCreateEntity( + name=name, + description="", + icon_info=icon_info, + permission="only_me", + ) + + # Act + result = DatasetService.create_empty_rag_pipeline_dataset( + tenant_id=tenant_id, rag_pipeline_dataset_create_entity=entity + ) + + # Assert + assert result.icon_info == icon_info.model_dump() + mock_db.commit.assert_called_once() From c6e6f3b7cbbe3a950157a8f776536607fcd9760d Mon Sep 17 00:00:00 2001 From: Yuki Watanabe <31463517+B-Step62@users.noreply.github.com> Date: Sat, 22 Nov 2025 14:53:58 +0900 Subject: [PATCH 02/22] feat: MLflow tracing (#26093) Signed-off-by: B-Step62 Co-authored-by: Asuka Minato Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> --- api/core/ops/entities/config_entity.py | 46 +- api/core/ops/mlflow_trace/__init__.py | 0 api/core/ops/mlflow_trace/mlflow_trace.py | 549 ++++++++++++++++++ api/core/ops/ops_trace_manager.py | 20 + api/core/ops/utils.py | 11 + api/pyproject.toml | 1 + api/services/ops_service.py | 20 +- api/uv.lock | 64 ++ .../[appId]/overview/tracing/config-popup.tsx | 58 +- .../[appId]/overview/tracing/config.ts | 2 + .../[appId]/overview/tracing/panel.tsx | 30 +- .../tracing/provider-config-modal.tsx | 118 +++- .../overview/tracing/provider-panel.tsx | 4 +- .../[appId]/overview/tracing/type.ts | 17 + .../public/tracing/databricks-icon-big.svg | 17 + .../assets/public/tracing/databricks-icon.svg | 17 + .../assets/public/tracing/mlflow-icon-big.svg | 14 + 
.../assets/public/tracing/mlflow-icon.svg | 14 + .../src/public/tracing/DatabricksIcon.json | 135 +++++ .../src/public/tracing/DatabricksIcon.tsx | 20 + .../src/public/tracing/DatabricksIconBig.json | 135 +++++ .../src/public/tracing/DatabricksIconBig.tsx | 20 + .../icons/src/public/tracing/MlflowIcon.json | 108 ++++ .../icons/src/public/tracing/MlflowIcon.tsx | 20 + .../src/public/tracing/MlflowIconBig.json | 108 ++++ .../src/public/tracing/MlflowIconBig.tsx | 20 + .../base/icons/src/public/tracing/index.ts | 4 + web/i18n/de-DE/app.ts | 8 + web/i18n/en-US/app.ts | 16 + web/i18n/es-ES/app.ts | 8 + web/i18n/fa-IR/app.ts | 8 + web/i18n/fr-FR/app.ts | 8 + web/i18n/hi-IN/app.ts | 8 + web/i18n/it-IT/app.ts | 8 + web/i18n/ja-JP/app.ts | 18 +- web/i18n/ko-KR/app.ts | 8 + web/i18n/pl-PL/app.ts | 8 + web/i18n/pt-BR/app.ts | 8 + web/i18n/ro-RO/app.ts | 8 + web/i18n/ru-RU/app.ts | 8 + web/i18n/sl-SI/app.ts | 8 + web/i18n/th-TH/app.ts | 8 + web/i18n/tr-TR/app.ts | 8 + web/i18n/uk-UA/app.ts | 8 + web/i18n/vi-VN/app.ts | 8 + web/i18n/zh-Hans/app.ts | 8 + web/i18n/zh-Hant/app.ts | 8 + web/models/app.ts | 4 +- 48 files changed, 1737 insertions(+), 17 deletions(-) create mode 100644 api/core/ops/mlflow_trace/__init__.py create mode 100644 api/core/ops/mlflow_trace/mlflow_trace.py create mode 100644 web/app/components/base/icons/assets/public/tracing/databricks-icon-big.svg create mode 100644 web/app/components/base/icons/assets/public/tracing/databricks-icon.svg create mode 100644 web/app/components/base/icons/assets/public/tracing/mlflow-icon-big.svg create mode 100644 web/app/components/base/icons/assets/public/tracing/mlflow-icon.svg create mode 100644 web/app/components/base/icons/src/public/tracing/DatabricksIcon.json create mode 100644 web/app/components/base/icons/src/public/tracing/DatabricksIcon.tsx create mode 100644 web/app/components/base/icons/src/public/tracing/DatabricksIconBig.json create mode 100644 
class MLflowConfig(BaseTracingConfig):
    """Tracing configuration for a self-managed / OSS MLflow tracking server."""

    # Defaults mirror a local `mlflow server` instance; experiment "0" is
    # MLflow's built-in default experiment.
    tracking_uri: str = "http://localhost:5000"
    experiment_id: str = "0"
    username: str | None = None
    password: str | None = None

    @field_validator("tracking_uri")
    @classmethod
    def tracking_uri_validator(cls, value, info: ValidationInfo):
        # Databricks-managed MLflow has its own config class; reject the
        # "databricks" URI scheme here so users are pointed at the right form.
        if isinstance(value, str) and value.startswith("databricks"):
            raise ValueError(
                "Please use Databricks tracing config below to record traces to Databricks-managed MLflow instances."
            )
        return validate_url_with_path(value, "http://localhost:5000")

    @field_validator("experiment_id")
    @classmethod
    def experiment_id_validator(cls, value, info: ValidationInfo):
        return validate_integer_id(value)


class DatabricksConfig(BaseTracingConfig):
    """Tracing configuration for Databricks-managed MLflow."""

    experiment_id: str
    host: str
    # Either OAuth (client_id + client_secret) or a personal access token is
    # expected; presence is checked when the trace instance is constructed.
    client_id: str | None = None
    client_secret: str | None = None
    personal_access_token: str | None = None

    @field_validator("experiment_id")
    @classmethod
    def experiment_id_validator(cls, value, info: ValidationInfo):
        return validate_integer_id(value)


OPS_FILE_PATH = "ops_trace/"
OPS_TRACE_FAILED_KEY = "FAILED_OPS_TRACE"
import json
import logging
import os
from datetime import datetime, timedelta
from typing import Any, cast

import mlflow
from mlflow.entities import Document, Span, SpanEvent, SpanStatusCode, SpanType
from mlflow.tracing.constant import SpanAttributeKey, TokenUsageKey, TraceMetadataKey
from mlflow.tracing.fluent import start_span_no_context, update_current_trace
from mlflow.tracing.provider import detach_span_from_context, set_span_in_context

from core.ops.base_trace_instance import BaseTraceInstance
from core.ops.entities.config_entity import DatabricksConfig, MLflowConfig
from core.ops.entities.trace_entity import (
    BaseTraceInfo,
    DatasetRetrievalTraceInfo,
    GenerateNameTraceInfo,
    MessageTraceInfo,
    ModerationTraceInfo,
    SuggestedQuestionTraceInfo,
    ToolTraceInfo,
    TraceTaskName,
    WorkflowTraceInfo,
)
from core.workflow.enums import NodeType
from extensions.ext_database import db
from models import EndUser
from models.workflow import WorkflowNodeExecutionModel

logger = logging.getLogger(__name__)


def datetime_to_nanoseconds(dt: datetime | None) -> int | None:
    """Convert datetime to nanosecond timestamp for MLflow API"""
    if dt is None:
        return None
    return int(dt.timestamp() * 1_000_000_000)


class MLflowDataTrace(BaseTraceInstance):
    """Trace exporter that records Dify traces to MLflow (OSS or Databricks-managed)."""

    def __init__(self, config: MLflowConfig | DatabricksConfig):
        super().__init__(config)
        if isinstance(config, DatabricksConfig):
            self._setup_databricks(config)
        else:
            self._setup_mlflow(config)

        # Enable async logging to minimize performance overhead
        os.environ["MLFLOW_ENABLE_ASYNC_TRACE_LOGGING"] = "true"

    def _setup_databricks(self, config: DatabricksConfig):
        """Setup connection to Databricks-managed MLflow instances"""
        # Normalize the host before exporting it so the env var and project
        # URL agree (previously the trailing slash was stripped only after
        # DATABRICKS_HOST was set).
        host = config.host.rstrip("/")
        os.environ["DATABRICKS_HOST"] = host

        if config.client_id and config.client_secret:
            # OAuth: https://docs.databricks.com/aws/en/dev-tools/auth/oauth-m2m?language=Environment
            os.environ["DATABRICKS_CLIENT_ID"] = config.client_id
            os.environ["DATABRICKS_CLIENT_SECRET"] = config.client_secret
        elif config.personal_access_token:
            # PAT: https://docs.databricks.com/aws/en/dev-tools/auth/pat
            os.environ["DATABRICKS_TOKEN"] = config.personal_access_token
        else:
            # NB: string literals are concatenated; the ". " separator is
            # required so the message does not read "...must be providedSee...".
            raise ValueError(
                "Either Databricks token (PAT) or client id and secret (OAuth) must be provided. "
                "See https://docs.databricks.com/aws/en/dev-tools/auth/#what-authorization-option-should-i-choose "
                "for more information about the authorization options."
            )
        mlflow.set_tracking_uri("databricks")
        mlflow.set_experiment(experiment_id=config.experiment_id)

        self._project_url = f"{host}/ml/experiments/{config.experiment_id}/traces"

    def _setup_mlflow(self, config: MLflowConfig):
        """Setup connection to MLflow instances"""
        mlflow.set_tracking_uri(config.tracking_uri)
        mlflow.set_experiment(experiment_id=config.experiment_id)

        # Simple auth if provided
        if config.username and config.password:
            os.environ["MLFLOW_TRACKING_USERNAME"] = config.username
            os.environ["MLFLOW_TRACKING_PASSWORD"] = config.password

        self._project_url = f"{config.tracking_uri}/#/experiments/{config.experiment_id}/traces"

    def trace(self, trace_info: BaseTraceInfo):
        """Simple dispatch to trace methods"""
        try:
            if isinstance(trace_info, WorkflowTraceInfo):
                self.workflow_trace(trace_info)
            elif isinstance(trace_info, MessageTraceInfo):
                self.message_trace(trace_info)
            elif isinstance(trace_info, ToolTraceInfo):
                self.tool_trace(trace_info)
            elif isinstance(trace_info, ModerationTraceInfo):
                self.moderation_trace(trace_info)
            elif isinstance(trace_info, DatasetRetrievalTraceInfo):
                self.dataset_retrieval_trace(trace_info)
            elif isinstance(trace_info, SuggestedQuestionTraceInfo):
                self.suggested_question_trace(trace_info)
            elif isinstance(trace_info, GenerateNameTraceInfo):
                self.generate_name_trace(trace_info)
        except Exception:
            logger.exception("[MLflow] Trace error")
            raise

    def workflow_trace(self, trace_info: WorkflowTraceInfo):
        """Create workflow span as root, with node spans as children"""
        # "sys.*" inputs are added by Dify and duplicate trace_info.metadata,
        # so they are filtered out of the recorded workflow inputs.
        raw_inputs = trace_info.workflow_run_inputs or {}
        workflow_inputs = {k: v for k, v in raw_inputs.items() if not k.startswith("sys.")}

        # Special inputs propagated by system
        if trace_info.query:
            workflow_inputs["query"] = trace_info.query

        workflow_span = start_span_no_context(
            name=TraceTaskName.WORKFLOW_TRACE.value,
            span_type=SpanType.CHAIN,
            inputs=workflow_inputs,
            attributes=trace_info.metadata,
            start_time_ns=datetime_to_nanoseconds(trace_info.start_time),
        )

        # Set reserved fields in trace-level metadata
        trace_metadata = {}
        if user_id := trace_info.metadata.get("user_id"):
            trace_metadata[TraceMetadataKey.TRACE_USER] = user_id
        if session_id := trace_info.conversation_id:
            trace_metadata[TraceMetadataKey.TRACE_SESSION] = session_id
        self._set_trace_metadata(workflow_span, trace_metadata)

        try:
            # Create child spans for workflow nodes
            for node in self._get_workflow_nodes(trace_info.workflow_run_id):
                inputs = None
                attributes = {
                    "node_id": node.id,
                    "node_type": node.node_type,
                    "status": node.status,
                    "tenant_id": node.tenant_id,
                    "app_id": node.app_id,
                    "app_name": node.title,
                }

                if node.node_type in (NodeType.LLM, NodeType.QUESTION_CLASSIFIER):
                    inputs, llm_attributes = self._parse_llm_inputs_and_attributes(node)
                    attributes.update(llm_attributes)
                elif node.node_type == NodeType.HTTP_REQUEST:
                    inputs = node.process_data  # contains request URL

                if not inputs:
                    inputs = json.loads(node.inputs) if node.inputs else {}

                node_span = start_span_no_context(
                    name=node.title,
                    span_type=self._get_node_span_type(node.node_type),
                    parent_span=workflow_span,
                    inputs=inputs,
                    attributes=attributes,
                    start_time_ns=datetime_to_nanoseconds(node.created_at),
                )

                # Handle node errors
                if node.status != "succeeded":
                    node_span.set_status(SpanStatusCode.ERROR)
                    node_span.add_event(
                        SpanEvent(  # type: ignore[abstract]
                            name="exception",
                            attributes={
                                "exception.message": f"Node failed with status: {node.status}",
                                "exception.type": "Error",
                                "exception.stacktrace": f"Node failed with status: {node.status}",
                            },
                        )
                    )

                # End node span
                finished_at = node.created_at + timedelta(seconds=node.elapsed_time)
                outputs = json.loads(node.outputs) if node.outputs else {}
                if node.node_type == NodeType.KNOWLEDGE_RETRIEVAL:
                    outputs = self._parse_knowledge_retrieval_outputs(outputs)
                elif node.node_type == NodeType.LLM:
                    outputs = outputs.get("text", outputs)
                node_span.end(
                    outputs=outputs,
                    end_time_ns=datetime_to_nanoseconds(finished_at),
                )

            # Handle workflow-level errors
            if trace_info.error:
                workflow_span.set_status(SpanStatusCode.ERROR)
                workflow_span.add_event(
                    SpanEvent(  # type: ignore[abstract]
                        name="exception",
                        attributes={
                            "exception.message": trace_info.error,
                            "exception.type": "Error",
                            "exception.stacktrace": trace_info.error,
                        },
                    )
                )

        finally:
            workflow_span.end(
                outputs=trace_info.workflow_run_outputs,
                end_time_ns=datetime_to_nanoseconds(trace_info.end_time),
            )

    def _parse_llm_inputs_and_attributes(self, node: WorkflowNodeExecutionModel) -> tuple[Any, dict]:
        """Parse LLM inputs and attributes from LLM workflow node"""
        if node.process_data is None:
            return {}, {}

        try:
            data = json.loads(node.process_data)
        except (json.JSONDecodeError, TypeError):
            return {}, {}

        inputs = self._parse_prompts(data.get("prompts"))
        attributes = {
            "model_name": data.get("model_name"),
            "model_provider": data.get("model_provider"),
            "finish_reason": data.get("finish_reason"),
        }

        # MESSAGE_FORMAT only exists in newer MLflow releases; guard for compatibility.
        if hasattr(SpanAttributeKey, "MESSAGE_FORMAT"):
            attributes[SpanAttributeKey.MESSAGE_FORMAT] = "dify"

        if usage := data.get("usage"):
            # Set reserved token usage attributes
            attributes[SpanAttributeKey.CHAT_USAGE] = {
                TokenUsageKey.INPUT_TOKENS: usage.get("prompt_tokens", 0),
                TokenUsageKey.OUTPUT_TOKENS: usage.get("completion_tokens", 0),
                TokenUsageKey.TOTAL_TOKENS: usage.get("total_tokens", 0),
            }
            # Store raw usage data as well as it includes more data like price
            attributes["usage"] = usage

        return inputs, attributes

    def _parse_knowledge_retrieval_outputs(self, outputs: dict):
        """Parse knowledge-retrieval outputs into MLflow Document objects."""
        retrieved = outputs.get("result", [])

        if not retrieved or not isinstance(retrieved, list):
            return outputs

        documents = []
        for item in retrieved:
            documents.append(Document(page_content=item.get("content", ""), metadata=item.get("metadata", {})))
        return documents

    def message_trace(self, trace_info: MessageTraceInfo):
        """Create span for CHATBOT message processing"""
        if not trace_info.message_data:
            return

        file_list = cast(list[str], trace_info.file_list) or []
        if message_file_data := trace_info.message_file_data:
            base_url = os.getenv("FILES_URL", "http://127.0.0.1:5001")
            file_list.append(f"{base_url}/{message_file_data.url}")

        span = start_span_no_context(
            name=TraceTaskName.MESSAGE_TRACE.value,
            span_type=SpanType.LLM,
            inputs=self._parse_prompts(trace_info.inputs),  # type: ignore[arg-type]
            attributes={
                "message_id": trace_info.message_id,  # type: ignore[dict-item]
                "model_provider": trace_info.message_data.model_provider,
                "model_id": trace_info.message_data.model_id,
                "conversation_mode": trace_info.conversation_mode,
                "file_list": file_list,  # type: ignore[dict-item]
                "total_price": trace_info.message_data.total_price,
                **trace_info.metadata,
            },
            start_time_ns=datetime_to_nanoseconds(trace_info.start_time),
        )

        if hasattr(SpanAttributeKey, "MESSAGE_FORMAT"):
            span.set_attribute(SpanAttributeKey.MESSAGE_FORMAT, "dify")

        # Set token usage
        span.set_attribute(
            SpanAttributeKey.CHAT_USAGE,
            {
                TokenUsageKey.INPUT_TOKENS: trace_info.message_tokens or 0,
                TokenUsageKey.OUTPUT_TOKENS: trace_info.answer_tokens or 0,
                TokenUsageKey.TOTAL_TOKENS: trace_info.total_tokens or 0,
            },
        )

        # Set reserved fields in trace-level metadata
        trace_metadata = {}
        if user_id := self._get_message_user_id(trace_info.metadata):
            trace_metadata[TraceMetadataKey.TRACE_USER] = user_id
        if session_id := trace_info.metadata.get("conversation_id"):
            trace_metadata[TraceMetadataKey.TRACE_SESSION] = session_id
        self._set_trace_metadata(span, trace_metadata)

        if trace_info.error:
            span.set_status(SpanStatusCode.ERROR)
            span.add_event(
                SpanEvent(  # type: ignore[abstract]
                    name="error",
                    attributes={
                        "exception.message": trace_info.error,
                        "exception.type": "Error",
                        "exception.stacktrace": trace_info.error,
                    },
                )
            )

        span.end(
            outputs=trace_info.message_data.answer,
            end_time_ns=datetime_to_nanoseconds(trace_info.end_time),
        )

    def _get_message_user_id(self, metadata: dict) -> str | None:
        # Prefer the end-user session id when the message came from an end user;
        # otherwise fall back to the account id (may be None).
        if (end_user_id := metadata.get("from_end_user_id")) and (
            end_user_data := db.session.query(EndUser).where(EndUser.id == end_user_id).first()
        ):
            return end_user_data.session_id

        return metadata.get("from_account_id")  # type: ignore[return-value]

    def tool_trace(self, trace_info: ToolTraceInfo):
        """Create a TOOL span for a single tool invocation."""
        span = start_span_no_context(
            name=trace_info.tool_name,
            span_type=SpanType.TOOL,
            inputs=trace_info.tool_inputs,  # type: ignore[arg-type]
            attributes={
                "message_id": trace_info.message_id,  # type: ignore[dict-item]
                "metadata": trace_info.metadata,  # type: ignore[dict-item]
                "tool_config": trace_info.tool_config,  # type: ignore[dict-item]
                "tool_parameters": trace_info.tool_parameters,  # type: ignore[dict-item]
            },
            start_time_ns=datetime_to_nanoseconds(trace_info.start_time),
        )

        # Handle tool errors
        if trace_info.error:
            span.set_status(SpanStatusCode.ERROR)
            span.add_event(
                SpanEvent(  # type: ignore[abstract]
                    name="error",
                    attributes={
                        "exception.message": trace_info.error,
                        "exception.type": "Error",
                        "exception.stacktrace": trace_info.error,
                    },
                )
            )

        span.end(
            outputs=trace_info.tool_outputs,
            end_time_ns=datetime_to_nanoseconds(trace_info.end_time),
        )

    def moderation_trace(self, trace_info: ModerationTraceInfo):
        """Create a TOOL span for a moderation check on a message."""
        if trace_info.message_data is None:
            return

        start_time = trace_info.start_time or trace_info.message_data.created_at
        span = start_span_no_context(
            name=TraceTaskName.MODERATION_TRACE.value,
            span_type=SpanType.TOOL,
            inputs=trace_info.inputs or {},
            attributes={
                "message_id": trace_info.message_id,  # type: ignore[dict-item]
                "metadata": trace_info.metadata,  # type: ignore[dict-item]
            },
            start_time_ns=datetime_to_nanoseconds(start_time),
        )

        span.end(
            outputs={
                "action": trace_info.action,
                "flagged": trace_info.flagged,
                "preset_response": trace_info.preset_response,
            },
            end_time_ns=datetime_to_nanoseconds(trace_info.end_time),
        )

    def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo):
        """Create a RETRIEVER span for a dataset retrieval step."""
        if trace_info.message_data is None:
            return

        span = start_span_no_context(
            name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value,
            span_type=SpanType.RETRIEVER,
            inputs=trace_info.inputs,
            attributes={
                "message_id": trace_info.message_id,  # type: ignore[dict-item]
                "metadata": trace_info.metadata,  # type: ignore[dict-item]
            },
            start_time_ns=datetime_to_nanoseconds(trace_info.start_time),
        )
        span.end(outputs={"documents": trace_info.documents}, end_time_ns=datetime_to_nanoseconds(trace_info.end_time))

    def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo):
        """Create a TOOL span for suggested-question generation."""
        if trace_info.message_data is None:
            return

        start_time = trace_info.start_time or trace_info.message_data.created_at
        end_time = trace_info.end_time or trace_info.message_data.updated_at

        span = start_span_no_context(
            name=TraceTaskName.SUGGESTED_QUESTION_TRACE.value,
            span_type=SpanType.TOOL,
            inputs=trace_info.inputs,
            attributes={
                "message_id": trace_info.message_id,  # type: ignore[dict-item]
                "model_provider": trace_info.model_provider,  # type: ignore[dict-item]
                "model_id": trace_info.model_id,  # type: ignore[dict-item]
                "total_tokens": trace_info.total_tokens or 0,  # type: ignore[dict-item]
            },
            start_time_ns=datetime_to_nanoseconds(start_time),
        )

        if trace_info.error:
            span.set_status(SpanStatusCode.ERROR)
            span.add_event(
                SpanEvent(  # type: ignore[abstract]
                    name="error",
                    attributes={
                        "exception.message": trace_info.error,
                        "exception.type": "Error",
                        "exception.stacktrace": trace_info.error,
                    },
                )
            )

        span.end(outputs=trace_info.suggested_question, end_time_ns=datetime_to_nanoseconds(end_time))

    def generate_name_trace(self, trace_info: GenerateNameTraceInfo):
        """Create a CHAIN span for conversation-name generation."""
        span = start_span_no_context(
            name=TraceTaskName.GENERATE_NAME_TRACE.value,
            span_type=SpanType.CHAIN,
            inputs=trace_info.inputs,
            attributes={"message_id": trace_info.message_id},  # type: ignore[dict-item]
            start_time_ns=datetime_to_nanoseconds(trace_info.start_time),
        )
        span.end(outputs=trace_info.outputs, end_time_ns=datetime_to_nanoseconds(trace_info.end_time))

    def _get_workflow_nodes(self, workflow_run_id: str):
        """Helper method to get workflow nodes"""
        workflow_nodes = (
            db.session.query(
                WorkflowNodeExecutionModel.id,
                WorkflowNodeExecutionModel.tenant_id,
                WorkflowNodeExecutionModel.app_id,
                WorkflowNodeExecutionModel.title,
                WorkflowNodeExecutionModel.node_type,
                WorkflowNodeExecutionModel.status,
                WorkflowNodeExecutionModel.inputs,
                WorkflowNodeExecutionModel.outputs,
                WorkflowNodeExecutionModel.created_at,
                WorkflowNodeExecutionModel.elapsed_time,
                WorkflowNodeExecutionModel.process_data,
                WorkflowNodeExecutionModel.execution_metadata,
            )
            .filter(WorkflowNodeExecutionModel.workflow_run_id == workflow_run_id)
            .order_by(WorkflowNodeExecutionModel.created_at)
            .all()
        )
        return workflow_nodes

    def _get_node_span_type(self, node_type: str) -> str:
        """Map Dify node types to MLflow span types"""
        node_type_mapping = {
            NodeType.LLM: SpanType.LLM,
            NodeType.QUESTION_CLASSIFIER: SpanType.LLM,
            NodeType.KNOWLEDGE_RETRIEVAL: SpanType.RETRIEVER,
            NodeType.TOOL: SpanType.TOOL,
            NodeType.CODE: SpanType.TOOL,
            NodeType.HTTP_REQUEST: SpanType.TOOL,
            NodeType.AGENT: SpanType.AGENT,
        }
        return node_type_mapping.get(node_type, "CHAIN")  # type: ignore[arg-type,call-overload]

    def _set_trace_metadata(self, span: Span, metadata: dict):
        token = None
        try:
            # NB: Set span in context such that we can use update_current_trace() API
            token = set_span_in_context(span)
            update_current_trace(metadata=metadata)
        finally:
            if token:
                detach_span_from_context(token)

    def _parse_prompts(self, prompts):
        """Postprocess prompts format to be standard chat messages"""
        if isinstance(prompts, str):
            return prompts
        elif isinstance(prompts, dict):
            return self._parse_single_message(prompts)
        elif isinstance(prompts, list):
            messages = [self._parse_single_message(item) for item in prompts]
            messages = self._resolve_tool_call_ids(messages)
            return messages
        return prompts  # Fallback to original format

    def _parse_single_message(self, item: dict):
        """Postprocess single message format to be standard chat message"""
        role = item.get("role", "user")
        msg = {"role": role, "content": item.get("text", "")}

        if (
            (tool_calls := item.get("tool_calls"))
            # Tool message does not contain tool calls normally
            and role != "tool"
        ):
            msg["tool_calls"] = tool_calls

        if files := item.get("files"):
            msg["files"] = files

        return msg

    def _resolve_tool_call_ids(self, messages: list[dict]):
        """
        The tool call message from Dify does not contain tool call ids, which is not
        ideal for debugging. This method resolves the tool call ids by matching the
        tool call name and parameters with the tool instruction messages.
        """
        tool_call_ids = []
        for msg in messages:
            if tool_calls := msg.get("tool_calls"):
                tool_call_ids = [t["id"] for t in tool_calls]
            if msg["role"] == "tool":
                # Get the tool call id in the order of the tool call messages
                # assuming Dify runs tools sequentially
                if tool_call_ids:
                    msg["tool_call_id"] = tool_call_ids.pop(0)
        return messages

    def api_check(self):
        """Simple connection test"""
        try:
            mlflow.search_experiments(max_results=1)
            return True
        except Exception as e:
            # Chain the original exception so the root cause is preserved.
            raise ValueError(f"MLflow connection failed: {e}") from e

    def get_project_url(self):
        return self._project_url
def validate_integer_id(id_str: str) -> str:
    """Strip surrounding whitespace and ensure the ID is a digits-only string.

    Returns:
        The trimmed ID string.

    Raises:
        ValueError: if the trimmed value contains anything but digits.
    """
    normalized = id_str.strip()
    if normalized.isdigit():
        return normalized
    raise ValueError("ID must be a valid integer")
f"{tracing_config.get('host')}/project/{project_key}" except Exception: project_url = None - elif tracing_provider in ("langsmith", "opik", "tencent"): + elif tracing_provider in ("langsmith", "opik", "mlflow", "databricks", "tencent"): try: project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider) except Exception: diff --git a/api/uv.lock b/api/uv.lock index 6300adae61..dab6bc5787 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1055,6 +1055,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/23/38/749c708619f402d4d582dfa73fbeb64ade77b1f250a93bd064d2a1aa3776/clickzetta_connector_python-0.8.106-py3-none-any.whl", hash = "sha256:120d6700051d97609dbd6655c002ab3bc260b7c8e67d39dfc7191e749563f7b4", size = 78121, upload-time = "2025-10-29T02:38:15.014Z" }, ] +[[package]] +name = "cloudpickle" +version = "3.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414", size = 22330, upload-time = "2025-11-03T09:25:26.604Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a", size = 22228, upload-time = "2025-11-03T09:25:25.534Z" }, +] + [[package]] name = "cloudscraper" version = "1.2.71" @@ -1255,6 +1264,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, ] +[[package]] +name = "databricks-sdk" +version = "0.73.0" +source = { registry 
= "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/7f/cfb2a00d10f6295332616e5b22f2ae3aaf2841a3afa6c49262acb6b94f5b/databricks_sdk-0.73.0.tar.gz", hash = "sha256:db09eaaacd98e07dded78d3e7ab47d2f6c886e0380cb577977bd442bace8bd8d", size = 801017, upload-time = "2025-11-05T06:52:58.509Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/27/b822b474aaefb684d11df358d52e012699a2a8af231f9b47c54b73f280cb/databricks_sdk-0.73.0-py3-none-any.whl", hash = "sha256:a4d3cfd19357a2b459d2dc3101454d7f0d1b62865ce099c35d0c342b66ac64ff", size = 753896, upload-time = "2025-11-05T06:52:56.451Z" }, +] + [[package]] name = "dataclasses-json" version = "0.6.7" @@ -1350,6 +1373,7 @@ dependencies = [ { name = "langsmith" }, { name = "litellm" }, { name = "markdown" }, + { name = "mlflow-skinny" }, { name = "numpy" }, { name = "openpyxl" }, { name = "opentelemetry-api" }, @@ -1544,6 +1568,7 @@ requires-dist = [ { name = "langsmith", specifier = "~=0.1.77" }, { name = "litellm", specifier = "==1.77.1" }, { name = "markdown", specifier = "~=3.5.1" }, + { name = "mlflow-skinny", specifier = ">=3.0.0" }, { name = "numpy", specifier = "~=1.26.4" }, { name = "openpyxl", specifier = "~=3.1.5" }, { name = "opentelemetry-api", specifier = "==1.27.0" }, @@ -3338,6 +3363,36 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d3/82/41d9b80f09b82e066894d9b508af07b7b0fa325ce0322980674de49106a0/milvus_lite-2.5.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:25ce13f4b8d46876dd2b7ac8563d7d8306da7ff3999bb0d14b116b30f71d706c", size = 55263911, upload-time = "2025-06-30T04:24:19.434Z" }, ] +[[package]] +name = "mlflow-skinny" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "click" }, + { name = "cloudpickle" }, + { name = "databricks-sdk" }, + { name = "fastapi" }, + { 
name = "gitpython" }, + { name = "importlib-metadata" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "packaging" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "sqlparse" }, + { name = "typing-extensions" }, + { name = "uvicorn" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8d/8e/2a2d0cd5b1b985c5278202805f48aae6f2adc3ddc0fce3385ec50e07e258/mlflow_skinny-3.6.0.tar.gz", hash = "sha256:cc04706b5b6faace9faf95302a6e04119485e1bfe98ddc9b85b81984e80944b6", size = 1963286, upload-time = "2025-11-07T18:33:52.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/78/e8fdc3e1708bdfd1eba64f41ce96b461cae1b505aa08b69352ac99b4caa4/mlflow_skinny-3.6.0-py3-none-any.whl", hash = "sha256:c83b34fce592acb2cc6bddcb507587a6d9ef3f590d9e7a8658c85e0980596d78", size = 2364629, upload-time = "2025-11-07T18:33:50.744Z" }, +] + [[package]] name = "mmh3" version = "5.2.0" @@ -5729,6 +5784,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9b/70/20c1912bc0bfebf516d59d618209443b136c58a7cff141afa7cf30969988/sqlglot-27.29.0-py3-none-any.whl", hash = "sha256:9a5ea8ac61826a7763de10cad45a35f0aa9bfcf7b96ee74afb2314de9089e1cb", size = 526060, upload-time = "2025-10-29T13:50:22.061Z" }, ] +[[package]] +name = "sqlparse" +version = "0.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999, upload-time = "2024-12-10T12:05:30.728Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = 
"sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415, upload-time = "2024-12-10T12:05:27.824Z" }, +] + [[package]] name = "sseclient-py" version = "1.8.0" diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx index 0ad02ad7f3..628eb13071 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx @@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import TracingIcon from './tracing-icon' import ProviderPanel from './provider-panel' -import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' +import type { AliyunConfig, ArizeConfig, DatabricksConfig, LangFuseConfig, LangSmithConfig, MLflowConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' import { TracingProvider } from './type' import ProviderConfigModal from './provider-config-modal' import Indicator from '@/app/components/header/indicator' @@ -30,8 +30,10 @@ export type PopupProps = { opikConfig: OpikConfig | null weaveConfig: WeaveConfig | null aliyunConfig: AliyunConfig | null + mlflowConfig: MLflowConfig | null + databricksConfig: DatabricksConfig | null tencentConfig: TencentConfig | null - onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => void + onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig | MLflowConfig | DatabricksConfig) => void onConfigRemoved: (provider: TracingProvider) => void } @@ -49,6 +51,8 
@@ const ConfigPopup: FC = ({ opikConfig, weaveConfig, aliyunConfig, + mlflowConfig, + databricksConfig, tencentConfig, onConfigUpdated, onConfigRemoved, @@ -73,7 +77,7 @@ const ConfigPopup: FC = ({ } }, [onChooseProvider]) - const handleConfigUpdated = useCallback((payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => { + const handleConfigUpdated = useCallback((payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | MLflowConfig | DatabricksConfig | TencentConfig) => { onConfigUpdated(currentProvider!, payload) hideConfigModal() }, [currentProvider, hideConfigModal, onConfigUpdated]) @@ -83,8 +87,8 @@ const ConfigPopup: FC = ({ hideConfigModal() }, [currentProvider, hideConfigModal, onConfigRemoved]) - const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig && tencentConfig - const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig && !tencentConfig + const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig && mlflowConfig && databricksConfig && tencentConfig + const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig && !mlflowConfig && !databricksConfig && !tencentConfig const switchContent = ( = ({ /> ) + const mlflowPanel = ( + + ) + + const databricksPanel = ( + + ) + const tencentPanel = ( = ({ if (aliyunConfig) configuredPanels.push(aliyunPanel) + if (mlflowConfig) + configuredPanels.push(mlflowPanel) + + if (databricksConfig) + configuredPanels.push(databricksPanel) + if (tencentConfig) configuredPanels.push(tencentPanel) @@ -251,6 +287,12 @@ const ConfigPopup: FC = ({ 
if (!aliyunConfig) notConfiguredPanels.push(aliyunPanel) + if (!mlflowConfig) + notConfiguredPanels.push(mlflowPanel) + + if (!databricksConfig) + notConfiguredPanels.push(databricksPanel) + if (!tencentConfig) notConfiguredPanels.push(tencentPanel) @@ -258,6 +300,10 @@ const ConfigPopup: FC = ({ } const configuredProviderConfig = () => { + if (currentProvider === TracingProvider.mlflow) + return mlflowConfig + if (currentProvider === TracingProvider.databricks) + return databricksConfig if (currentProvider === TracingProvider.arize) return arizeConfig if (currentProvider === TracingProvider.phoenix) @@ -316,6 +362,8 @@ const ConfigPopup: FC = ({ {langfusePanel} {langSmithPanel} {opikPanel} + {mlflowPanel} + {databricksPanel} {weavePanel} {arizePanel} {phoenixPanel} diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts index 00f6224e9e..221ba2808f 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts @@ -8,5 +8,7 @@ export const docURL = { [TracingProvider.opik]: 'https://www.comet.com/docs/opik/tracing/integrations/dify#setup-instructions', [TracingProvider.weave]: 'https://weave-docs.wandb.ai/', [TracingProvider.aliyun]: 'https://help.aliyun.com/zh/arms/tracing-analysis/untitled-document-1750672984680', + [TracingProvider.mlflow]: 'https://mlflow.org/docs/latest/genai/', + [TracingProvider.databricks]: 'https://docs.databricks.com/aws/en/mlflow3/genai/tracing/', [TracingProvider.tencent]: 'https://cloud.tencent.com/document/product/248/116531', } diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx index e1fd39fd48..2c17931b83 100644 --- 
a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx @@ -8,12 +8,12 @@ import { import { useTranslation } from 'react-i18next' import { usePathname } from 'next/navigation' import { useBoolean } from 'ahooks' -import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' +import type { AliyunConfig, ArizeConfig, DatabricksConfig, LangFuseConfig, LangSmithConfig, MLflowConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' import { TracingProvider } from './type' import TracingIcon from './tracing-icon' import ConfigButton from './config-button' import cn from '@/utils/classnames' -import { AliyunIcon, ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, TencentIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing' +import { AliyunIcon, ArizeIcon, DatabricksIcon, LangfuseIcon, LangsmithIcon, MlflowIcon, OpikIcon, PhoenixIcon, TencentIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing' import Indicator from '@/app/components/header/indicator' import { fetchTracingConfig as doFetchTracingConfig, fetchTracingStatus, updateTracingStatus } from '@/service/apps' import type { TracingStatus } from '@/models/app' @@ -71,6 +71,8 @@ const Panel: FC = () => { [TracingProvider.opik]: OpikIcon, [TracingProvider.weave]: WeaveIcon, [TracingProvider.aliyun]: AliyunIcon, + [TracingProvider.mlflow]: MlflowIcon, + [TracingProvider.databricks]: DatabricksIcon, [TracingProvider.tencent]: TencentIcon, } const InUseProviderIcon = inUseTracingProvider ? 
providerIconMap[inUseTracingProvider] : undefined @@ -82,8 +84,10 @@ const Panel: FC = () => { const [opikConfig, setOpikConfig] = useState(null) const [weaveConfig, setWeaveConfig] = useState(null) const [aliyunConfig, setAliyunConfig] = useState(null) + const [mlflowConfig, setMLflowConfig] = useState(null) + const [databricksConfig, setDatabricksConfig] = useState(null) const [tencentConfig, setTencentConfig] = useState(null) - const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig || tencentConfig) + const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig || mlflowConfig || databricksConfig || tencentConfig) const fetchTracingConfig = async () => { const getArizeConfig = async () => { @@ -121,6 +125,16 @@ const Panel: FC = () => { if (!aliyunHasNotConfig) setAliyunConfig(aliyunConfig as AliyunConfig) } + const getMLflowConfig = async () => { + const { tracing_config: mlflowConfig, has_not_configured: mlflowHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.mlflow }) + if (!mlflowHasNotConfig) + setMLflowConfig(mlflowConfig as MLflowConfig) + } + const getDatabricksConfig = async () => { + const { tracing_config: databricksConfig, has_not_configured: databricksHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.databricks }) + if (!databricksHasNotConfig) + setDatabricksConfig(databricksConfig as DatabricksConfig) + } const getTencentConfig = async () => { const { tracing_config: tencentConfig, has_not_configured: tencentHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.tencent }) if (!tencentHasNotConfig) @@ -134,6 +148,8 @@ const Panel: FC = () => { getOpikConfig(), getWeaveConfig(), getAliyunConfig(), + getMLflowConfig(), + getDatabricksConfig(), getTencentConfig(), ]) } @@ -174,6 +190,10 @@ const Panel: 
FC = () => { setWeaveConfig(null) else if (provider === TracingProvider.aliyun) setAliyunConfig(null) + else if (provider === TracingProvider.mlflow) + setMLflowConfig(null) + else if (provider === TracingProvider.databricks) + setDatabricksConfig(null) else if (provider === TracingProvider.tencent) setTencentConfig(null) if (provider === inUseTracingProvider) { @@ -221,6 +241,8 @@ const Panel: FC = () => { opikConfig={opikConfig} weaveConfig={weaveConfig} aliyunConfig={aliyunConfig} + mlflowConfig={mlflowConfig} + databricksConfig={databricksConfig} tencentConfig={tencentConfig} onConfigUpdated={handleTracingConfigUpdated} onConfigRemoved={handleTracingConfigRemoved} @@ -258,6 +280,8 @@ const Panel: FC = () => { opikConfig={opikConfig} weaveConfig={weaveConfig} aliyunConfig={aliyunConfig} + mlflowConfig={mlflowConfig} + databricksConfig={databricksConfig} tencentConfig={tencentConfig} onConfigUpdated={handleTracingConfigUpdated} onConfigRemoved={handleTracingConfigRemoved} diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx index 5933e73e66..7cf479f5a8 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx @@ -4,7 +4,7 @@ import React, { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import Field from './field' -import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' +import type { AliyunConfig, ArizeConfig, DatabricksConfig, LangFuseConfig, LangSmithConfig, MLflowConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' import { TracingProvider } from './type' import { docURL } 
from './config' import { @@ -22,10 +22,10 @@ import Divider from '@/app/components/base/divider' type Props = { appId: string type: TracingProvider - payload?: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig | null + payload?: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | MLflowConfig | DatabricksConfig | TencentConfig | null onRemoved: () => void onCancel: () => void - onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => void + onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | MLflowConfig | DatabricksConfig | TencentConfig) => void onChosen: (provider: TracingProvider) => void } @@ -77,6 +77,21 @@ const aliyunConfigTemplate = { endpoint: '', } +const mlflowConfigTemplate = { + tracking_uri: '', + experiment_id: '', + username: '', + password: '', +} + +const databricksConfigTemplate = { + experiment_id: '', + host: '', + client_id: '', + client_secret: '', + personal_access_token: '', +} + const tencentConfigTemplate = { token: '', endpoint: '', @@ -96,7 +111,7 @@ const ProviderConfigModal: FC = ({ const isEdit = !!payload const isAdd = !isEdit const [isSaving, setIsSaving] = useState(false) - const [config, setConfig] = useState((() => { + const [config, setConfig] = useState((() => { if (isEdit) return payload @@ -118,6 +133,12 @@ const ProviderConfigModal: FC = ({ else if (type === TracingProvider.aliyun) return aliyunConfigTemplate + else if (type === TracingProvider.mlflow) + return mlflowConfigTemplate + + else if (type === TracingProvider.databricks) + return databricksConfigTemplate + else if (type === TracingProvider.tencent) return tencentConfigTemplate @@ -211,6 +232,20 @@ const ProviderConfigModal: FC = ({ errorMessage = t('common.errorMsg.fieldRequired', 
{ field: 'Endpoint' }) } + if (type === TracingProvider.mlflow) { + const postData = config as MLflowConfig + if (!errorMessage && !postData.tracking_uri) + errorMessage = t('common.errorMsg.fieldRequired', { field: 'Tracking URI' }) + } + + if (type === TracingProvider.databricks) { + const postData = config as DatabricksConfig + if (!errorMessage && !postData.experiment_id) + errorMessage = t('common.errorMsg.fieldRequired', { field: 'Experiment ID' }) + if (!errorMessage && !postData.host) + errorMessage = t('common.errorMsg.fieldRequired', { field: 'Host' }) + } + if (type === TracingProvider.tencent) { const postData = config as TencentConfig if (!errorMessage && !postData.token) @@ -513,6 +548,81 @@ const ProviderConfigModal: FC = ({ /> )} + {type === TracingProvider.mlflow && ( + <> + + + + + + )} + {type === TracingProvider.databricks && ( + <> + + + + + + + )}
{ [TracingProvider.opik]: OpikIconBig, [TracingProvider.weave]: WeaveIconBig, [TracingProvider.aliyun]: AliyunIconBig, + [TracingProvider.mlflow]: MlflowIconBig, + [TracingProvider.databricks]: DatabricksIconBig, [TracingProvider.tencent]: TencentIconBig, })[type] } diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts index 719451f5d0..737111a7ef 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts @@ -6,6 +6,8 @@ export enum TracingProvider { opik = 'opik', weave = 'weave', aliyun = 'aliyun', + mlflow = 'mlflow', + databricks = 'databricks', tencent = 'tencent', } @@ -55,6 +57,21 @@ export type AliyunConfig = { endpoint: string } +export type MLflowConfig = { + tracking_uri: string + experiment_id: string + username: string + password: string +} + +export type DatabricksConfig = { + experiment_id: string + host: string + client_id: string + client_secret: string + personal_access_token: string +} + export type TencentConfig = { token: string endpoint: string diff --git a/web/app/components/base/icons/assets/public/tracing/databricks-icon-big.svg b/web/app/components/base/icons/assets/public/tracing/databricks-icon-big.svg new file mode 100644 index 0000000000..2456376d40 --- /dev/null +++ b/web/app/components/base/icons/assets/public/tracing/databricks-icon-big.svg @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/web/app/components/base/icons/assets/public/tracing/databricks-icon.svg b/web/app/components/base/icons/assets/public/tracing/databricks-icon.svg new file mode 100644 index 0000000000..b9e852eca7 --- /dev/null +++ b/web/app/components/base/icons/assets/public/tracing/databricks-icon.svg @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git 
a/web/app/components/base/icons/assets/public/tracing/mlflow-icon-big.svg b/web/app/components/base/icons/assets/public/tracing/mlflow-icon-big.svg new file mode 100644 index 0000000000..0a88b9bc2c --- /dev/null +++ b/web/app/components/base/icons/assets/public/tracing/mlflow-icon-big.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/web/app/components/base/icons/assets/public/tracing/mlflow-icon.svg b/web/app/components/base/icons/assets/public/tracing/mlflow-icon.svg new file mode 100644 index 0000000000..f6beec36a2 --- /dev/null +++ b/web/app/components/base/icons/assets/public/tracing/mlflow-icon.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/web/app/components/base/icons/src/public/tracing/DatabricksIcon.json b/web/app/components/base/icons/src/public/tracing/DatabricksIcon.json new file mode 100644 index 0000000000..fef015543d --- /dev/null +++ b/web/app/components/base/icons/src/public/tracing/DatabricksIcon.json @@ -0,0 +1,135 @@ +{ + "icon": { + "type": "element", + "isRootNode": true, + "name": "svg", + "attributes": { + "xmlns": "http://www.w3.org/2000/svg", + "xmlns:xlink": "http://www.w3.org/1999/xlink", + "width": "100px", + "height": "16px", + "viewBox": "0 0 100 16", + "version": "1.1" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "surface1" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(93.333334%,23.921569%,17.254902%);fill-opacity:1;", + "d": "M 13.886719 6.597656 L 7.347656 10.320312 L 0.351562 6.34375 L 0.015625 6.527344 L 0.015625 9.414062 L 7.347656 13.578125 L 13.886719 9.867188 L 13.886719 11.398438 L 7.347656 15.121094 L 0.351562 11.144531 L 0.015625 11.328125 L 0.015625 11.824219 L 7.347656 15.984375 L 14.671875 11.824219 L 14.671875 8.933594 L 14.332031 8.75 L 7.347656 12.714844 L 0.800781 9.003906 L 0.800781 7.476562 L 7.347656 11.1875 L 14.671875 7.023438 L 14.671875 4.175781 L 
14.304688 3.964844 L 7.347656 7.914062 L 1.136719 4.402344 L 7.347656 0.878906 L 12.453125 3.78125 L 12.902344 3.527344 L 12.902344 3.171875 L 7.347656 0.015625 L 0.015625 4.175781 L 0.015625 4.628906 L 7.347656 8.792969 L 13.886719 5.070312 Z M 13.886719 6.597656 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 28.375 13.621094 L 28.375 0.90625 L 26.4375 0.90625 L 26.4375 5.664062 C 26.4375 5.734375 26.394531 5.792969 26.324219 5.820312 C 26.253906 5.847656 26.183594 5.820312 26.144531 5.777344 C 25.484375 5 24.460938 4.558594 23.339844 4.558594 C 20.941406 4.558594 19.058594 6.597656 19.058594 9.203125 C 19.058594 10.476562 19.496094 11.652344 20.292969 12.515625 C 21.09375 13.378906 22.175781 13.847656 23.339844 13.847656 C 24.445312 13.847656 25.46875 13.378906 26.144531 12.574219 C 26.183594 12.515625 26.269531 12.503906 26.324219 12.515625 C 26.394531 12.546875 26.4375 12.601562 26.4375 12.671875 L 26.4375 13.621094 Z M 23.757812 12.078125 C 22.214844 12.078125 21.011719 10.816406 21.011719 9.203125 C 21.011719 7.589844 22.214844 6.328125 23.757812 6.328125 C 25.300781 6.328125 26.507812 7.589844 26.507812 9.203125 C 26.507812 10.816406 25.300781 12.078125 23.757812 12.078125 Z M 23.757812 12.078125 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 38.722656 13.621094 L 38.722656 4.773438 L 36.800781 4.773438 L 36.800781 5.664062 C 36.800781 5.734375 36.761719 5.792969 36.691406 5.820312 C 36.621094 5.847656 36.550781 5.820312 36.507812 5.761719 C 35.863281 4.984375 34.851562 4.546875 33.703125 4.546875 C 31.304688 4.546875 29.425781 6.585938 29.425781 9.1875 C 29.425781 11.792969 31.304688 13.832031 33.703125 13.832031 C 34.8125 13.832031 35.835938 13.367188 36.507812 12.546875 C 36.550781 12.488281 
36.632812 12.472656 36.691406 12.488281 C 36.761719 12.515625 36.800781 12.574219 36.800781 12.644531 L 36.800781 13.605469 L 38.722656 13.605469 Z M 34.136719 12.078125 C 32.59375 12.078125 31.386719 10.816406 31.386719 9.203125 C 31.386719 7.589844 32.59375 6.328125 34.136719 6.328125 C 35.679688 6.328125 36.886719 7.589844 36.886719 9.203125 C 36.886719 10.816406 35.679688 12.078125 34.136719 12.078125 Z M 34.136719 12.078125 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 55.175781 13.621094 L 55.175781 4.773438 L 53.253906 4.773438 L 53.253906 5.664062 C 53.253906 5.734375 53.210938 5.792969 53.140625 5.820312 C 53.070312 5.847656 53 5.820312 52.960938 5.761719 C 52.3125 4.984375 51.304688 4.546875 50.152344 4.546875 C 47.742188 4.546875 45.875 6.585938 45.875 9.203125 C 45.875 11.824219 47.757812 13.847656 50.152344 13.847656 C 51.261719 13.847656 52.285156 13.378906 52.960938 12.558594 C 53 12.503906 53.085938 12.488281 53.140625 12.503906 C 53.210938 12.53125 53.253906 12.585938 53.253906 12.660156 L 53.253906 13.621094 Z M 50.589844 12.078125 C 49.046875 12.078125 47.839844 10.816406 47.839844 9.203125 C 47.839844 7.589844 49.046875 6.328125 50.589844 6.328125 C 52.132812 6.328125 53.339844 7.589844 53.339844 9.203125 C 53.339844 10.816406 52.132812 12.078125 50.589844 12.078125 Z M 50.589844 12.078125 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 58.695312 12.574219 C 58.710938 12.574219 58.738281 12.558594 58.75 12.558594 C 58.792969 12.558594 58.851562 12.585938 58.878906 12.617188 C 59.539062 13.394531 60.5625 13.832031 61.683594 13.832031 C 64.082031 13.832031 65.960938 11.792969 65.960938 9.1875 C 65.960938 7.914062 65.527344 6.738281 64.726562 5.875 C 63.925781 5.011719 62.847656 4.546875 
61.683594 4.546875 C 60.574219 4.546875 59.550781 5.011719 58.878906 5.820312 C 58.835938 5.875 58.765625 5.890625 58.695312 5.875 C 58.625 5.847656 58.582031 5.792969 58.582031 5.71875 L 58.582031 0.90625 L 56.648438 0.90625 L 56.648438 13.621094 L 58.582031 13.621094 L 58.582031 12.730469 C 58.582031 12.660156 58.625 12.601562 58.695312 12.574219 Z M 58.5 9.203125 C 58.5 7.589844 59.707031 6.328125 61.25 6.328125 C 62.792969 6.328125 63.996094 7.589844 63.996094 9.203125 C 63.996094 10.816406 62.792969 12.078125 61.25 12.078125 C 59.707031 12.078125 58.5 10.804688 58.5 9.203125 Z M 58.5 9.203125 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 71.558594 6.585938 C 71.738281 6.585938 71.90625 6.597656 72.019531 6.625 L 72.019531 4.617188 C 71.949219 4.601562 71.824219 4.585938 71.695312 4.585938 C 70.6875 4.585938 69.761719 5.113281 69.269531 5.945312 C 69.230469 6.019531 69.160156 6.046875 69.089844 6.019531 C 69.019531 6.003906 68.960938 5.933594 68.960938 5.863281 L 68.960938 4.773438 L 67.039062 4.773438 L 67.039062 13.636719 L 68.976562 13.636719 L 68.976562 9.726562 C 68.976562 7.789062 69.957031 6.585938 71.558594 6.585938 Z M 71.558594 6.585938 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 73.238281 4.773438 L 75.203125 4.773438 L 75.203125 13.636719 L 73.238281 13.636719 Z M 73.238281 4.773438 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 74.195312 0.921875 C 73.535156 0.921875 73 1.457031 73 2.125 C 73 2.789062 73.535156 3.328125 74.195312 3.328125 C 74.851562 3.328125 75.386719 2.789062 75.386719 2.125 C 75.386719 1.457031 74.851562 0.921875 74.195312 0.921875 Z 
M 74.195312 0.921875 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 80.953125 4.546875 C 78.261719 4.546875 76.3125 6.5 76.3125 9.203125 C 76.3125 10.519531 76.773438 11.695312 77.601562 12.546875 C 78.441406 13.394531 79.621094 13.863281 80.941406 13.863281 C 82.035156 13.863281 82.875 13.648438 84.472656 12.460938 L 83.367188 11.285156 C 82.582031 11.808594 81.851562 12.0625 81.136719 12.0625 C 79.507812 12.0625 78.289062 10.832031 78.289062 9.203125 C 78.289062 7.574219 79.507812 6.34375 81.136719 6.34375 C 81.90625 6.34375 82.621094 6.597656 83.339844 7.121094 L 84.570312 5.945312 C 83.128906 4.699219 81.824219 4.546875 80.953125 4.546875 Z M 80.953125 4.546875 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 87.882812 9.726562 C 87.910156 9.699219 87.953125 9.683594 87.996094 9.683594 L 88.007812 9.683594 C 88.050781 9.683594 88.09375 9.714844 88.132812 9.742188 L 91.234375 13.621094 L 93.617188 13.621094 L 89.605469 8.722656 C 89.550781 8.652344 89.550781 8.550781 89.621094 8.496094 L 93.308594 4.773438 L 90.941406 4.773438 L 87.757812 8 C 87.714844 8.042969 87.644531 8.054688 87.574219 8.042969 C 87.515625 8.015625 87.476562 7.957031 87.476562 7.886719 L 87.476562 0.921875 L 85.527344 0.921875 L 85.527344 13.636719 L 87.460938 13.636719 L 87.460938 10.179688 C 87.460938 10.136719 87.476562 10.082031 87.515625 10.054688 Z M 87.882812 9.726562 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 96.773438 13.847656 C 98.359375 13.847656 99.972656 12.871094 99.972656 11.015625 C 99.972656 9.796875 99.214844 8.960938 97.671875 8.453125 L 96.621094 8.097656 C 95.90625 7.859375 
95.566406 7.519531 95.566406 7.050781 C 95.566406 6.511719 96.042969 6.144531 96.71875 6.144531 C 97.363281 6.144531 97.9375 6.570312 98.304688 7.304688 L 99.859375 6.457031 C 99.285156 5.265625 98.09375 4.53125 96.71875 4.53125 C 94.980469 4.53125 93.714844 5.664062 93.714844 7.207031 C 93.714844 8.4375 94.445312 9.261719 95.945312 9.742188 L 97.027344 10.09375 C 97.785156 10.335938 98.105469 10.648438 98.105469 11.144531 C 98.105469 11.894531 97.417969 12.164062 96.832031 12.164062 C 96.042969 12.164062 95.34375 11.652344 95.007812 10.816406 L 93.421875 11.667969 C 93.941406 13.011719 95.21875 13.847656 96.773438 13.847656 Z M 96.773438 13.847656 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 44.109375 13.761719 C 44.726562 13.761719 45.273438 13.707031 45.582031 13.664062 L 45.582031 11.964844 C 45.328125 11.992188 44.878906 12.019531 44.613281 12.019531 C 43.828125 12.019531 43.226562 11.878906 43.226562 10.167969 L 43.226562 6.527344 C 43.226562 6.429688 43.296875 6.359375 43.394531 6.359375 L 45.289062 6.359375 L 45.289062 4.757812 L 43.394531 4.757812 C 43.296875 4.757812 43.226562 4.6875 43.226562 4.585938 L 43.226562 2.039062 L 41.289062 2.039062 L 41.289062 4.601562 C 41.289062 4.699219 41.21875 4.773438 41.121094 4.773438 L 39.777344 4.773438 L 39.777344 6.371094 L 41.121094 6.371094 C 41.21875 6.371094 41.289062 6.441406 41.289062 6.542969 L 41.289062 10.660156 C 41.289062 13.761719 43.339844 13.761719 44.109375 13.761719 Z M 44.109375 13.761719 " + }, + "children": [] + } + ] + } + ] + }, + "name": "DatabricksIcon" +} diff --git a/web/app/components/base/icons/src/public/tracing/DatabricksIcon.tsx b/web/app/components/base/icons/src/public/tracing/DatabricksIcon.tsx new file mode 100644 index 0000000000..1403c12d46 --- /dev/null +++ b/web/app/components/base/icons/src/public/tracing/DatabricksIcon.tsx @@ -0,0 +1,20 @@ +// 
GENERATE BY script +// DON NOT EDIT IT MANUALLY + +import * as React from 'react' +import data from './DatabricksIcon.json' +import IconBase from '@/app/components/base/icons/IconBase' +import type { IconData } from '@/app/components/base/icons/IconBase' + +const Icon = ( + { + ref, + ...props + }: React.SVGProps & { + ref?: React.RefObject>; + }, +) => + +Icon.displayName = 'DatabricksIcon' + +export default Icon diff --git a/web/app/components/base/icons/src/public/tracing/DatabricksIconBig.json b/web/app/components/base/icons/src/public/tracing/DatabricksIconBig.json new file mode 100644 index 0000000000..4ca83d5f59 --- /dev/null +++ b/web/app/components/base/icons/src/public/tracing/DatabricksIconBig.json @@ -0,0 +1,135 @@ +{ + "icon": { + "type": "element", + "isRootNode": true, + "name": "svg", + "attributes": { + "xmlns": "http://www.w3.org/2000/svg", + "xmlns:xlink": "http://www.w3.org/1999/xlink", + "width": "150px", + "height": "24px", + "viewBox": "0 0 151 24", + "version": "1.1" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "surface1" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(93.333334%,23.921569%,17.254902%);fill-opacity:1;", + "d": "M 20.964844 9.898438 L 11.097656 15.484375 L 0.53125 9.515625 L 0.0195312 9.792969 L 0.0195312 14.125 L 11.097656 20.367188 L 20.964844 14.804688 L 20.964844 17.097656 L 11.097656 22.683594 L 0.53125 16.714844 L 0.0195312 16.992188 L 0.0195312 17.734375 L 11.097656 23.980469 L 22.152344 17.734375 L 22.152344 13.402344 L 21.644531 13.125 L 11.097656 19.074219 L 1.207031 13.507812 L 1.207031 11.214844 L 11.097656 16.777344 L 22.152344 10.535156 L 22.152344 6.265625 L 21.601562 5.945312 L 11.097656 11.871094 L 1.714844 6.605469 L 11.097656 1.316406 L 18.804688 5.671875 L 19.484375 5.289062 L 19.484375 4.757812 L 11.097656 0.0195312 L 0.0195312 6.265625 L 0.0195312 6.945312 L 11.097656 13.1875 L 
20.964844 7.605469 Z M 20.964844 9.898438 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 42.84375 20.433594 L 42.84375 1.359375 L 39.921875 1.359375 L 39.921875 8.496094 C 39.921875 8.601562 39.855469 8.6875 39.75 8.730469 C 39.644531 8.773438 39.539062 8.730469 39.476562 8.664062 C 38.480469 7.496094 36.933594 6.839844 35.242188 6.839844 C 31.617188 6.839844 28.78125 9.898438 28.78125 13.804688 C 28.78125 15.71875 29.4375 17.480469 30.644531 18.773438 C 31.851562 20.070312 33.484375 20.773438 35.242188 20.773438 C 36.914062 20.773438 38.460938 20.070312 39.476562 18.859375 C 39.539062 18.773438 39.667969 18.753906 39.75 18.773438 C 39.855469 18.816406 39.921875 18.902344 39.921875 19.007812 L 39.921875 20.433594 Z M 35.875 18.117188 C 33.546875 18.117188 31.726562 16.226562 31.726562 13.804688 C 31.726562 11.382812 33.546875 9.492188 35.875 9.492188 C 38.207031 9.492188 40.027344 11.382812 40.027344 13.804688 C 40.027344 16.226562 38.207031 18.117188 35.875 18.117188 Z M 35.875 18.117188 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 58.472656 20.433594 L 58.472656 7.15625 L 55.570312 7.15625 L 55.570312 8.496094 C 55.570312 8.601562 55.507812 8.6875 55.402344 8.730469 C 55.296875 8.773438 55.191406 8.730469 55.125 8.644531 C 54.152344 7.476562 52.628906 6.816406 50.890625 6.816406 C 47.269531 6.816406 44.433594 9.875 44.433594 13.785156 C 44.433594 17.691406 47.269531 20.75 50.890625 20.75 C 52.5625 20.75 54.109375 20.050781 55.125 18.816406 C 55.191406 18.734375 55.316406 18.710938 55.402344 18.734375 C 55.507812 18.773438 55.570312 18.859375 55.570312 18.964844 L 55.570312 20.410156 L 58.472656 20.410156 Z M 51.546875 18.117188 C 49.21875 18.117188 47.398438 16.226562 47.398438 13.804688 C 47.398438 
11.382812 49.21875 9.492188 51.546875 9.492188 C 53.878906 9.492188 55.699219 11.382812 55.699219 13.804688 C 55.699219 16.226562 53.878906 18.117188 51.546875 18.117188 Z M 51.546875 18.117188 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 83.316406 20.433594 L 83.316406 7.15625 L 80.414062 7.15625 L 80.414062 8.496094 C 80.414062 8.601562 80.351562 8.6875 80.242188 8.730469 C 80.136719 8.773438 80.03125 8.730469 79.96875 8.644531 C 78.996094 7.476562 77.46875 6.816406 75.734375 6.816406 C 72.089844 6.816406 69.273438 9.875 69.273438 13.804688 C 69.273438 17.734375 72.113281 20.773438 75.734375 20.773438 C 77.40625 20.773438 78.953125 20.070312 79.96875 18.839844 C 80.03125 18.753906 80.160156 18.734375 80.242188 18.753906 C 80.351562 18.796875 80.414062 18.882812 80.414062 18.988281 L 80.414062 20.433594 Z M 76.390625 18.117188 C 74.058594 18.117188 72.238281 16.226562 72.238281 13.804688 C 72.238281 11.382812 74.058594 9.492188 76.390625 9.492188 C 78.71875 9.492188 80.539062 11.382812 80.539062 13.804688 C 80.539062 16.226562 78.71875 18.117188 76.390625 18.117188 Z M 76.390625 18.117188 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 88.628906 18.859375 C 88.652344 18.859375 88.695312 18.839844 88.714844 18.839844 C 88.777344 18.839844 88.863281 18.882812 88.90625 18.925781 C 89.902344 20.09375 91.445312 20.75 93.140625 20.75 C 96.761719 20.75 99.601562 17.691406 99.601562 13.785156 C 99.601562 11.871094 98.945312 10.109375 97.738281 8.8125 C 96.53125 7.519531 94.898438 6.816406 93.140625 6.816406 C 91.46875 6.816406 89.921875 7.519531 88.90625 8.730469 C 88.84375 8.8125 88.734375 8.835938 88.628906 8.8125 C 88.523438 8.773438 88.460938 8.6875 88.460938 8.582031 L 88.460938 1.359375 L 85.539062 
1.359375 L 85.539062 20.433594 L 88.460938 20.433594 L 88.460938 19.09375 C 88.460938 18.988281 88.523438 18.902344 88.628906 18.859375 Z M 88.332031 13.804688 C 88.332031 11.382812 90.15625 9.492188 92.484375 9.492188 C 94.8125 9.492188 96.636719 11.382812 96.636719 13.804688 C 96.636719 16.226562 94.8125 18.117188 92.484375 18.117188 C 90.15625 18.117188 88.332031 16.207031 88.332031 13.804688 Z M 88.332031 13.804688 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 108.050781 9.875 C 108.324219 9.875 108.582031 9.898438 108.75 9.941406 L 108.75 6.925781 C 108.644531 6.902344 108.453125 6.882812 108.261719 6.882812 C 106.738281 6.882812 105.339844 7.667969 104.597656 8.921875 C 104.535156 9.027344 104.429688 9.070312 104.324219 9.027344 C 104.21875 9.003906 104.132812 8.898438 104.132812 8.792969 L 104.132812 7.15625 L 101.230469 7.15625 L 101.230469 20.453125 L 104.152344 20.453125 L 104.152344 14.589844 C 104.152344 11.679688 105.636719 9.875 108.050781 9.875 Z M 108.050781 9.875 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 110.59375 7.15625 L 113.558594 7.15625 L 113.558594 20.453125 L 110.59375 20.453125 Z M 110.59375 7.15625 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 112.03125 1.378906 C 111.035156 1.378906 110.230469 2.1875 110.230469 3.1875 C 110.230469 4.183594 111.035156 4.992188 112.03125 4.992188 C 113.027344 4.992188 113.832031 4.183594 113.832031 3.1875 C 113.832031 2.1875 113.027344 1.378906 112.03125 1.378906 Z M 112.03125 1.378906 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " 
stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 122.238281 6.816406 C 118.175781 6.816406 115.230469 9.75 115.230469 13.804688 C 115.230469 15.78125 115.929688 17.542969 117.179688 18.816406 C 118.449219 20.09375 120.226562 20.792969 122.21875 20.792969 C 123.871094 20.792969 125.140625 20.472656 127.554688 18.691406 L 125.882812 16.925781 C 124.695312 17.714844 123.59375 18.09375 122.515625 18.09375 C 120.058594 18.09375 118.214844 16.246094 118.214844 13.804688 C 118.214844 11.363281 120.058594 9.515625 122.515625 9.515625 C 123.679688 9.515625 124.761719 9.898438 125.839844 10.683594 L 127.703125 8.921875 C 125.523438 7.050781 123.554688 6.816406 122.238281 6.816406 Z M 122.238281 6.816406 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 132.703125 14.589844 C 132.746094 14.546875 132.808594 14.527344 132.871094 14.527344 L 132.894531 14.527344 C 132.957031 14.527344 133.019531 14.570312 133.082031 14.613281 L 137.765625 20.433594 L 141.363281 20.433594 L 135.308594 13.082031 C 135.222656 12.976562 135.222656 12.828125 135.328125 12.742188 L 140.898438 7.15625 L 137.320312 7.15625 L 132.511719 12 C 132.449219 12.0625 132.34375 12.085938 132.234375 12.0625 C 132.152344 12.019531 132.089844 11.9375 132.089844 11.832031 L 132.089844 1.378906 L 129.144531 1.378906 L 129.144531 20.453125 L 132.066406 20.453125 L 132.066406 15.269531 C 132.066406 15.207031 132.089844 15.121094 132.152344 15.078125 Z M 132.703125 14.589844 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 146.128906 20.773438 C 148.523438 20.773438 150.957031 19.304688 150.957031 16.523438 C 150.957031 14.699219 149.8125 13.445312 147.484375 12.679688 L 145.894531 12.148438 C 144.816406 11.789062 144.308594 11.277344 
144.308594 10.578125 C 144.308594 9.769531 145.027344 9.21875 146.042969 9.21875 C 147.019531 9.21875 147.886719 9.855469 148.4375 10.960938 L 150.789062 9.683594 C 149.917969 7.902344 148.121094 6.796875 146.042969 6.796875 C 143.417969 6.796875 141.511719 8.496094 141.511719 10.8125 C 141.511719 12.660156 142.613281 13.890625 144.878906 14.613281 L 146.511719 15.144531 C 147.652344 15.503906 148.140625 15.972656 148.140625 16.714844 C 148.140625 17.839844 147.101562 18.246094 146.214844 18.246094 C 145.027344 18.246094 143.96875 17.480469 143.460938 16.226562 L 141.066406 17.5 C 141.851562 19.519531 143.777344 20.773438 146.128906 20.773438 Z M 146.128906 20.773438 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0%,0%,0%);fill-opacity:1;", + "d": "M 66.605469 20.644531 C 67.535156 20.644531 68.363281 20.558594 68.828125 20.496094 L 68.828125 17.945312 C 68.449219 17.988281 67.769531 18.03125 67.367188 18.03125 C 66.179688 18.03125 65.269531 17.820312 65.269531 15.25 L 65.269531 9.792969 C 65.269531 9.640625 65.375 9.535156 65.523438 9.535156 L 68.382812 9.535156 L 68.382812 7.136719 L 65.523438 7.136719 C 65.375 7.136719 65.269531 7.03125 65.269531 6.882812 L 65.269531 3.058594 L 62.347656 3.058594 L 62.347656 6.902344 C 62.347656 7.050781 62.242188 7.15625 62.09375 7.15625 L 60.0625 7.15625 L 60.0625 9.558594 L 62.09375 9.558594 C 62.242188 9.558594 62.347656 9.664062 62.347656 9.8125 L 62.347656 15.992188 C 62.347656 20.644531 65.441406 20.644531 66.605469 20.644531 Z M 66.605469 20.644531 " + }, + "children": [] + } + ] + } + ] + }, + "name": "DatabricksIconBig" +} diff --git a/web/app/components/base/icons/src/public/tracing/DatabricksIconBig.tsx b/web/app/components/base/icons/src/public/tracing/DatabricksIconBig.tsx new file mode 100644 index 0000000000..d2ecdcbea5 --- /dev/null +++ b/web/app/components/base/icons/src/public/tracing/DatabricksIconBig.tsx @@ -0,0 
+1,20 @@ +// GENERATE BY script +// DON NOT EDIT IT MANUALLY + +import * as React from 'react' +import data from './DatabricksIconBig.json' +import IconBase from '@/app/components/base/icons/IconBase' +import type { IconData } from '@/app/components/base/icons/IconBase' + +const Icon = ( + { + ref, + ...props + }: React.SVGProps & { + ref?: React.RefObject>; + }, +) => + +Icon.displayName = 'DatabricksIconBig' + +export default Icon diff --git a/web/app/components/base/icons/src/public/tracing/MlflowIcon.json b/web/app/components/base/icons/src/public/tracing/MlflowIcon.json new file mode 100644 index 0000000000..28145faf51 --- /dev/null +++ b/web/app/components/base/icons/src/public/tracing/MlflowIcon.json @@ -0,0 +1,108 @@ +{ + "icon": { + "type": "element", + "isRootNode": true, + "name": "svg", + "attributes": { + "xmlns": "http://www.w3.org/2000/svg", + "xmlns:xlink": "http://www.w3.org/1999/xlink", + "width": "44px", + "height": "16px", + "viewBox": "0 0 43 16", + "version": "1.1" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "surface1" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(20%,20%,20%);fill-opacity:1;", + "d": "M 0 12.414062 L 0 6.199219 L 1.398438 6.199219 L 1.398438 6.988281 C 1.75 6.351562 2.519531 6.019531 3.210938 6.019531 C 4.015625 6.019531 4.71875 6.386719 5.046875 7.117188 C 5.527344 6.300781 6.242188 6.019531 7.035156 6.019531 C 8.144531 6.019531 9.203125 6.734375 9.203125 8.378906 L 9.203125 12.414062 L 7.792969 12.414062 L 7.792969 8.621094 C 7.792969 7.894531 7.425781 7.34375 6.609375 7.34375 C 5.839844 7.34375 5.335938 7.957031 5.335938 8.722656 L 5.335938 12.410156 L 3.902344 12.410156 L 3.902344 8.621094 C 3.902344 7.90625 3.546875 7.347656 2.71875 7.347656 C 1.9375 7.347656 1.445312 7.9375 1.445312 8.726562 L 1.445312 12.414062 Z M 0 12.414062 " + }, + "children": [] + }, + { + "type": "element", + "name": 
"path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(20%,20%,20%);fill-opacity:1;", + "d": "M 10.988281 12.414062 L 10.988281 3.171875 L 12.449219 3.171875 L 12.449219 12.414062 Z M 10.988281 12.414062 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 11.863281 15.792969 C 12.191406 15.886719 12.488281 15.949219 13.113281 15.949219 C 14.277344 15.949219 15.652344 15.28125 16.015625 13.414062 L 17.507812 5.917969 L 19.726562 5.917969 L 20 4.667969 L 17.753906 4.667969 L 18.058594 3.179688 C 18.289062 2.023438 18.917969 1.4375 19.933594 1.4375 C 20.195312 1.4375 20.121094 1.460938 20.359375 1.503906 L 20.683594 0.226562 C 20.371094 0.132812 20.089844 0.078125 19.480469 0.078125 C 18.835938 0.0664062 18.207031 0.277344 17.691406 0.667969 C 17.125 1.117188 16.75 1.769531 16.578125 2.613281 L 16.15625 4.667969 L 14.171875 4.667969 L 14.007812 5.917969 L 15.910156 5.917969 L 14.539062 12.847656 C 14.390625 13.632812 13.949219 14.574219 12.683594 14.574219 C 12.398438 14.574219 12.5 14.550781 12.242188 14.507812 Z M 11.863281 15.792969 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 21.042969 12.363281 L 19.582031 12.363281 L 21.585938 3.039062 L 23.042969 3.039062 Z M 21.042969 12.363281 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(26.274511%,78.823531%,92.941177%);fill-opacity:1;", + "d": "M 28.328125 6.589844 C 27.054688 5.6875 25.316406 5.863281 24.246094 7.007812 C 23.175781 8.152344 23.09375 9.917969 24.050781 11.160156 L 25.007812 10.449219 C 24.535156 9.851562 24.4375 9.03125 24.761719 8.339844 C 25.082031 7.644531 25.769531 7.199219 
26.527344 7.191406 L 26.527344 7.949219 Z M 28.328125 6.589844 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 24.703125 11.789062 C 25.976562 12.691406 27.710938 12.515625 28.78125 11.371094 C 29.851562 10.226562 29.933594 8.460938 28.976562 7.21875 L 28.019531 7.929688 C 28.496094 8.527344 28.59375 9.347656 28.269531 10.039062 C 27.945312 10.734375 27.261719 11.179688 26.503906 11.1875 L 26.503906 10.429688 Z M 24.703125 11.789062 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 30.808594 6.195312 L 32.402344 6.195312 L 32.726562 10.441406 L 35 6.195312 L 36.511719 6.21875 L 37.109375 10.441406 L 39.109375 6.195312 L 40.570312 6.21875 L 37.539062 12.417969 L 36.082031 12.417969 L 35.378906 7.972656 L 33.050781 12.417969 L 31.535156 12.417969 Z M 30.808594 6.195312 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 41.449219 6.308594 L 41.148438 6.308594 L 41.148438 6.199219 L 41.875 6.199219 L 41.875 6.308594 L 41.574219 6.308594 L 41.574219 7.207031 L 41.449219 7.207031 Z M 41.449219 6.308594 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 42.058594 6.199219 L 42.210938 6.199219 L 42.398438 6.738281 C 42.425781 6.804688 42.445312 6.875 42.46875 6.945312 L 42.476562 6.945312 C 42.5 6.875 42.523438 6.804688 42.546875 6.738281 L 42.734375 6.199219 L 42.886719 6.199219 L 42.886719 7.207031 L 42.765625 7.207031 L 42.765625 6.652344 C 42.765625 6.5625 42.777344 
6.441406 42.78125 6.351562 L 42.777344 6.351562 L 42.703125 6.582031 L 42.515625 7.105469 L 42.433594 7.105469 L 42.242188 6.582031 L 42.167969 6.355469 L 42.160156 6.355469 C 42.167969 6.445312 42.175781 6.566406 42.175781 6.652344 L 42.175781 7.207031 L 42.0625 7.207031 Z M 42.058594 6.199219 " + }, + "children": [] + } + ] + } + ] + }, + "name": "MlflowIcon" +} diff --git a/web/app/components/base/icons/src/public/tracing/MlflowIcon.tsx b/web/app/components/base/icons/src/public/tracing/MlflowIcon.tsx new file mode 100644 index 0000000000..c0213133b7 --- /dev/null +++ b/web/app/components/base/icons/src/public/tracing/MlflowIcon.tsx @@ -0,0 +1,20 @@ +// GENERATE BY script +// DON NOT EDIT IT MANUALLY + +import * as React from 'react' +import data from './MlflowIcon.json' +import IconBase from '@/app/components/base/icons/IconBase' +import type { IconData } from '@/app/components/base/icons/IconBase' + +const Icon = ( + { + ref, + ...props + }: React.SVGProps & { + ref?: React.RefObject>; + }, +) => + +Icon.displayName = 'MlflowIcon' + +export default Icon diff --git a/web/app/components/base/icons/src/public/tracing/MlflowIconBig.json b/web/app/components/base/icons/src/public/tracing/MlflowIconBig.json new file mode 100644 index 0000000000..b09af4435c --- /dev/null +++ b/web/app/components/base/icons/src/public/tracing/MlflowIconBig.json @@ -0,0 +1,108 @@ +{ + "icon": { + "type": "element", + "isRootNode": true, + "name": "svg", + "attributes": { + "xmlns": "http://www.w3.org/2000/svg", + "xmlns:xlink": "http://www.w3.org/1999/xlink", + "width": "65px", + "height": "24px", + "viewBox": "0 0 65 24", + "version": "1.1" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "surface1" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(20%,20%,20%);fill-opacity:1;", + "d": "M 0 18.617188 L 0 9.300781 L 2.113281 9.300781 L 2.113281 10.480469 C 2.644531 
9.523438 3.804688 9.027344 4.851562 9.027344 C 6.070312 9.027344 7.132812 9.582031 7.628906 10.671875 C 8.355469 9.449219 9.4375 9.027344 10.636719 9.027344 C 12.3125 9.027344 13.910156 10.097656 13.910156 12.570312 L 13.910156 18.617188 L 11.78125 18.617188 L 11.78125 12.933594 C 11.78125 11.839844 11.226562 11.019531 9.988281 11.019531 C 8.828125 11.019531 8.066406 11.9375 8.066406 13.085938 L 8.066406 18.617188 L 5.898438 18.617188 L 5.898438 12.933594 C 5.898438 11.859375 5.363281 11.023438 4.109375 11.023438 C 2.929688 11.023438 2.1875 11.90625 2.1875 13.089844 L 2.1875 18.625 Z M 0 18.617188 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(20%,20%,20%);fill-opacity:1;", + "d": "M 16.609375 18.617188 L 16.609375 4.757812 L 18.820312 4.757812 L 18.820312 18.617188 Z M 16.609375 18.617188 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 17.933594 23.691406 C 18.429688 23.832031 18.875 23.921875 19.820312 23.921875 C 21.582031 23.921875 23.660156 22.921875 24.207031 20.117188 L 26.464844 8.875 L 29.820312 8.875 L 30.230469 7.003906 L 26.839844 7.003906 L 27.296875 4.769531 C 27.644531 3.035156 28.601562 2.15625 30.132812 2.15625 C 30.53125 2.15625 30.417969 2.191406 30.773438 2.257812 L 31.265625 0.34375 C 30.792969 0.199219 30.367188 0.113281 29.445312 0.113281 C 28.472656 0.101562 27.519531 0.414062 26.746094 1.003906 C 25.886719 1.671875 25.320312 2.65625 25.058594 3.921875 L 24.425781 7.003906 L 21.421875 7.003906 L 21.175781 8.875 L 24.054688 8.875 L 21.980469 19.273438 C 21.753906 20.453125 21.085938 21.863281 19.171875 21.863281 C 18.738281 21.863281 18.898438 21.828125 18.503906 21.765625 Z M 17.933594 23.691406 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " 
stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 31.808594 18.542969 L 29.601562 18.542969 L 32.628906 4.558594 L 34.835938 4.558594 Z M 31.808594 18.542969 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(26.274511%,78.823531%,92.941177%);fill-opacity:1;", + "d": "M 42.820312 9.886719 C 40.894531 8.53125 38.269531 8.796875 36.652344 10.511719 C 35.035156 12.230469 34.910156 14.878906 36.359375 16.742188 L 37.804688 15.675781 C 37.085938 14.777344 36.941406 13.550781 37.429688 12.507812 C 37.917969 11.46875 38.953125 10.800781 40.097656 10.789062 L 40.097656 11.925781 Z M 42.820312 9.886719 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 37.339844 17.683594 C 39.265625 19.039062 41.890625 18.773438 43.507812 17.054688 C 45.125 15.339844 45.25 12.691406 43.804688 10.828125 L 42.355469 11.894531 C 43.074219 12.789062 43.21875 14.019531 42.730469 15.0625 C 42.242188 16.101562 41.207031 16.769531 40.0625 16.78125 L 40.0625 15.644531 Z M 37.339844 17.683594 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 46.570312 9.296875 L 48.980469 9.296875 L 49.472656 15.664062 L 52.90625 9.296875 L 55.195312 9.328125 L 56.09375 15.664062 L 59.121094 9.296875 L 61.328125 9.328125 L 56.746094 18.625 L 54.539062 18.625 L 53.476562 11.960938 L 49.960938 18.625 L 47.671875 18.625 Z M 46.570312 9.296875 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 62.65625 9.460938 L 62.199219 9.460938 L 
62.199219 9.300781 L 63.300781 9.300781 L 63.300781 9.464844 L 62.84375 9.464844 L 62.84375 10.808594 L 62.65625 10.808594 Z M 62.65625 9.460938 " + }, + "children": [] + }, + { + "type": "element", + "name": "path", + "attributes": { + "style": " stroke:none;fill-rule:nonzero;fill:rgb(0.392157%,58.039218%,88.627452%);fill-opacity:1;", + "d": "M 63.578125 9.300781 L 63.804688 9.300781 L 64.09375 10.105469 C 64.128906 10.207031 64.164062 10.3125 64.199219 10.417969 L 64.210938 10.417969 C 64.246094 10.3125 64.277344 10.207031 64.3125 10.105469 L 64.597656 9.300781 L 64.824219 9.300781 L 64.824219 10.808594 L 64.648438 10.808594 L 64.648438 9.976562 C 64.648438 9.847656 64.664062 9.664062 64.671875 9.53125 L 64.664062 9.53125 L 64.546875 9.875 L 64.265625 10.65625 L 64.140625 10.65625 L 63.855469 9.875 L 63.742188 9.53125 L 63.730469 9.53125 C 63.742188 9.664062 63.757812 9.847656 63.757812 9.980469 L 63.757812 10.8125 L 63.582031 10.8125 Z M 63.578125 9.300781 " + }, + "children": [] + } + ] + } + ] + }, + "name": "MlflowIconBig" +} diff --git a/web/app/components/base/icons/src/public/tracing/MlflowIconBig.tsx b/web/app/components/base/icons/src/public/tracing/MlflowIconBig.tsx new file mode 100644 index 0000000000..1452799114 --- /dev/null +++ b/web/app/components/base/icons/src/public/tracing/MlflowIconBig.tsx @@ -0,0 +1,20 @@ +// GENERATE BY script +// DON NOT EDIT IT MANUALLY + +import * as React from 'react' +import data from './MlflowIconBig.json' +import IconBase from '@/app/components/base/icons/IconBase' +import type { IconData } from '@/app/components/base/icons/IconBase' + +const Icon = ( + { + ref, + ...props + }: React.SVGProps & { + ref?: React.RefObject>; + }, +) => + +Icon.displayName = 'MlflowIconBig' + +export default Icon diff --git a/web/app/components/base/icons/src/public/tracing/index.ts b/web/app/components/base/icons/src/public/tracing/index.ts index 8911798b56..ca92270c95 100644 --- 
a/web/app/components/base/icons/src/public/tracing/index.ts +++ b/web/app/components/base/icons/src/public/tracing/index.ts @@ -2,10 +2,14 @@ export { default as AliyunIconBig } from './AliyunIconBig' export { default as AliyunIcon } from './AliyunIcon' export { default as ArizeIconBig } from './ArizeIconBig' export { default as ArizeIcon } from './ArizeIcon' +export { default as DatabricksIconBig } from './DatabricksIconBig' +export { default as DatabricksIcon } from './DatabricksIcon' export { default as LangfuseIconBig } from './LangfuseIconBig' export { default as LangfuseIcon } from './LangfuseIcon' export { default as LangsmithIconBig } from './LangsmithIconBig' export { default as LangsmithIcon } from './LangsmithIcon' +export { default as MlflowIconBig } from './MlflowIconBig' +export { default as MlflowIcon } from './MlflowIcon' export { default as OpikIconBig } from './OpikIconBig' export { default as OpikIcon } from './OpikIcon' export { default as TencentIconBig } from './TencentIconBig' diff --git a/web/i18n/de-DE/app.ts b/web/i18n/de-DE/app.ts index 480efa6880..744e70cc0a 100644 --- a/web/i18n/de-DE/app.ts +++ b/web/i18n/de-DE/app.ts @@ -160,6 +160,14 @@ const translation = { title: 'Cloud-Monitor', description: 'Die vollständig verwaltete und wartungsfreie Observability-Plattform von Alibaba Cloud ermöglicht eine sofortige Überwachung, Verfolgung und Bewertung von Dify-Anwendungen.', }, + mlflow: { + title: 'MLflow', + description: 'Open-Source-LLMOps-Plattform mit Experiment-Tracking, Observability und Evaluierungen für die sichere Entwicklung von AI/LLM-Anwendungen.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks bietet vollständig verwaltetes MLflow mit starker Governance und Sicherheit für die Speicherung von Trace-Daten.', + }, tencent: { title: 'Tencent APM', description: 'Tencent Application Performance Monitoring bietet umfassendes Tracing und multidimensionale Analyse für LLM-Anwendungen.', diff --git 
a/web/i18n/en-US/app.ts b/web/i18n/en-US/app.ts index 99bab2893c..694329ee14 100644 --- a/web/i18n/en-US/app.ts +++ b/web/i18n/en-US/app.ts @@ -183,6 +183,14 @@ const translation = { title: 'Cloud Monitor', description: 'The fully-managed and maintenance-free observability platform provided by Alibaba Cloud, enables out-of-the-box monitoring, tracing, and evaluation of Dify applications.', }, + mlflow: { + title: 'MLflow', + description: 'MLflow is an open-source platform for experiment management, evaluation, and monitoring of LLM applications.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks offers fully-managed MLflow with strong governance and security for storing trace data.', + }, tencent: { title: 'Tencent APM', description: 'Tencent Application Performance Monitoring provides comprehensive tracing and multi-dimensional analysis for LLM applications.', @@ -192,11 +200,19 @@ const translation = { title: 'Config ', placeholder: 'Enter your {{key}}', project: 'Project', + trackingUri: 'Tracking URI', + experimentId: 'Experiment ID', + username: 'Username', + password: 'Password', publicKey: 'Public Key', secretKey: 'Secret Key', viewDocsLink: 'View {{key}} docs', removeConfirmTitle: 'Remove {{key}} configuration?', removeConfirmContent: 'The current configuration is in use, removing it will turn off the Tracing feature.', + clientId: 'OAuth Client ID', + clientSecret: 'OAuth Client Secret', + personalAccessToken: 'Personal Access Token (legacy)', + databricksHost: 'Databricks Workspace URL', }, }, appSelector: { diff --git a/web/i18n/es-ES/app.ts b/web/i18n/es-ES/app.ts index 5e738b0ecf..086e46d9b7 100644 --- a/web/i18n/es-ES/app.ts +++ b/web/i18n/es-ES/app.ts @@ -163,6 +163,14 @@ const translation = { title: 'Monitor de Nubes', description: 'La plataforma de observabilidad totalmente gestionada y sin mantenimiento proporcionada por Alibaba Cloud, permite la monitorización, trazado y evaluación de aplicaciones Dify de manera inmediata.', 
}, + mlflow: { + title: 'MLflow', + description: 'Plataforma LLMOps de código abierto para seguimiento de experimentos, observabilidad y evaluación, para construir aplicaciones de IA/LLM con confianza.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks ofrece MLflow completamente gestionado con fuerte gobernanza y seguridad para almacenar datos de trazabilidad.', + }, tencent: { title: 'Tencent APM', description: 'Tencent Application Performance Monitoring proporciona rastreo integral y análisis multidimensional para aplicaciones LLM.', diff --git a/web/i18n/fa-IR/app.ts b/web/i18n/fa-IR/app.ts index d4c71adc6e..a5a49afbac 100644 --- a/web/i18n/fa-IR/app.ts +++ b/web/i18n/fa-IR/app.ts @@ -171,6 +171,14 @@ const translation = { title: 'نظارت بر ابر', description: 'پلتفرم مشاهده‌پذیری کاملاً مدیریت‌شده و بدون نیاز به نگهداری که توسط Alibaba Cloud ارائه شده، امکان نظارت، ردیابی و ارزیابی برنامه‌های Dify را به‌صورت آماده و با تنظیمات اولیه فراهم می‌کند.', }, + mlflow: { + title: 'MLflow', + description: 'پلتفرم LLMOps متن‌باز برای ردیابی آزمایش‌ها، مشاهده‌پذیری و ارزیابی، برای ساخت برنامه‌های AI/LLM با اطمینان.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks MLflow کاملاً مدیریت‌شده با حکمرانی و امنیت قوی برای ذخیره‌سازی داده‌های ردیابی ارائه می‌دهد.', + }, tencent: { title: 'تنست ای‌پی‌ام', description: 'نظارت بر عملکرد برنامه‌های Tencent تحلیل‌های جامع و ردیابی چندبعدی برای برنامه‌های LLM ارائه می‌دهد.', diff --git a/web/i18n/fr-FR/app.ts b/web/i18n/fr-FR/app.ts index ee9434e5f2..0146f59092 100644 --- a/web/i18n/fr-FR/app.ts +++ b/web/i18n/fr-FR/app.ts @@ -163,6 +163,14 @@ const translation = { title: 'Surveillance Cloud', description: 'La plateforme d\'observabilité entièrement gérée et sans maintenance fournie par Alibaba Cloud permet une surveillance, un traçage et une évaluation prêts à l\'emploi des applications Dify.', }, + mlflow: { + title: 'MLflow', + description: 'Plateforme LLMOps open source pour le suivi 
d\'expériences, l\'observabilité et l\'évaluation, pour créer des applications IA/LLM en toute confiance.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks propose MLflow entièrement géré avec une gouvernance et une sécurité robustes pour stocker les données de traçabilité.', + }, tencent: { title: 'Tencent APM', description: 'Tencent Application Performance Monitoring fournit une traçabilité complète et une analyse multidimensionnelle pour les applications LLM.', diff --git a/web/i18n/hi-IN/app.ts b/web/i18n/hi-IN/app.ts index 211ca738a2..acb282b573 100644 --- a/web/i18n/hi-IN/app.ts +++ b/web/i18n/hi-IN/app.ts @@ -163,6 +163,14 @@ const translation = { title: 'क्लाउड मॉनिटर', description: 'अलीबाबा क्लाउड द्वारा प्रदान की गई पूरी तरह से प्रबंधित और रखरखाव-मुक्त अवलोकन प्लेटफ़ॉर्म, Dify अनुप्रयोगों की स्वचालित निगरानी, ट्रेसिंग और मूल्यांकन का सक्षम बनाता है।', }, + mlflow: { + title: 'MLflow', + description: 'प्रयोग ट्रैकिंग, अवलोकनीयता और मूल्यांकन के लिए ओपन-सोर्स LLMOps प्लेटफ़ॉर्म, विश्वास के साथ AI/LLM ऐप्स बनाने के लिए।', + }, + databricks: { + title: 'Databricks', + description: 'Databricks मजबूत शासन और सुरक्षा के साथ पूरी तरह से प्रबंधित MLflow प्रदान करता है, ट्रेस डेटा संग्रहीत करने के लिए।', + }, tencent: { title: 'टेनसेंट एपीएम', description: 'Tencent एप्लिकेशन परफॉर्मेंस मॉनिटरिंग LLM एप्लिकेशन के लिए व्यापक ट्रेसिंग और बहु-आयामी विश्लेषण प्रदान करता है।', diff --git a/web/i18n/it-IT/app.ts b/web/i18n/it-IT/app.ts index 3c87e65b33..a2f830dd41 100644 --- a/web/i18n/it-IT/app.ts +++ b/web/i18n/it-IT/app.ts @@ -169,6 +169,14 @@ const translation = { title: 'Monitoraggio Cloud', description: 'La piattaforma di osservabilità completamente gestita e senza manutenzione fornita da Alibaba Cloud consente il monitoraggio, il tracciamento e la valutazione delle applicazioni Dify fin da subito.', }, + mlflow: { + title: 'MLflow', + description: 'Piattaforma LLMOps open source per il tracciamento degli esperimenti, l\'osservabilità e la 
valutazione, per costruire app AI/LLM con sicurezza.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks offre MLflow completamente gestito con forte governance e sicurezza per memorizzare i dati di tracciamento.', + }, tencent: { title: 'Tencent APM', description: 'Tencent Application Performance Monitoring fornisce tracciamento completo e analisi multidimensionale per le applicazioni LLM.', diff --git a/web/i18n/ja-JP/app.ts b/web/i18n/ja-JP/app.ts index 4625d69c52..140e075416 100644 --- a/web/i18n/ja-JP/app.ts +++ b/web/i18n/ja-JP/app.ts @@ -158,14 +158,22 @@ const translation = { }, inUse: '使用中', configProvider: { - title: '配置 ', + title: '設定 ', placeholder: '{{key}}を入力してください', project: 'プロジェクト', + trackingUri: 'トラッキング URI', + experimentId: '実験 ID', + username: 'ユーザー名', + password: 'パスワード', publicKey: '公開キー', secretKey: '秘密キー', viewDocsLink: '{{key}}に関するドキュメントを見る', removeConfirmTitle: '{{key}}の設定を削除しますか?', removeConfirmContent: '現在の設定は使用中です。これを削除すると、トレース機能が無効になります。', + clientId: 'OAuthクライアントID', + clientSecret: 'OAuthクライアントシークレット', + personalAccessToken: '(非推奨)アクセストークン', + databricksHost: 'DatabricksワークスペースのURL', }, weave: { title: '織る', @@ -175,6 +183,14 @@ const translation = { title: 'クラウドモニター', description: 'Alibaba Cloud が提供する完全管理型でメンテナンスフリーの可観測性プラットフォームは、Dify アプリケーションの即時監視、トレース、評価を可能にします。', }, + mlflow: { + title: 'MLflow', + description: 'MLflowはLLMアプリケーションの実験管理・評価・監視を行うためのオープンソースプラットフォームです。Difyアプリの実行をトレースし、デバッグや改善に役立てることができます。', + }, + databricks: { + title: 'Databricks', + description: 'DatabricksはフルマネージドのMLflowサービスを提供し、本番環境のトレースデータを強力なガバナンスとセキュリティの元で保存することができます。', + }, tencent: { title: 'テンセントAPM', description: 'Tencent アプリケーションパフォーマンスモニタリングは、LLM アプリケーションに対して包括的なトレーシングと多次元分析を提供します。', diff --git a/web/i18n/ko-KR/app.ts b/web/i18n/ko-KR/app.ts index 8c64644563..39227f5d2b 100644 --- a/web/i18n/ko-KR/app.ts +++ b/web/i18n/ko-KR/app.ts @@ -178,6 +178,14 @@ const translation = { title: '클라우드 모니터', description: '알리바바 클라우드에서 제공하는 완전 관리형 
및 유지보수가 필요 없는 가시성 플랫폼은 Dify 애플리케이션의 모니터링, 추적 및 평가를 즉시 사용할 수 있도록 지원합니다.', }, + mlflow: { + title: 'MLflow', + description: '실험 추적, 관찰 가능성 및 평가를 위한 오픈 소스 LLMOps 플랫폼으로 AI/LLM 앱을 자신있게 구축합니다.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks는 강력한 거버넌스와 보안을 갖춘 완전 관리형 MLflow를 제공하여 트레이스 데이터 저장을 지원합니다.', + }, tencent: { title: '텐센트 APM', description: '텐센트 애플리케이션 성능 모니터링은 LLM 애플리케이션에 대한 포괄적인 추적 및 다차원 분석을 제공합니다.', diff --git a/web/i18n/pl-PL/app.ts b/web/i18n/pl-PL/app.ts index 9b06320620..f51e150292 100644 --- a/web/i18n/pl-PL/app.ts +++ b/web/i18n/pl-PL/app.ts @@ -164,6 +164,14 @@ const translation = { title: 'Monitor Chmury', description: 'W pełni zarządzana i wolna od konserwacji platforma obserwowalności oferowana przez Alibaba Cloud umożliwia gotowe monitorowanie, śledzenie i oceny aplikacji Dify.', }, + mlflow: { + title: 'MLflow', + description: 'Platforma LLMOps open source do śledzenia eksperymentów, obserwowalności i oceny, aby tworzyć aplikacje AI/LLM z pewnością.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks oferuje w pełni zarządzany MLflow z silną kontrolą i bezpieczeństwem do przechowywania danych śledzenia.', + }, tencent: { title: 'Tencent APM', description: 'Tencent Application Performance Monitoring zapewnia kompleksowe śledzenie i wielowymiarową analizę dla aplikacji LLM.', diff --git a/web/i18n/pt-BR/app.ts b/web/i18n/pt-BR/app.ts index 3051268f8f..cfe0935e10 100644 --- a/web/i18n/pt-BR/app.ts +++ b/web/i18n/pt-BR/app.ts @@ -163,6 +163,14 @@ const translation = { title: 'Monitoramento em Nuvem', description: 'A plataforma de observabilidade totalmente gerenciada e sem manutenção fornecida pela Alibaba Cloud, permite monitoramento, rastreamento e avaliação prontos para uso de aplicações Dify.', }, + mlflow: { + title: 'MLflow', + description: 'Plataforma LLMOps de código aberto para rastreamento de experimentos, observabilidade e avaliação, para construir aplicações de IA/LLM com confiança.', + }, + 
databricks: { + title: 'Databricks', + description: 'Databricks oferece MLflow totalmente gerenciado com forte governança e segurança para armazenar dados de rastreamento.', + }, tencent: { title: 'Tencent APM', description: 'O Monitoramento de Desempenho de Aplicações da Tencent fornece rastreamento abrangente e análise multidimensional para aplicações LLM.', diff --git a/web/i18n/ro-RO/app.ts b/web/i18n/ro-RO/app.ts index 53c8de2ef4..8457476ba4 100644 --- a/web/i18n/ro-RO/app.ts +++ b/web/i18n/ro-RO/app.ts @@ -163,6 +163,14 @@ const translation = { description: 'Platforma de observabilitate SaaS oferită de Alibaba Cloud permite monitorizarea, urmărirea și evaluarea aplicațiilor Dify din cutie.', title: 'Monitorizarea Cloud', }, + mlflow: { + title: 'MLflow', + description: 'Platformă LLMOps open source pentru urmărirea experimentelor, observabilitate și evaluare, pentru a construi aplicații AI/LLM cu încredere.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks oferă MLflow complet gestionat cu o puternică guvernanță și securitate pentru stocarea datelor de urmărire.', + }, tencent: { title: 'Tencent APM', description: 'Monitorizarea Performanței Aplicațiilor Tencent oferă trasabilitate cuprinzătoare și analiză multidimensională pentru aplicațiile LLM.', diff --git a/web/i18n/ru-RU/app.ts b/web/i18n/ru-RU/app.ts index 86f5a83ec1..59f45a1c68 100644 --- a/web/i18n/ru-RU/app.ts +++ b/web/i18n/ru-RU/app.ts @@ -171,6 +171,14 @@ const translation = { title: 'Облачный монитор', description: 'Полностью управляемая и не требующая обслуживания платформа наблюдения, предоставляемая Alibaba Cloud, обеспечивает мониторинг, трассировку и оценку приложений Dify из коробки.', }, + mlflow: { + title: 'MLflow', + description: 'Платформа LLMOps с открытым исходным кодом для отслеживания экспериментов, наблюдаемости и оценки, для создания приложений AI/LLM с уверенностью.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks предлагает 
полностью управляемый MLflow с сильным управлением и безопасностью для хранения данных трассировки.', + }, tencent: { title: 'Tencent APM', description: 'Мониторинг производительности приложений Tencent предоставляет всестороннее отслеживание и многомерный анализ для приложений LLM.', diff --git a/web/i18n/sl-SI/app.ts b/web/i18n/sl-SI/app.ts index d755b371ba..2e72adea24 100644 --- a/web/i18n/sl-SI/app.ts +++ b/web/i18n/sl-SI/app.ts @@ -176,6 +176,14 @@ const translation = { title: 'Oblačni nadzor', description: 'Popolnoma upravljana in brez vzdrževanja platforma za opazovanje, ki jo zagotavlja Alibaba Cloud, omogoča takojšnje spremljanje, sledenje in ocenjevanje aplikacij Dify.', }, + mlflow: { + title: 'MLflow', + description: 'Odprtokodna platforma LLMOps za sledenje eksperimentom, opazljivost in ocenjevanje, za gradnjo aplikacij AI/LLM z zaupanjem.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks ponuja popolnoma upravljan MLflow z močnim upravljanjem in varnostjo za shranjevanje podatkov sledenja.', + }, tencent: { description: 'Tencent Application Performance Monitoring zagotavlja celovito sledenje in večdimenzionalno analizo za aplikacije LLM.', title: 'Tencent APM', diff --git a/web/i18n/th-TH/app.ts b/web/i18n/th-TH/app.ts index 18e9511259..51924721b4 100644 --- a/web/i18n/th-TH/app.ts +++ b/web/i18n/th-TH/app.ts @@ -172,6 +172,14 @@ const translation = { title: 'การตรวจสอบคลาวด์', description: 'แพลตฟอร์มการสังเกตการณ์ที่จัดการโดย Alibaba Cloud ซึ่งไม่ต้องดูแลและบำรุงรักษา ช่วยให้สามารถติดตาม ตรวจสอบ และประเมินแอปพลิเคชัน Dify ได้ทันที', }, + mlflow: { + title: 'MLflow', + description: 'แพลตฟอร์ม LLMOps โอเพนซอร์สสำหรับการติดตามการทดลอง การสังเกตการณ์ และการประเมินผล เพื่อสร้างแอป AI/LLM ด้วยความมั่นใจ', + }, + databricks: { + title: 'Databricks', + description: 'Databricks ให้บริการ MLflow ที่จัดการแบบเต็มรูปแบบพร้อมการกำกับดูแลและความปลอดภัยที่แข็งแกร่งสำหรับการจัดเก็บข้อมูลการติดตาม', + }, tencent: { title: 'Tencent APM', 
description: 'การติดตามประสิทธิภาพแอปพลิเคชันของ Tencent มอบการตรวจสอบแบบครบวงจรและการวิเคราะห์หลายมิติสำหรับแอป LLM', diff --git a/web/i18n/tr-TR/app.ts b/web/i18n/tr-TR/app.ts index 2f78f452a5..50adab2426 100644 --- a/web/i18n/tr-TR/app.ts +++ b/web/i18n/tr-TR/app.ts @@ -167,6 +167,14 @@ const translation = { title: 'Bulut İzleyici', description: 'Alibaba Cloud tarafından sağlanan tamamen yönetilen ve bakım gerektirmeyen gözlemleme platformu, Dify uygulamalarının kutudan çıkar çıkmaz izlenmesi, takip edilmesi ve değerlendirilmesine olanak tanır.', }, + mlflow: { + title: 'MLflow', + description: 'Deney takibi, gözlemlenebilirlik ve değerlendirme için açık kaynaklı LLMOps platformu, AI/LLM uygulamalarını güvenle oluşturmak için.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks, iz veri depolama için güçlü yönetişim ve güvenlik ile tamamen yönetilen MLflow sunar.', + }, tencent: { title: 'Tencent APM', description: 'Tencent Uygulama Performans İzleme, LLM uygulamaları için kapsamlı izleme ve çok boyutlu analiz sağlar.', diff --git a/web/i18n/uk-UA/app.ts b/web/i18n/uk-UA/app.ts index ffd50a7cb4..5ccdf61894 100644 --- a/web/i18n/uk-UA/app.ts +++ b/web/i18n/uk-UA/app.ts @@ -163,6 +163,14 @@ const translation = { title: 'Моніторинг Хмари', description: 'Повністю керовані та без обслуговування платформи спостереження, надані Alibaba Cloud, дозволяють миттєвий моніторинг, трасування та оцінку застосувань Dify.', }, + mlflow: { + title: 'MLflow', + description: 'Платформа LLMOps з відкритим кодом для відстеження експериментів, спостережуваності та оцінки, для створення додатків AI/LLM з впевненістю.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks пропонує повністю керований MLflow з сильною управлінням та безпекою для зберігання даних трасування.', + }, tencent: { title: 'Tencent APM', description: 'Сервіс моніторингу продуктивності додатків Tencent забезпечує комплексне трасування та багатовимірний аналіз додатків LLM.', 
diff --git a/web/i18n/vi-VN/app.ts b/web/i18n/vi-VN/app.ts index 5efd1af4a6..6a1cea0578 100644 --- a/web/i18n/vi-VN/app.ts +++ b/web/i18n/vi-VN/app.ts @@ -163,6 +163,14 @@ const translation = { title: 'Giám sát Đám mây', description: 'Nền tảng quan sát được quản lý hoàn toàn và không cần bảo trì do Alibaba Cloud cung cấp, cho phép giám sát, theo dõi và đánh giá các ứng dụng Dify ngay lập tức.', }, + mlflow: { + title: 'MLflow', + description: 'Nền tảng LLMOps mã nguồn mở cho theo dõi thử nghiệm, khả năng quan sát và đánh giá, để xây dựng ứng dụng AI/LLM với sự tự tin.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks cung cấp MLflow được quản lý hoàn toàn với quản trị mạnh mẽ và bảo mật để lưu trữ dữ liệu theo dõi.', + }, tencent: { title: 'Tencent APM', description: 'Giám sát hiệu suất ứng dụng của Tencent cung cấp khả năng theo dõi toàn diện và phân tích đa chiều cho các ứng dụng LLM.', diff --git a/web/i18n/zh-Hans/app.ts b/web/i18n/zh-Hans/app.ts index 53b4ef784a..89360f6065 100644 --- a/web/i18n/zh-Hans/app.ts +++ b/web/i18n/zh-Hans/app.ts @@ -192,6 +192,14 @@ const translation = { title: '云监控', description: '阿里云提供的全托管免运维可观测平台,一键开启Dify应用的监控追踪和评估', }, + mlflow: { + title: 'MLflow', + description: '开源LLMOps平台,提供实验跟踪、可观测性和评估功能,帮助您自信地构建AI/LLM应用。', + }, + databricks: { + title: 'Databricks', + description: 'Databricks提供完全托管的MLflow,具有强大的治理和安全功能,用于存储跟踪数据。', + }, tencent: { title: '腾讯云 APM', description: '腾讯云应用性能监控,提供 LLM 应用全链路追踪和多维分析', diff --git a/web/i18n/zh-Hant/app.ts b/web/i18n/zh-Hant/app.ts index c7a69d9b3c..6a17751400 100644 --- a/web/i18n/zh-Hant/app.ts +++ b/web/i18n/zh-Hant/app.ts @@ -162,6 +162,14 @@ const translation = { title: '雲端監控', description: '阿里雲提供的完全管理且無需維護的可觀察性平台,支持即時監控、追蹤和評估 Dify 應用程序。', }, + mlflow: { + title: 'MLflow', + description: '開源LLMOps平台,提供實驗追蹤、可觀測性和評估功能,幫助您自信地構建AI/LLM應用。', + }, + databricks: { + title: 'Databricks', + description: 'Databricks提供完全託管的MLflow,具有強大的治理和安全功能,用於存儲追蹤數據。', + }, tencent: { title: '騰訊 
APM', description: '騰訊應用性能監控為大型語言模型應用提供全面的追蹤和多維分析。', diff --git a/web/models/app.ts b/web/models/app.ts index e0f31ff26e..fa148511f0 100644 --- a/web/models/app.ts +++ b/web/models/app.ts @@ -1,8 +1,10 @@ import type { AliyunConfig, ArizeConfig, + DatabricksConfig, LangFuseConfig, LangSmithConfig, + MLflowConfig, OpikConfig, PhoenixConfig, TencentConfig, @@ -119,7 +121,7 @@ export type TracingStatus = { export type TracingConfig = { tracing_provider: TracingProvider - tracing_config: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig + tracing_config: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | DatabricksConfig | MLflowConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig } export type WebhookTriggerResponse = { From 1dfde240cbfe5dcf42028d0331431e0afd813351 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 22 Nov 2025 13:54:08 +0800 Subject: [PATCH 03/22] chore: translate i18n files and update type definitions (#28518) Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> --- web/i18n/de-DE/share.ts | 1 + web/i18n/es-ES/share.ts | 1 + web/i18n/fa-IR/share.ts | 1 + web/i18n/fr-FR/share.ts | 1 + web/i18n/hi-IN/share.ts | 1 + web/i18n/id-ID/share.ts | 1 + web/i18n/it-IT/share.ts | 1 + web/i18n/ja-JP/share.ts | 1 + web/i18n/ko-KR/share.ts | 1 + web/i18n/pl-PL/share.ts | 1 + web/i18n/pt-BR/share.ts | 1 + web/i18n/ro-RO/share.ts | 1 + web/i18n/ru-RU/share.ts | 1 + web/i18n/sl-SI/share.ts | 1 + web/i18n/th-TH/share.ts | 1 + web/i18n/tr-TR/share.ts | 1 + web/i18n/uk-UA/share.ts | 1 + web/i18n/vi-VN/share.ts | 1 + web/i18n/zh-Hans/share.ts | 1 + web/i18n/zh-Hant/share.ts | 1 + 20 files changed, 20 insertions(+) diff --git a/web/i18n/de-DE/share.ts b/web/i18n/de-DE/share.ts index 33c40917dd..8d828229d4 100644 --- a/web/i18n/de-DE/share.ts +++ b/web/i18n/de-DE/share.ts @@ -76,6 +76,7 @@ 
const translation = { }, executions: '{{num}} HINRICHTUNGEN', execution: 'AUSFÜHRUNG', + stopRun: 'Stopp Lauf', }, login: { backToHome: 'Zurück zur Startseite', diff --git a/web/i18n/es-ES/share.ts b/web/i18n/es-ES/share.ts index caeb056d89..45c6bb1c71 100644 --- a/web/i18n/es-ES/share.ts +++ b/web/i18n/es-ES/share.ts @@ -76,6 +76,7 @@ const translation = { }, execution: 'EJECUCIÓN', executions: '{{num}} EJECUCIONES', + stopRun: 'Detener carrera', }, login: { backToHome: 'Volver a Inicio', diff --git a/web/i18n/fa-IR/share.ts b/web/i18n/fa-IR/share.ts index 03ed4e8ea9..17ae970838 100644 --- a/web/i18n/fa-IR/share.ts +++ b/web/i18n/fa-IR/share.ts @@ -72,6 +72,7 @@ const translation = { }, executions: '{{num}} اعدام', execution: 'اجرا', + stopRun: 'توقف کن، بدو', }, login: { backToHome: 'بازگشت به خانه', diff --git a/web/i18n/fr-FR/share.ts b/web/i18n/fr-FR/share.ts index 2374da70e6..c2c0d262db 100644 --- a/web/i18n/fr-FR/share.ts +++ b/web/i18n/fr-FR/share.ts @@ -76,6 +76,7 @@ const translation = { }, executions: '{{num}} EXÉCUTIONS', execution: 'EXÉCUTION', + stopRun: 'Arrêtez de courir', }, login: { backToHome: 'Retour à l\'accueil', diff --git a/web/i18n/hi-IN/share.ts b/web/i18n/hi-IN/share.ts index 2e078a0a3b..760fc36d1d 100644 --- a/web/i18n/hi-IN/share.ts +++ b/web/i18n/hi-IN/share.ts @@ -76,6 +76,7 @@ const translation = { }, execution: 'अनु执行', executions: '{{num}} फाँसी', + stopRun: 'रोकें या दौड़ना बंद करें', }, login: { backToHome: 'होम पर वापस', diff --git a/web/i18n/id-ID/share.ts b/web/i18n/id-ID/share.ts index 0cf47804cc..eb2bd67df5 100644 --- a/web/i18n/id-ID/share.ts +++ b/web/i18n/id-ID/share.ts @@ -67,6 +67,7 @@ const translation = { queryPlaceholder: 'Tulis konten kueri Anda...', resultTitle: 'Penyelesaian AI', browse: 'ramban', + stopRun: 'Berhenti Lari', }, login: { backToHome: 'Kembali ke Beranda', diff --git a/web/i18n/it-IT/share.ts b/web/i18n/it-IT/share.ts index 4c6c18ff33..8226355ba7 100644 --- a/web/i18n/it-IT/share.ts +++ 
b/web/i18n/it-IT/share.ts @@ -78,6 +78,7 @@ const translation = { }, execution: 'ESECUZIONE', executions: '{{num}} ESECUZIONI', + stopRun: 'Ferma la corsa', }, login: { backToHome: 'Torna alla home', diff --git a/web/i18n/ja-JP/share.ts b/web/i18n/ja-JP/share.ts index 20dad7faec..8ae9bc1728 100644 --- a/web/i18n/ja-JP/share.ts +++ b/web/i18n/ja-JP/share.ts @@ -72,6 +72,7 @@ const translation = { moreThanMaxLengthLine: '{{rowIndex}}行目:{{varName}}が制限長({{maxLength}})を超過', atLeastOne: '1 行以上のデータが必要です', }, + stopRun: '走るのをやめろ', }, login: { backToHome: 'ホームに戻る', diff --git a/web/i18n/ko-KR/share.ts b/web/i18n/ko-KR/share.ts index 3958b4f93e..41821c19c1 100644 --- a/web/i18n/ko-KR/share.ts +++ b/web/i18n/ko-KR/share.ts @@ -72,6 +72,7 @@ const translation = { }, execution: '실행', executions: '{{num}} 처형', + stopRun: '멈춰 달려', }, login: { backToHome: '홈으로 돌아가기', diff --git a/web/i18n/pl-PL/share.ts b/web/i18n/pl-PL/share.ts index 617f66d994..dbc5612963 100644 --- a/web/i18n/pl-PL/share.ts +++ b/web/i18n/pl-PL/share.ts @@ -77,6 +77,7 @@ const translation = { }, executions: '{{num}} EGZEKUCJI', execution: 'WYKONANIE', + stopRun: 'Zatrzymaj bieg', }, login: { backToHome: 'Powrót do strony głównej', diff --git a/web/i18n/pt-BR/share.ts b/web/i18n/pt-BR/share.ts index 9a9d7db632..4b33f6df1c 100644 --- a/web/i18n/pt-BR/share.ts +++ b/web/i18n/pt-BR/share.ts @@ -76,6 +76,7 @@ const translation = { }, executions: '{{num}} EXECUÇÕES', execution: 'EXECUÇÃO', + stopRun: 'Pare de correr', }, login: { backToHome: 'Voltar para a página inicial', diff --git a/web/i18n/ro-RO/share.ts b/web/i18n/ro-RO/share.ts index 41e38812c5..63d383c8f7 100644 --- a/web/i18n/ro-RO/share.ts +++ b/web/i18n/ro-RO/share.ts @@ -76,6 +76,7 @@ const translation = { }, execution: 'EXECUȚIE', executions: '{{num}} EXECUȚII', + stopRun: 'Oprește alergarea', }, login: { backToHome: 'Înapoi la Acasă', diff --git a/web/i18n/ru-RU/share.ts b/web/i18n/ru-RU/share.ts index dafbe9d6b1..3b9f109ebf 100644 --- 
a/web/i18n/ru-RU/share.ts +++ b/web/i18n/ru-RU/share.ts @@ -76,6 +76,7 @@ const translation = { }, execution: 'ИСПОЛНЕНИЕ', executions: '{{num}} ВЫПОЛНЕНИЯ', + stopRun: 'Остановись, убегая', }, login: { backToHome: 'Назад на главную', diff --git a/web/i18n/sl-SI/share.ts b/web/i18n/sl-SI/share.ts index 8b7fe87cbd..21da9bf090 100644 --- a/web/i18n/sl-SI/share.ts +++ b/web/i18n/sl-SI/share.ts @@ -73,6 +73,7 @@ const translation = { }, execution: 'IZVEDBA', executions: '{{num}} IZVRŠITEV', + stopRun: 'Ustavi teči', }, login: { backToHome: 'Nazaj na začetno stran', diff --git a/web/i18n/th-TH/share.ts b/web/i18n/th-TH/share.ts index eca049b9a2..cfcffcda52 100644 --- a/web/i18n/th-TH/share.ts +++ b/web/i18n/th-TH/share.ts @@ -72,6 +72,7 @@ const translation = { }, execution: 'การดำเนินการ', executions: '{{num}} การประหารชีวิต', + stopRun: 'หยุดวิ่ง', }, login: { backToHome: 'กลับไปที่หน้าแรก', diff --git a/web/i18n/tr-TR/share.ts b/web/i18n/tr-TR/share.ts index e7ad4fcd68..ae2804e22c 100644 --- a/web/i18n/tr-TR/share.ts +++ b/web/i18n/tr-TR/share.ts @@ -72,6 +72,7 @@ const translation = { }, execution: 'İFRAZAT', executions: '{{num}} İDAM', + stopRun: 'Dur Koş', }, login: { backToHome: 'Ana Sayfaya Dön', diff --git a/web/i18n/uk-UA/share.ts b/web/i18n/uk-UA/share.ts index 92f25545d9..9ac8f95ba5 100644 --- a/web/i18n/uk-UA/share.ts +++ b/web/i18n/uk-UA/share.ts @@ -72,6 +72,7 @@ const translation = { }, execution: 'ВИКОНАННЯ', executions: '{{num}} ВИКОНАНЬ', + stopRun: 'Зупинись, не біжи', }, login: { backToHome: 'Повернутися на головну', diff --git a/web/i18n/vi-VN/share.ts b/web/i18n/vi-VN/share.ts index 12a31bd40b..9138a78caa 100644 --- a/web/i18n/vi-VN/share.ts +++ b/web/i18n/vi-VN/share.ts @@ -72,6 +72,7 @@ const translation = { }, executions: '{{num}} ÁN TỬ HÌNH', execution: 'THI HÀNH', + stopRun: 'Dừng lại', }, login: { backToHome: 'Trở về Trang Chủ', diff --git a/web/i18n/zh-Hans/share.ts b/web/i18n/zh-Hans/share.ts index ce1270dae8..4db47e9241 100644 --- 
a/web/i18n/zh-Hans/share.ts +++ b/web/i18n/zh-Hans/share.ts @@ -72,6 +72,7 @@ const translation = { moreThanMaxLengthLine: '第 {{rowIndex}} 行:{{varName}}值超过最大长度 {{maxLength}}', atLeastOne: '上传文件的内容不能少于一条', }, + stopRun: '停止跑', }, login: { backToHome: '返回首页', diff --git a/web/i18n/zh-Hant/share.ts b/web/i18n/zh-Hant/share.ts index e25aa0c0de..eaecd4f000 100644 --- a/web/i18n/zh-Hant/share.ts +++ b/web/i18n/zh-Hant/share.ts @@ -72,6 +72,7 @@ const translation = { }, execution: '執行', executions: '{{num}} 執行', + stopRun: '停止奔跑', }, login: { backToHome: '返回首頁', From c75a4e6309de08474dc2c6951cbd4a52e92514e5 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 23 Nov 2025 15:47:57 +0800 Subject: [PATCH 04/22] chore: translate i18n files and update type definitions (#28528) Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> Co-authored-by: yyh <92089059+lyzno1@users.noreply.github.com> --- web/i18n/de-DE/app.ts | 10 ++++++++++ web/i18n/es-ES/app.ts | 10 ++++++++++ web/i18n/fa-IR/app.ts | 10 ++++++++++ web/i18n/fr-FR/app.ts | 10 ++++++++++ web/i18n/hi-IN/app.ts | 10 ++++++++++ web/i18n/id-ID/app.ts | 18 ++++++++++++++++++ web/i18n/it-IT/app.ts | 10 ++++++++++ web/i18n/ja-JP/app.ts | 4 +++- web/i18n/ko-KR/app.ts | 10 ++++++++++ web/i18n/pl-PL/app.ts | 10 ++++++++++ web/i18n/pt-BR/app.ts | 10 ++++++++++ web/i18n/ro-RO/app.ts | 10 ++++++++++ web/i18n/ru-RU/app.ts | 10 ++++++++++ web/i18n/sl-SI/app.ts | 10 ++++++++++ web/i18n/th-TH/app.ts | 10 ++++++++++ web/i18n/tr-TR/app.ts | 10 ++++++++++ web/i18n/uk-UA/app.ts | 10 ++++++++++ web/i18n/vi-VN/app.ts | 10 ++++++++++ web/i18n/zh-Hans/app.ts | 8 ++++++++ web/i18n/zh-Hant/app.ts | 10 ++++++++++ 20 files changed, 199 insertions(+), 1 deletion(-) diff --git a/web/i18n/de-DE/app.ts b/web/i18n/de-DE/app.ts index 744e70cc0a..ce606d5089 100644 --- a/web/i18n/de-DE/app.ts +++ b/web/i18n/de-DE/app.ts @@ -146,6 +146,14 @@ const translation = { 
viewDocsLink: '{{key}}-Dokumentation ansehen', removeConfirmTitle: '{{key}}-Konfiguration entfernen?', removeConfirmContent: 'Die aktuelle Konfiguration wird verwendet. Das Entfernen wird die Nachverfolgungsfunktion ausschalten.', + password: 'Passwort', + databricksHost: 'Databricks-Workspace-URL', + clientSecret: 'OAuth-Client-Geheimnis', + personalAccessToken: 'Persönlicher Zugriffsschlüssel (Legacy)', + experimentId: 'Experiment-ID', + username: 'Benutzername', + trackingUri: 'Tracking-URI', + clientId: 'OAuth-Client-ID', }, view: 'Ansehen', opik: { @@ -336,6 +344,8 @@ const translation = { startTyping: 'Beginnen Sie mit der Eingabe, um zu suchen', selectToNavigate: 'Auswählen, um zu navigieren', }, + notPublishedYet: 'App ist noch nicht veröffentlicht', + noUserInputNode: 'Fehlender Benutzereingabeknoten', } export default translation diff --git a/web/i18n/es-ES/app.ts b/web/i18n/es-ES/app.ts index 086e46d9b7..5ca88414f6 100644 --- a/web/i18n/es-ES/app.ts +++ b/web/i18n/es-ES/app.ts @@ -149,6 +149,14 @@ const translation = { viewDocsLink: 'Ver documentación de {{key}}', removeConfirmTitle: '¿Eliminar la configuración de {{key}}?', removeConfirmContent: 'La configuración actual está en uso, eliminarla desactivará la función de rastreo.', + password: 'Contraseña', + experimentId: 'ID del experimento', + trackingUri: 'URI de seguimiento', + username: 'Nombre de usuario', + databricksHost: 'URL del espacio de trabajo de Databricks', + clientSecret: 'Secreto del cliente OAuth', + clientId: 'ID de cliente OAuth', + personalAccessToken: 'Token de Acceso Personal (antiguo)', }, view: 'Vista', opik: { @@ -334,6 +342,8 @@ const translation = { startTyping: 'Empieza a escribir para buscar', tips: 'Presiona ↑↓ para navegar', }, + notPublishedYet: 'La aplicación aún no está publicada', + noUserInputNode: 'Nodo de entrada de usuario faltante', } export default translation diff --git a/web/i18n/fa-IR/app.ts b/web/i18n/fa-IR/app.ts index a5a49afbac..db3295eed2 100644 --- 
a/web/i18n/fa-IR/app.ts +++ b/web/i18n/fa-IR/app.ts @@ -157,6 +157,14 @@ const translation = { viewDocsLink: 'مشاهده مستندات {{key}}', removeConfirmTitle: 'حذف پیکربندی {{key}}؟', removeConfirmContent: 'پیکربندی فعلی در حال استفاده است، حذف آن ویژگی ردیابی را غیرفعال خواهد کرد.', + clientId: 'شناسه مشتری OAuth', + username: 'نام کاربری', + password: 'رمز عبور', + experimentId: 'شناسه آزمایش', + personalAccessToken: 'نشانه دسترسی شخصی (قدیمی)', + databricksHost: 'نشانی اینترنتی محیط کاری دیتابریکس', + trackingUri: 'آدرس URI ردیابی', + clientSecret: 'رمز مخفی مشتری OAuth', }, view: 'مشاهده', opik: { @@ -334,6 +342,8 @@ const translation = { pressEscToClose: 'برای بستن ESC را فشار دهید', tips: 'برای حرکت به بالا و پایین کلیدهای ↑ و ↓ را فشار دهید', }, + noUserInputNode: 'ورودی کاربر پیدا نشد', + notPublishedYet: 'اپ هنوز منتشر نشده است', } export default translation diff --git a/web/i18n/fr-FR/app.ts b/web/i18n/fr-FR/app.ts index 0146f59092..8ab52d3ce8 100644 --- a/web/i18n/fr-FR/app.ts +++ b/web/i18n/fr-FR/app.ts @@ -149,6 +149,14 @@ const translation = { viewDocsLink: 'Voir la documentation de {{key}}', removeConfirmTitle: 'Supprimer la configuration de {{key}} ?', removeConfirmContent: 'La configuration actuelle est en cours d\'utilisation, sa suppression désactivera la fonction de Traçage.', + password: 'Mot de passe', + trackingUri: 'URI de suivi', + clientId: 'ID client OAuth', + clientSecret: 'Secret client OAuth', + username: 'Nom d\'utilisateur', + experimentId: 'ID de l\'expérience', + personalAccessToken: 'Jeton d\'accès personnel (ancien)', + databricksHost: 'URL de l\'espace de travail Databricks', }, view: 'Vue', opik: { @@ -334,6 +342,8 @@ const translation = { startTyping: 'Commencez à taper pour rechercher', selectToNavigate: 'Sélectionnez pour naviguer', }, + noUserInputNode: 'Nœud d\'entrée utilisateur manquant', + notPublishedYet: 'L\'application n\'est pas encore publiée', } export default translation diff --git a/web/i18n/hi-IN/app.ts 
b/web/i18n/hi-IN/app.ts index acb282b573..e0fe95f424 100644 --- a/web/i18n/hi-IN/app.ts +++ b/web/i18n/hi-IN/app.ts @@ -149,6 +149,14 @@ const translation = { viewDocsLink: '{{key}} दस्तावेज़ देखें', removeConfirmTitle: '{{key}} कॉन्फ़िगरेशन हटाएं?', removeConfirmContent: 'वर्तमान कॉन्फ़िगरेशन उपयोग में है, इसे हटाने से ट्रेसिंग सुविधा बंद हो जाएगी।', + password: 'पासवर्ड', + clientId: 'OAuth क्लाइंट आईडी', + clientSecret: 'OAuth क्लाइंट सीक्रेट', + trackingUri: 'ट्रैकिंग यूआरआई', + username: 'उपयोगकर्ता नाम', + experimentId: 'प्रयोग आईडी', + databricksHost: 'डेटाब्रिक्स वर्कस्पेस यूआरएल', + personalAccessToken: 'व्यक्तिगत एक्सेस टोकन (पुराना)', }, view: 'देखना', opik: { @@ -334,6 +342,8 @@ const translation = { selectToNavigate: 'नेविगेट करने के लिए चुनें', tips: 'नेविगेट करने के लिए ↑↓ दबाएँ', }, + noUserInputNode: 'उपयोगकर्ता इनपुट नोड गायब है', + notPublishedYet: 'ऐप अभी प्रकाशित नहीं हुआ है', } export default translation diff --git a/web/i18n/id-ID/app.ts b/web/i18n/id-ID/app.ts index 2072bec35e..3babd9ee9d 100644 --- a/web/i18n/id-ID/app.ts +++ b/web/i18n/id-ID/app.ts @@ -142,6 +142,14 @@ const translation = { removeConfirmContent: 'Konfigurasi saat ini sedang digunakan, menghapusnya akan mematikan fitur Pelacakan.', title: 'Konfigurasi', secretKey: 'Kunci Rahasia', + experimentId: 'ID Eksperimen', + trackingUri: 'URI Pelacakan', + clientId: 'ID Klien OAuth', + clientSecret: 'Rahasia Klien OAuth', + username: 'Nama Pengguna', + databricksHost: 'URL Workspace Databricks', + personalAccessToken: 'Token Akses Pribadi (lama)', + password: 'Kata sandi', }, expand: 'Memperluas', disabledTip: 'Silakan konfigurasi penyedia terlebih dahulu', @@ -159,6 +167,14 @@ const translation = { title: 'Tencent APM', description: 'Tencent Application Performance Monitoring menyediakan pelacakan komprehensif dan analisis multi-dimensi untuk aplikasi LLM.', }, + mlflow: { + title: 'MLflow', + description: 'MLflow adalah platform sumber terbuka untuk manajemen eksperimen, evaluasi, 
dan pemantauan aplikasi LLM.', + }, + databricks: { + title: 'Databricks', + description: 'Databricks menawarkan MLflow yang sepenuhnya dikelola dengan tata kelola dan keamanan yang kuat untuk menyimpan data jejak.', + }, }, appSelector: { placeholder: 'Pilih aplikasi...', @@ -309,6 +325,8 @@ const translation = { openInExplore: 'Buka di Jelajahi', showMyCreatedAppsOnly: 'Dibuat oleh saya', appDeleteFailed: 'Gagal menghapus aplikasi', + noUserInputNode: 'Node input pengguna hilang', + notPublishedYet: 'Aplikasi belum diterbitkan', } export default translation diff --git a/web/i18n/it-IT/app.ts b/web/i18n/it-IT/app.ts index a2f830dd41..824988af7c 100644 --- a/web/i18n/it-IT/app.ts +++ b/web/i18n/it-IT/app.ts @@ -155,6 +155,14 @@ const translation = { removeConfirmTitle: 'Rimuovere la configurazione di {{key}}?', removeConfirmContent: 'La configurazione attuale è in uso, rimuovendola disattiverà la funzione di Tracciamento.', + password: 'Password', + clientId: 'ID client OAuth', + username: 'Nome utente', + trackingUri: 'URI di tracciamento', + personalAccessToken: 'Token di accesso personale (legacy)', + clientSecret: 'Segreto del client OAuth', + experimentId: 'ID Esperimento', + databricksHost: 'URL dell\'area di lavoro Databricks', }, view: 'Vista', opik: { @@ -340,6 +348,8 @@ const translation = { tips: 'Premi ↑↓ per navigare', pressEscToClose: 'Premi ESC per chiudere', }, + noUserInputNode: 'Nodo di input utente mancante', + notPublishedYet: 'L\'app non è ancora pubblicata', } export default translation diff --git a/web/i18n/ja-JP/app.ts b/web/i18n/ja-JP/app.ts index 140e075416..1456d7d490 100644 --- a/web/i18n/ja-JP/app.ts +++ b/web/i18n/ja-JP/app.ts @@ -172,7 +172,7 @@ const translation = { removeConfirmContent: '現在の設定は使用中です。これを削除すると、トレース機能が無効になります。', clientId: 'OAuthクライアントID', clientSecret: 'OAuthクライアントシークレット', - personalAccessToken: '(非推奨)アクセストークン', + personalAccessToken: 'パーソナルアクセストークン(レガシー)', databricksHost: 'DatabricksワークスペースのURL', }, weave: { @@ -341,6 
+341,8 @@ const translation = { noMatchingCommands: '一致するコマンドが見つかりません', tryDifferentSearch: '別の検索語句をお試しください', }, + notPublishedYet: 'アプリはまだ公開されていません', + noUserInputNode: 'ユーザー入力ノードが見つかりません', } export default translation diff --git a/web/i18n/ko-KR/app.ts b/web/i18n/ko-KR/app.ts index 39227f5d2b..f1bab6f483 100644 --- a/web/i18n/ko-KR/app.ts +++ b/web/i18n/ko-KR/app.ts @@ -162,6 +162,14 @@ const translation = { removeConfirmTitle: '{{key}} 구성을 제거하시겠습니까?', removeConfirmContent: '현재 구성이 사용 중입니다. 제거하면 추적 기능이 꺼집니다.', + username: '사용자 이름', + trackingUri: '추적 URI', + password: '비밀번호', + experimentId: '실험 ID', + clientId: 'OAuth 클라이언트 ID', + clientSecret: 'OAuth 클라이언트 비밀', + databricksHost: 'Databricks 작업 영역 URL', + personalAccessToken: '개인 액세스 토큰(레거시)', }, view: '보기', opik: { @@ -354,6 +362,8 @@ const translation = { selectToNavigate: '선택하여 탐색하기', startTyping: '검색하려면 타이핑을 시작하세요', }, + noUserInputNode: '사용자 입력 노드가 없습니다', + notPublishedYet: '앱이 아직 출시되지 않았습니다', } export default translation diff --git a/web/i18n/pl-PL/app.ts b/web/i18n/pl-PL/app.ts index f51e150292..1cfbe3c744 100644 --- a/web/i18n/pl-PL/app.ts +++ b/web/i18n/pl-PL/app.ts @@ -150,6 +150,14 @@ const translation = { viewDocsLink: 'Zobacz dokumentację {{key}}', removeConfirmTitle: 'Usunąć konfigurację {{key}}?', removeConfirmContent: 'Obecna konfiguracja jest w użyciu, jej usunięcie wyłączy funkcję Śledzenia.', + password: 'Hasło', + experimentId: 'ID eksperymentu', + username: 'Nazwa użytkownika', + trackingUri: 'Śledzenie URI', + clientId: 'ID klienta OAuth', + personalAccessToken: 'Osobisty token dostępu (stary)', + clientSecret: 'Sekretny klucz klienta OAuth', + databricksHost: 'Adres URL obszaru roboczego Databricks', }, view: 'Widok', opik: { @@ -335,6 +343,8 @@ const translation = { startTyping: 'Zacznij pisać, aby wyszukać', pressEscToClose: 'Naciśnij ESC, aby zamknąć', }, + notPublishedYet: 'Aplikacja nie została jeszcze opublikowana', + noUserInputNode: 'Brak węzła wejściowego użytkownika', } export 
default translation diff --git a/web/i18n/pt-BR/app.ts b/web/i18n/pt-BR/app.ts index cfe0935e10..94eeccc4c1 100644 --- a/web/i18n/pt-BR/app.ts +++ b/web/i18n/pt-BR/app.ts @@ -149,6 +149,14 @@ const translation = { viewDocsLink: 'Ver documentação de {{key}}', removeConfirmTitle: 'Remover configuração de {{key}}?', removeConfirmContent: 'A configuração atual está em uso, removê-la desligará o recurso de Rastreamento.', + password: 'Senha', + clientId: 'ID do Cliente OAuth', + clientSecret: 'Segredo do Cliente OAuth', + username: 'Nome de usuário', + personalAccessToken: 'Token de Acesso Pessoal (legado)', + experimentId: 'ID do Experimento', + trackingUri: 'URI de rastreamento', + databricksHost: 'URL do Workspace do Databricks', }, view: 'Vista', opik: { @@ -334,6 +342,8 @@ const translation = { pressEscToClose: 'Pressione ESC para fechar', startTyping: 'Comece a digitar para pesquisar', }, + notPublishedYet: 'O aplicativo ainda não foi publicado', + noUserInputNode: 'Nodo de entrada do usuário ausente', } export default translation diff --git a/web/i18n/ro-RO/app.ts b/web/i18n/ro-RO/app.ts index 8457476ba4..e15b8365a2 100644 --- a/web/i18n/ro-RO/app.ts +++ b/web/i18n/ro-RO/app.ts @@ -149,6 +149,14 @@ const translation = { viewDocsLink: 'Vizualizați documentația {{key}}', removeConfirmTitle: 'Eliminați configurația {{key}}?', removeConfirmContent: 'Configurația curentă este în uz, eliminarea acesteia va dezactiva funcția de Urmărire.', + clientSecret: 'Secret client OAuth', + password: 'Parolă', + experimentId: 'ID-ul experimentului', + databricksHost: 'URL-ul spațiului de lucru Databricks', + trackingUri: 'URI de urmărire', + personalAccessToken: 'Token de acces personal (vechi)', + clientId: 'ID client OAuth', + username: 'Nume de utilizator', }, view: 'Vedere', opik: { @@ -334,6 +342,8 @@ const translation = { tips: 'Apăsați ↑↓ pentru a naviga', pressEscToClose: 'Apăsați ESC pentru a închide', }, + notPublishedYet: 'Aplicația nu este încă publicată', + 
noUserInputNode: 'Lipsă nod de intrare pentru utilizator', } export default translation diff --git a/web/i18n/ru-RU/app.ts b/web/i18n/ru-RU/app.ts index 59f45a1c68..d230d83082 100644 --- a/web/i18n/ru-RU/app.ts +++ b/web/i18n/ru-RU/app.ts @@ -158,6 +158,14 @@ const translation = { viewDocsLink: 'Посмотреть документацию {{key}}', removeConfirmTitle: 'Удалить конфигурацию {{key}}?', removeConfirmContent: 'Текущая конфигурация используется, ее удаление отключит функцию трассировки.', + username: 'Имя пользователя', + password: 'Пароль', + experimentId: 'ID эксперимента', + trackingUri: 'URI отслеживания', + clientSecret: 'Секрет клиента OAuth', + databricksHost: 'URL рабочего пространства Databricks', + clientId: 'Идентификатор клиента OAuth', + personalAccessToken: 'Личный токен доступа (устаревший)', }, opik: { title: 'Опик', @@ -334,6 +342,8 @@ const translation = { selectToNavigate: 'Выберите для навигации', pressEscToClose: 'Нажмите ESC для закрытия', }, + notPublishedYet: 'Приложение ещё не опубликовано', + noUserInputNode: 'Отсутствует узел ввода пользователя', } export default translation diff --git a/web/i18n/sl-SI/app.ts b/web/i18n/sl-SI/app.ts index 2e72adea24..a713d05356 100644 --- a/web/i18n/sl-SI/app.ts +++ b/web/i18n/sl-SI/app.ts @@ -163,6 +163,14 @@ const translation = { viewDocsLink: 'Ogled dokumentov {{key}}', removeConfirmTitle: 'Odstraniti konfiguracijo {{key}}?', removeConfirmContent: 'Trenutna konfiguracija je v uporabi, odstranitev bo onemogočila funkcijo sledenja.', + password: 'Geslo', + personalAccessToken: 'Osebni dostopni žeton (stari)', + experimentId: 'ID eksperimenta', + clientSecret: 'OAuth skrivnost odjemalca', + trackingUri: 'Sledenje URI', + clientId: 'ID odjemalca OAuth', + databricksHost: 'URL delovnega prostora Databricks', + username: 'Uporabniško ime', }, opik: { description: 'Opik je odprtokodna platforma za ocenjevanje, testiranje in spremljanje aplikacij LLM.', @@ -334,6 +342,8 @@ const translation = { selectToNavigate: 
'Izberite za navigacijo', tips: 'Pritisnite ↑↓ za navigacijo', }, + notPublishedYet: 'Aplikacija še ni objavljena', + noUserInputNode: 'Manjka vozel uporabniškega vnosa', } export default translation diff --git a/web/i18n/th-TH/app.ts b/web/i18n/th-TH/app.ts index 51924721b4..052d2a058b 100644 --- a/web/i18n/th-TH/app.ts +++ b/web/i18n/th-TH/app.ts @@ -159,6 +159,14 @@ const translation = { viewDocsLink: 'ดูเอกสาร {{key}}', removeConfirmTitle: 'ลบการกําหนดค่า {{key}} หรือไม่?', removeConfirmContent: 'การกําหนดค่าปัจจุบันกําลังใช้งาน การลบออกจะเป็นการปิดคุณสมบัติการติดตาม', + clientId: 'รหัสลูกค้า OAuth', + trackingUri: 'ติดตาม URI', + databricksHost: 'URL ของ Workspace ใน Databricks', + username: 'ชื่อผู้ใช้', + clientSecret: 'รหัสลับของลูกค้า OAuth', + experimentId: 'รหัสการทดลอง', + password: 'รหัสผ่าน', + personalAccessToken: 'โทเค็นการเข้าถึงส่วนตัว (รุ่นเก่า)', }, opik: { title: 'โอปิก', @@ -330,6 +338,8 @@ const translation = { startTyping: 'เริ่มพิมพ์เพื่อค้นหา', tips: 'กด ↑↓ เพื่อเลื่อนดู', }, + noUserInputNode: 'ไม่มีโหนดป้อนข้อมูลผู้ใช้', + notPublishedYet: 'แอปยังไม่ได้เผยแพร่', } export default translation diff --git a/web/i18n/tr-TR/app.ts b/web/i18n/tr-TR/app.ts index 50adab2426..0af0092888 100644 --- a/web/i18n/tr-TR/app.ts +++ b/web/i18n/tr-TR/app.ts @@ -153,6 +153,14 @@ const translation = { viewDocsLink: '{{key}} dökümanlarını görüntüle', removeConfirmTitle: '{{key}} yapılandırmasını kaldır?', removeConfirmContent: 'Mevcut yapılandırma kullanımda, kaldırılması İzleme özelliğini kapatacaktır.', + password: 'Parola', + clientId: 'OAuth İstemci Kimliği', + databricksHost: 'Databricks Çalışma Alanı URL\'si', + clientSecret: 'OAuth İstemci Sırrı', + username: 'Kullanıcı Adı', + experimentId: 'Deney Kimliği', + personalAccessToken: 'Kişisel Erişim Belirteci (eski)', + trackingUri: 'İzleme URI\'si', }, view: 'Görünüm', opik: { @@ -330,6 +338,8 @@ const translation = { pressEscToClose: 'Kapatmak için ESC tuşuna basın', startTyping: 'Arama yapmak için 
yazmaya başlayın', }, + noUserInputNode: 'Eksik kullanıcı girdi düğümü', + notPublishedYet: 'Uygulama henüz yayımlanmadı', } export default translation diff --git a/web/i18n/uk-UA/app.ts b/web/i18n/uk-UA/app.ts index 5ccdf61894..fb7600f19c 100644 --- a/web/i18n/uk-UA/app.ts +++ b/web/i18n/uk-UA/app.ts @@ -149,6 +149,14 @@ const translation = { viewDocsLink: 'Переглянути документацію {{key}}', removeConfirmTitle: 'Видалити налаштування {{key}}?', removeConfirmContent: 'Поточне налаштування використовується, його видалення вимкне функцію Відстеження.', + password: 'Пароль', + databricksHost: 'URL робочого простору Databricks', + clientId: 'Ідентифікатор клієнта OAuth', + experimentId: 'Ідентифікатор експерименту', + trackingUri: 'Відстеження URI', + personalAccessToken: 'Персональний токен доступу (застарілий)', + username: 'Ім\'я користувача', + clientSecret: 'Секретний ключ клієнта OAuth', }, view: 'Вид', opik: { @@ -334,6 +342,8 @@ const translation = { startTyping: 'Почніть вводити для пошуку', pressEscToClose: 'Натисніть ESC, щоб закрити', }, + notPublishedYet: 'Додаток ще не опублікований', + noUserInputNode: 'Відсутній вузол введення користувача', } export default translation diff --git a/web/i18n/vi-VN/app.ts b/web/i18n/vi-VN/app.ts index 6a1cea0578..4153e996c3 100644 --- a/web/i18n/vi-VN/app.ts +++ b/web/i18n/vi-VN/app.ts @@ -149,6 +149,14 @@ const translation = { viewDocsLink: 'Xem tài liệu {{key}}', removeConfirmTitle: 'Xóa cấu hình {{key}}?', removeConfirmContent: 'Cấu hình hiện tại đang được sử dụng, việc xóa nó sẽ tắt tính năng Theo dõi.', + username: 'Tên người dùng', + password: 'Mật khẩu', + clientId: 'ID Khách Hàng OAuth', + databricksHost: 'URL Workspace của Databricks', + trackingUri: 'URI theo dõi', + clientSecret: 'Bí mật Khách hàng OAuth', + personalAccessToken: 'Mã truy cập cá nhân (cũ)', + experimentId: 'Mã thí nghiệm', }, view: 'Cảnh', opik: { @@ -334,6 +342,8 @@ const translation = { pressEscToClose: 'Nhấn ESC để đóng', tips: 'Nhấn ↑↓ để 
duyệt', }, + noUserInputNode: 'Thiếu nút nhập liệu của người dùng', + notPublishedYet: 'Ứng dụng chưa được phát hành', } export default translation diff --git a/web/i18n/zh-Hans/app.ts b/web/i18n/zh-Hans/app.ts index 89360f6065..f27aed770c 100644 --- a/web/i18n/zh-Hans/app.ts +++ b/web/i18n/zh-Hans/app.ts @@ -183,6 +183,14 @@ const translation = { viewDocsLink: '查看 {{key}} 的文档', removeConfirmTitle: '删除 {{key}} 配置?', removeConfirmContent: '当前配置正在使用中,删除它将关闭追踪功能。', + clientSecret: 'OAuth 客户端密钥', + trackingUri: '跟踪 URI', + password: '密码', + databricksHost: 'Databricks 工作区 URL', + username: '用户名', + clientId: 'OAuth 客户端 ID', + experimentId: '实验编号', + personalAccessToken: '个人访问令牌(旧版)', }, weave: { title: '编织', diff --git a/web/i18n/zh-Hant/app.ts b/web/i18n/zh-Hant/app.ts index 6a17751400..891aad59a6 100644 --- a/web/i18n/zh-Hant/app.ts +++ b/web/i18n/zh-Hant/app.ts @@ -149,6 +149,14 @@ const translation = { viewDocsLink: '查看{{key}}文件', removeConfirmTitle: '移除{{key}}配置?', removeConfirmContent: '當前配置正在使用中,移除它將關閉追蹤功能。', + experimentId: '實驗編號', + databricksHost: 'Databricks 工作區網址', + password: '密碼', + trackingUri: '追蹤 URI', + personalAccessToken: '個人存取權杖(舊版)', + clientSecret: 'OAuth 用戶端密鑰', + username: '使用者名稱', + clientId: 'OAuth 用戶端 ID', }, opik: { title: '奧皮克', @@ -333,6 +341,8 @@ const translation = { pressEscToClose: '按 ESC 鍵關閉', selectToNavigate: '選擇以進行導航', }, + notPublishedYet: '應用程式尚未發布', + noUserInputNode: '缺少使用者輸入節點', } export default translation From e0824c2d9344141e0db8f83a7a75a52bd0b50d59 Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Mon, 24 Nov 2025 11:04:11 +0900 Subject: [PATCH 05/22] api -> console_ns (#28246) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/controllers/console/admin.py | 24 +- api/controllers/console/apikey.py | 62 ++-- .../console/app/advanced_prompt_template.py | 12 +- api/controllers/console/app/agent.py | 16 +- api/controllers/console/app/annotation.py | 138 ++++----- 
api/controllers/console/app/app.py | 164 +++++------ api/controllers/console/app/app_import.py | 3 +- api/controllers/console/app/audio.py | 46 +-- api/controllers/console/app/completion.py | 50 ++-- api/controllers/console/app/conversation.py | 78 ++--- .../console/app/conversation_variables.py | 14 +- api/controllers/console/app/generator.py | 70 ++--- api/controllers/console/app/mcp_server.py | 52 ++-- api/controllers/console/app/message.py | 66 +++-- api/controllers/console/app/model_config.py | 18 +- api/controllers/console/app/ops_trace.py | 58 ++-- api/controllers/console/app/site.py | 30 +- api/controllers/console/app/statistic.py | 84 +++--- api/controllers/console/app/workflow.py | 278 +++++++++--------- .../console/app/workflow_app_log.py | 12 +- .../console/app/workflow_draft_variable.py | 106 +++---- api/controllers/console/app/workflow_run.py | 94 +++--- .../console/app/workflow_statistic.py | 50 ++-- .../console/app/workflow_trigger.py | 8 +- api/controllers/console/auth/activate.py | 24 +- .../console/auth/data_source_oauth.py | 50 ++-- .../console/auth/forgot_password.py | 44 +-- api/controllers/console/auth/oauth.py | 24 +- api/controllers/console/billing/billing.py | 16 +- api/controllers/console/datasets/datasets.py | 156 +++++----- .../console/datasets/datasets_document.py | 90 +++--- api/controllers/console/datasets/external.py | 68 ++--- .../console/datasets/hit_testing.py | 18 +- .../datasets/rag_pipeline/datasource_auth.py | 14 +- .../datasource_content_preview.py | 6 +- .../rag_pipeline/rag_pipeline_workflow.py | 34 +-- api/controllers/console/datasets/website.py | 26 +- api/controllers/console/datasets/wraps.py | 48 ++- .../console/explore/recommended_app.py | 4 +- api/controllers/console/extension.py | 64 ++-- api/controllers/console/feature.py | 20 +- api/controllers/console/init_validate.py | 24 +- api/controllers/console/ping.py | 10 +- api/controllers/console/remote_files.py | 3 +- api/controllers/console/setup.py | 24 +- 
api/controllers/console/tag/tags.py | 10 +- api/controllers/console/version.py | 12 +- api/controllers/console/workspace/account.py | 32 +- .../console/workspace/agent_providers.py | 16 +- api/controllers/console/workspace/endpoint.py | 104 ++++--- api/controllers/console/workspace/members.py | 12 +- .../console/workspace/model_providers.py | 18 +- api/controllers/console/workspace/models.py | 28 +- api/controllers/console/workspace/plugin.py | 38 +-- .../console/workspace/tool_providers.py | 50 ++-- .../console/workspace/trigger_providers.py | 34 ++- .../console/workspace/workspace.py | 6 +- 57 files changed, 1354 insertions(+), 1306 deletions(-) diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py index 2c4d8709eb..da9282cd0c 100644 --- a/api/controllers/console/admin.py +++ b/api/controllers/console/admin.py @@ -12,7 +12,7 @@ P = ParamSpec("P") R = TypeVar("R") from configs import dify_config from constants.languages import supported_language -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import only_edition_cloud from extensions.ext_database import db from libs.token import extract_access_token @@ -38,10 +38,10 @@ def admin_required(view: Callable[P, R]): @console_ns.route("/admin/insert-explore-apps") class InsertExploreAppListApi(Resource): - @api.doc("insert_explore_app") - @api.doc(description="Insert or update an app in the explore list") - @api.expect( - api.model( + @console_ns.doc("insert_explore_app") + @console_ns.doc(description="Insert or update an app in the explore list") + @console_ns.expect( + console_ns.model( "InsertExploreAppRequest", { "app_id": fields.String(required=True, description="Application ID"), @@ -55,9 +55,9 @@ class InsertExploreAppListApi(Resource): }, ) ) - @api.response(200, "App updated successfully") - @api.response(201, "App inserted successfully") - @api.response(404, "App not found") + @console_ns.response(200, "App 
updated successfully") + @console_ns.response(201, "App inserted successfully") + @console_ns.response(404, "App not found") @only_edition_cloud @admin_required def post(self): @@ -131,10 +131,10 @@ class InsertExploreAppListApi(Resource): @console_ns.route("/admin/insert-explore-apps/") class InsertExploreAppApi(Resource): - @api.doc("delete_explore_app") - @api.doc(description="Remove an app from the explore list") - @api.doc(params={"app_id": "Application ID to remove"}) - @api.response(204, "App removed successfully") + @console_ns.doc("delete_explore_app") + @console_ns.doc(description="Remove an app from the explore list") + @console_ns.doc(params={"app_id": "Application ID to remove"}) + @console_ns.response(204, "App removed successfully") @only_edition_cloud @admin_required def delete(self, app_id): diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py index bd5862cbd0..d93858d3fc 100644 --- a/api/controllers/console/apikey.py +++ b/api/controllers/console/apikey.py @@ -11,7 +11,7 @@ from libs.login import current_account_with_tenant, login_required from models.dataset import Dataset from models.model import ApiToken, App -from . import api, console_ns +from . 
import console_ns from .wraps import account_initialization_required, edit_permission_required, setup_required api_key_fields = { @@ -133,20 +133,20 @@ class BaseApiKeyResource(Resource): @console_ns.route("/apps//api-keys") class AppApiKeyListResource(BaseApiKeyListResource): - @api.doc("get_app_api_keys") - @api.doc(description="Get all API keys for an app") - @api.doc(params={"resource_id": "App ID"}) - @api.response(200, "Success", api_key_list) - def get(self, resource_id): + @console_ns.doc("get_app_api_keys") + @console_ns.doc(description="Get all API keys for an app") + @console_ns.doc(params={"resource_id": "App ID"}) + @console_ns.response(200, "Success", api_key_list) + def get(self, resource_id): # type: ignore """Get all API keys for an app""" return super().get(resource_id) - @api.doc("create_app_api_key") - @api.doc(description="Create a new API key for an app") - @api.doc(params={"resource_id": "App ID"}) - @api.response(201, "API key created successfully", api_key_fields) - @api.response(400, "Maximum keys exceeded") - def post(self, resource_id): + @console_ns.doc("create_app_api_key") + @console_ns.doc(description="Create a new API key for an app") + @console_ns.doc(params={"resource_id": "App ID"}) + @console_ns.response(201, "API key created successfully", api_key_fields) + @console_ns.response(400, "Maximum keys exceeded") + def post(self, resource_id): # type: ignore """Create a new API key for an app""" return super().post(resource_id) @@ -158,10 +158,10 @@ class AppApiKeyListResource(BaseApiKeyListResource): @console_ns.route("/apps//api-keys/") class AppApiKeyResource(BaseApiKeyResource): - @api.doc("delete_app_api_key") - @api.doc(description="Delete an API key for an app") - @api.doc(params={"resource_id": "App ID", "api_key_id": "API key ID"}) - @api.response(204, "API key deleted successfully") + @console_ns.doc("delete_app_api_key") + @console_ns.doc(description="Delete an API key for an app") + @console_ns.doc(params={"resource_id": 
"App ID", "api_key_id": "API key ID"}) + @console_ns.response(204, "API key deleted successfully") def delete(self, resource_id, api_key_id): """Delete an API key for an app""" return super().delete(resource_id, api_key_id) @@ -173,20 +173,20 @@ class AppApiKeyResource(BaseApiKeyResource): @console_ns.route("/datasets//api-keys") class DatasetApiKeyListResource(BaseApiKeyListResource): - @api.doc("get_dataset_api_keys") - @api.doc(description="Get all API keys for a dataset") - @api.doc(params={"resource_id": "Dataset ID"}) - @api.response(200, "Success", api_key_list) - def get(self, resource_id): + @console_ns.doc("get_dataset_api_keys") + @console_ns.doc(description="Get all API keys for a dataset") + @console_ns.doc(params={"resource_id": "Dataset ID"}) + @console_ns.response(200, "Success", api_key_list) + def get(self, resource_id): # type: ignore """Get all API keys for a dataset""" return super().get(resource_id) - @api.doc("create_dataset_api_key") - @api.doc(description="Create a new API key for a dataset") - @api.doc(params={"resource_id": "Dataset ID"}) - @api.response(201, "API key created successfully", api_key_fields) - @api.response(400, "Maximum keys exceeded") - def post(self, resource_id): + @console_ns.doc("create_dataset_api_key") + @console_ns.doc(description="Create a new API key for a dataset") + @console_ns.doc(params={"resource_id": "Dataset ID"}) + @console_ns.response(201, "API key created successfully", api_key_fields) + @console_ns.response(400, "Maximum keys exceeded") + def post(self, resource_id): # type: ignore """Create a new API key for a dataset""" return super().post(resource_id) @@ -198,10 +198,10 @@ class DatasetApiKeyListResource(BaseApiKeyListResource): @console_ns.route("/datasets//api-keys/") class DatasetApiKeyResource(BaseApiKeyResource): - @api.doc("delete_dataset_api_key") - @api.doc(description="Delete an API key for a dataset") - @api.doc(params={"resource_id": "Dataset ID", "api_key_id": "API key ID"}) - 
@api.response(204, "API key deleted successfully") + @console_ns.doc("delete_dataset_api_key") + @console_ns.doc(description="Delete an API key for a dataset") + @console_ns.doc(params={"resource_id": "Dataset ID", "api_key_id": "API key ID"}) + @console_ns.response(204, "API key deleted successfully") def delete(self, resource_id, api_key_id): """Delete an API key for a dataset""" return super().delete(resource_id, api_key_id) diff --git a/api/controllers/console/app/advanced_prompt_template.py b/api/controllers/console/app/advanced_prompt_template.py index 075345d860..0ca163d2a5 100644 --- a/api/controllers/console/app/advanced_prompt_template.py +++ b/api/controllers/console/app/advanced_prompt_template.py @@ -1,6 +1,6 @@ from flask_restx import Resource, fields, reqparse -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from libs.login import login_required from services.advanced_prompt_template_service import AdvancedPromptTemplateService @@ -16,13 +16,13 @@ parser = ( @console_ns.route("/app/prompt-templates") class AdvancedPromptTemplateList(Resource): - @api.doc("get_advanced_prompt_templates") - @api.doc(description="Get advanced prompt templates based on app mode and model configuration") - @api.expect(parser) - @api.response( + @console_ns.doc("get_advanced_prompt_templates") + @console_ns.doc(description="Get advanced prompt templates based on app mode and model configuration") + @console_ns.expect(parser) + @console_ns.response( 200, "Prompt templates retrieved successfully", fields.List(fields.Raw(description="Prompt template data")) ) - @api.response(400, "Invalid request parameters") + @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/agent.py b/api/controllers/console/app/agent.py index fde28fdb98..7e31d0a844 
100644 --- a/api/controllers/console/app/agent.py +++ b/api/controllers/console/app/agent.py @@ -1,6 +1,6 @@ from flask_restx import Resource, fields, reqparse -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from libs.helper import uuid_value @@ -17,12 +17,14 @@ parser = ( @console_ns.route("/apps//agent/logs") class AgentLogApi(Resource): - @api.doc("get_agent_logs") - @api.doc(description="Get agent execution logs for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect(parser) - @api.response(200, "Agent logs retrieved successfully", fields.List(fields.Raw(description="Agent log entries"))) - @api.response(400, "Invalid request parameters") + @console_ns.doc("get_agent_logs") + @console_ns.doc(description="Get agent execution logs for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect(parser) + @console_ns.response( + 200, "Agent logs retrieved successfully", fields.List(fields.Raw(description="Agent log entries")) + ) + @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py index bc4113b5c7..0be39c9178 100644 --- a/api/controllers/console/app/annotation.py +++ b/api/controllers/console/app/annotation.py @@ -4,7 +4,7 @@ from flask import request from flask_restx import Resource, fields, marshal, marshal_with, reqparse from controllers.common.errors import NoFileUploadedError, TooManyFilesError -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, @@ -23,11 +23,11 @@ from services.annotation_service 
import AppAnnotationService @console_ns.route("/apps//annotation-reply/") class AnnotationReplyActionApi(Resource): - @api.doc("annotation_reply_action") - @api.doc(description="Enable or disable annotation reply for an app") - @api.doc(params={"app_id": "Application ID", "action": "Action to perform (enable/disable)"}) - @api.expect( - api.model( + @console_ns.doc("annotation_reply_action") + @console_ns.doc(description="Enable or disable annotation reply for an app") + @console_ns.doc(params={"app_id": "Application ID", "action": "Action to perform (enable/disable)"}) + @console_ns.expect( + console_ns.model( "AnnotationReplyActionRequest", { "score_threshold": fields.Float(required=True, description="Score threshold for annotation matching"), @@ -36,8 +36,8 @@ class AnnotationReplyActionApi(Resource): }, ) ) - @api.response(200, "Action completed successfully") - @api.response(403, "Insufficient permissions") + @console_ns.response(200, "Action completed successfully") + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -61,11 +61,11 @@ class AnnotationReplyActionApi(Resource): @console_ns.route("/apps//annotation-setting") class AppAnnotationSettingDetailApi(Resource): - @api.doc("get_annotation_setting") - @api.doc(description="Get annotation settings for an app") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Annotation settings retrieved successfully") - @api.response(403, "Insufficient permissions") + @console_ns.doc("get_annotation_setting") + @console_ns.doc(description="Get annotation settings for an app") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Annotation settings retrieved successfully") + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -78,11 +78,11 @@ class AppAnnotationSettingDetailApi(Resource): 
@console_ns.route("/apps//annotation-settings/") class AppAnnotationSettingUpdateApi(Resource): - @api.doc("update_annotation_setting") - @api.doc(description="Update annotation settings for an app") - @api.doc(params={"app_id": "Application ID", "annotation_setting_id": "Annotation setting ID"}) - @api.expect( - api.model( + @console_ns.doc("update_annotation_setting") + @console_ns.doc(description="Update annotation settings for an app") + @console_ns.doc(params={"app_id": "Application ID", "annotation_setting_id": "Annotation setting ID"}) + @console_ns.expect( + console_ns.model( "AnnotationSettingUpdateRequest", { "score_threshold": fields.Float(required=True, description="Score threshold"), @@ -91,8 +91,8 @@ class AppAnnotationSettingUpdateApi(Resource): }, ) ) - @api.response(200, "Settings updated successfully") - @api.response(403, "Insufficient permissions") + @console_ns.response(200, "Settings updated successfully") + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -110,11 +110,11 @@ class AppAnnotationSettingUpdateApi(Resource): @console_ns.route("/apps//annotation-reply//status/") class AnnotationReplyActionStatusApi(Resource): - @api.doc("get_annotation_reply_action_status") - @api.doc(description="Get status of annotation reply action job") - @api.doc(params={"app_id": "Application ID", "job_id": "Job ID", "action": "Action type"}) - @api.response(200, "Job status retrieved successfully") - @api.response(403, "Insufficient permissions") + @console_ns.doc("get_annotation_reply_action_status") + @console_ns.doc(description="Get status of annotation reply action job") + @console_ns.doc(params={"app_id": "Application ID", "job_id": "Job ID", "action": "Action type"}) + @console_ns.response(200, "Job status retrieved successfully") + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -138,17 +138,17 @@ class 
AnnotationReplyActionStatusApi(Resource): @console_ns.route("/apps//annotations") class AnnotationApi(Resource): - @api.doc("list_annotations") - @api.doc(description="Get annotations for an app with pagination") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.parser() + @console_ns.doc("list_annotations") + @console_ns.doc(description="Get annotations for an app with pagination") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.parser() .add_argument("page", type=int, location="args", default=1, help="Page number") .add_argument("limit", type=int, location="args", default=20, help="Page size") .add_argument("keyword", type=str, location="args", default="", help="Search keyword") ) - @api.response(200, "Annotations retrieved successfully") - @api.response(403, "Insufficient permissions") + @console_ns.response(200, "Annotations retrieved successfully") + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -169,11 +169,11 @@ class AnnotationApi(Resource): } return response, 200 - @api.doc("create_annotation") - @api.doc(description="Create a new annotation for an app") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("create_annotation") + @console_ns.doc(description="Create a new annotation for an app") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "CreateAnnotationRequest", { "message_id": fields.String(description="Message ID (optional)"), @@ -184,8 +184,8 @@ class AnnotationApi(Resource): }, ) ) - @api.response(201, "Annotation created successfully", annotation_fields) - @api.response(403, "Insufficient permissions") + @console_ns.response(201, "Annotation created successfully", annotation_fields) + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -235,11 +235,11 @@ 
class AnnotationApi(Resource): @console_ns.route("/apps//annotations/export") class AnnotationExportApi(Resource): - @api.doc("export_annotations") - @api.doc(description="Export all annotations for an app") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Annotations exported successfully", fields.List(fields.Nested(annotation_fields))) - @api.response(403, "Insufficient permissions") + @console_ns.doc("export_annotations") + @console_ns.doc(description="Export all annotations for an app") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Annotations exported successfully", fields.List(fields.Nested(annotation_fields))) + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -260,13 +260,13 @@ parser = ( @console_ns.route("/apps//annotations/") class AnnotationUpdateDeleteApi(Resource): - @api.doc("update_delete_annotation") - @api.doc(description="Update or delete an annotation") - @api.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"}) - @api.response(200, "Annotation updated successfully", annotation_fields) - @api.response(204, "Annotation deleted successfully") - @api.response(403, "Insufficient permissions") - @api.expect(parser) + @console_ns.doc("update_delete_annotation") + @console_ns.doc(description="Update or delete an annotation") + @console_ns.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"}) + @console_ns.response(200, "Annotation updated successfully", annotation_fields) + @console_ns.response(204, "Annotation deleted successfully") + @console_ns.response(403, "Insufficient permissions") + @console_ns.expect(parser) @setup_required @login_required @account_initialization_required @@ -293,12 +293,12 @@ class AnnotationUpdateDeleteApi(Resource): @console_ns.route("/apps//annotations/batch-import") class AnnotationBatchImportApi(Resource): - @api.doc("batch_import_annotations") - 
@api.doc(description="Batch import annotations from CSV file") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Batch import started successfully") - @api.response(403, "Insufficient permissions") - @api.response(400, "No file uploaded or too many files") + @console_ns.doc("batch_import_annotations") + @console_ns.doc(description="Batch import annotations from CSV file") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Batch import started successfully") + @console_ns.response(403, "Insufficient permissions") + @console_ns.response(400, "No file uploaded or too many files") @setup_required @login_required @account_initialization_required @@ -323,11 +323,11 @@ class AnnotationBatchImportApi(Resource): @console_ns.route("/apps//annotations/batch-import-status/") class AnnotationBatchImportStatusApi(Resource): - @api.doc("get_batch_import_status") - @api.doc(description="Get status of batch import job") - @api.doc(params={"app_id": "Application ID", "job_id": "Job ID"}) - @api.response(200, "Job status retrieved successfully") - @api.response(403, "Insufficient permissions") + @console_ns.doc("get_batch_import_status") + @console_ns.doc(description="Get status of batch import job") + @console_ns.doc(params={"app_id": "Application ID", "job_id": "Job ID"}) + @console_ns.response(200, "Job status retrieved successfully") + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -350,18 +350,18 @@ class AnnotationBatchImportStatusApi(Resource): @console_ns.route("/apps//annotations//hit-histories") class AnnotationHitHistoryListApi(Resource): - @api.doc("list_annotation_hit_histories") - @api.doc(description="Get hit histories for an annotation") - @api.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"}) - @api.expect( - api.parser() + @console_ns.doc("list_annotation_hit_histories") + @console_ns.doc(description="Get hit 
histories for an annotation") + @console_ns.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"}) + @console_ns.expect( + console_ns.parser() .add_argument("page", type=int, location="args", default=1, help="Page number") .add_argument("limit", type=int, location="args", default=20, help="Page size") ) - @api.response( + @console_ns.response( 200, "Hit histories retrieved successfully", fields.List(fields.Nested(annotation_hit_history_fields)) ) - @api.response(403, "Insufficient permissions") + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index a487512961..85a46aa9c3 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -5,7 +5,7 @@ from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import BadRequest, abort -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( account_initialization_required, @@ -32,10 +32,10 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co @console_ns.route("/apps") class AppListApi(Resource): - @api.doc("list_apps") - @api.doc(description="Get list of applications with pagination and filtering") - @api.expect( - api.parser() + @console_ns.doc("list_apps") + @console_ns.doc(description="Get list of applications with pagination and filtering") + @console_ns.expect( + console_ns.parser() .add_argument("page", type=int, location="args", help="Page number (1-99999)", default=1) .add_argument("limit", type=int, location="args", help="Page size (1-100)", default=20) .add_argument( @@ -50,7 +50,7 @@ class AppListApi(Resource): .add_argument("tag_ids", type=str, location="args", help="Comma-separated tag IDs") 
.add_argument("is_created_by_me", type=bool, location="args", help="Filter by creator") ) - @api.response(200, "Success", app_pagination_fields) + @console_ns.response(200, "Success", app_pagination_fields) @setup_required @login_required @account_initialization_required @@ -139,10 +139,10 @@ class AppListApi(Resource): return marshal(app_pagination, app_pagination_fields), 200 - @api.doc("create_app") - @api.doc(description="Create a new application") - @api.expect( - api.model( + @console_ns.doc("create_app") + @console_ns.doc(description="Create a new application") + @console_ns.expect( + console_ns.model( "CreateAppRequest", { "name": fields.String(required=True, description="App name"), @@ -154,9 +154,9 @@ class AppListApi(Resource): }, ) ) - @api.response(201, "App created successfully", app_detail_fields) - @api.response(403, "Insufficient permissions") - @api.response(400, "Invalid request parameters") + @console_ns.response(201, "App created successfully", app_detail_fields) + @console_ns.response(403, "Insufficient permissions") + @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required @@ -188,10 +188,10 @@ class AppListApi(Resource): @console_ns.route("/apps/") class AppApi(Resource): - @api.doc("get_app_detail") - @api.doc(description="Get application details") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Success", app_detail_fields_with_site) + @console_ns.doc("get_app_detail") + @console_ns.doc(description="Get application details") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Success", app_detail_fields_with_site) @setup_required @login_required @account_initialization_required @@ -210,11 +210,11 @@ class AppApi(Resource): return app_model - @api.doc("update_app") - @api.doc(description="Update application details") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("update_app") + 
@console_ns.doc(description="Update application details") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "UpdateAppRequest", { "name": fields.String(required=True, description="App name"), @@ -227,9 +227,9 @@ class AppApi(Resource): }, ) ) - @api.response(200, "App updated successfully", app_detail_fields_with_site) - @api.response(403, "Insufficient permissions") - @api.response(400, "Invalid request parameters") + @console_ns.response(200, "App updated successfully", app_detail_fields_with_site) + @console_ns.response(403, "Insufficient permissions") + @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required @@ -265,11 +265,11 @@ class AppApi(Resource): return app_model - @api.doc("delete_app") - @api.doc(description="Delete application") - @api.doc(params={"app_id": "Application ID"}) - @api.response(204, "App deleted successfully") - @api.response(403, "Insufficient permissions") + @console_ns.doc("delete_app") + @console_ns.doc(description="Delete application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(204, "App deleted successfully") + @console_ns.response(403, "Insufficient permissions") @get_app_model @setup_required @login_required @@ -285,11 +285,11 @@ class AppApi(Resource): @console_ns.route("/apps//copy") class AppCopyApi(Resource): - @api.doc("copy_app") - @api.doc(description="Create a copy of an existing application") - @api.doc(params={"app_id": "Application ID to copy"}) - @api.expect( - api.model( + @console_ns.doc("copy_app") + @console_ns.doc(description="Create a copy of an existing application") + @console_ns.doc(params={"app_id": "Application ID to copy"}) + @console_ns.expect( + console_ns.model( "CopyAppRequest", { "name": fields.String(description="Name for the copied app"), @@ -300,8 +300,8 @@ class AppCopyApi(Resource): }, ) ) - @api.response(201, "App copied successfully", 
app_detail_fields_with_site) - @api.response(403, "Insufficient permissions") + @console_ns.response(201, "App copied successfully", app_detail_fields_with_site) + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -346,20 +346,20 @@ class AppCopyApi(Resource): @console_ns.route("/apps//export") class AppExportApi(Resource): - @api.doc("export_app") - @api.doc(description="Export application configuration as DSL") - @api.doc(params={"app_id": "Application ID to export"}) - @api.expect( - api.parser() + @console_ns.doc("export_app") + @console_ns.doc(description="Export application configuration as DSL") + @console_ns.doc(params={"app_id": "Application ID to export"}) + @console_ns.expect( + console_ns.parser() .add_argument("include_secret", type=bool, location="args", default=False, help="Include secrets in export") .add_argument("workflow_id", type=str, location="args", help="Specific workflow ID to export") ) - @api.response( + @console_ns.response( 200, "App exported successfully", - api.model("AppExportResponse", {"data": fields.String(description="DSL export data")}), + console_ns.model("AppExportResponse", {"data": fields.String(description="DSL export data")}), ) - @api.response(403, "Insufficient permissions") + @console_ns.response(403, "Insufficient permissions") @get_app_model @setup_required @login_required @@ -387,11 +387,11 @@ parser = reqparse.RequestParser().add_argument("name", type=str, required=True, @console_ns.route("/apps//name") class AppNameApi(Resource): - @api.doc("check_app_name") - @api.doc(description="Check if app name is available") - @api.doc(params={"app_id": "Application ID"}) - @api.expect(parser) - @api.response(200, "Name availability checked") + @console_ns.doc("check_app_name") + @console_ns.doc(description="Check if app name is available") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect(parser) + @console_ns.response(200, "Name 
availability checked") @setup_required @login_required @account_initialization_required @@ -409,11 +409,11 @@ class AppNameApi(Resource): @console_ns.route("/apps//icon") class AppIconApi(Resource): - @api.doc("update_app_icon") - @api.doc(description="Update application icon") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("update_app_icon") + @console_ns.doc(description="Update application icon") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "AppIconRequest", { "icon": fields.String(required=True, description="Icon data"), @@ -422,8 +422,8 @@ class AppIconApi(Resource): }, ) ) - @api.response(200, "Icon updated successfully") - @api.response(403, "Insufficient permissions") + @console_ns.response(200, "Icon updated successfully") + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -446,16 +446,16 @@ class AppIconApi(Resource): @console_ns.route("/apps//site-enable") class AppSiteStatus(Resource): - @api.doc("update_app_site_status") - @api.doc(description="Enable or disable app site") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("update_app_site_status") + @console_ns.doc(description="Enable or disable app site") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "AppSiteStatusRequest", {"enable_site": fields.Boolean(required=True, description="Enable or disable site")} ) ) - @api.response(200, "Site status updated successfully", app_detail_fields) - @api.response(403, "Insufficient permissions") + @console_ns.response(200, "Site status updated successfully", app_detail_fields) + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -474,16 +474,16 @@ class AppSiteStatus(Resource): @console_ns.route("/apps//api-enable") class 
AppApiStatus(Resource): - @api.doc("update_app_api_status") - @api.doc(description="Enable or disable app API") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("update_app_api_status") + @console_ns.doc(description="Enable or disable app API") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "AppApiStatusRequest", {"enable_api": fields.Boolean(required=True, description="Enable or disable API")} ) ) - @api.response(200, "API status updated successfully", app_detail_fields) - @api.response(403, "Insufficient permissions") + @console_ns.response(200, "API status updated successfully", app_detail_fields) + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @is_admin_or_owner_required @@ -502,10 +502,10 @@ class AppApiStatus(Resource): @console_ns.route("/apps//trace") class AppTraceApi(Resource): - @api.doc("get_app_trace") - @api.doc(description="Get app tracing configuration") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Trace configuration retrieved successfully") + @console_ns.doc("get_app_trace") + @console_ns.doc(description="Get app tracing configuration") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Trace configuration retrieved successfully") @setup_required @login_required @account_initialization_required @@ -515,11 +515,11 @@ class AppTraceApi(Resource): return app_trace_config - @api.doc("update_app_trace") - @api.doc(description="Update app tracing configuration") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("update_app_trace") + @console_ns.doc(description="Update app tracing configuration") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "AppTraceRequest", { "enabled": fields.Boolean(required=True, description="Enable or disable tracing"), @@ -527,8 +527,8 @@ class 
AppTraceApi(Resource): }, ) ) - @api.response(200, "Trace configuration updated successfully") - @api.response(403, "Insufficient permissions") + @console_ns.response(200, "Trace configuration updated successfully") + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py index 02dbd42515..35a3393742 100644 --- a/api/controllers/console/app/app_import.py +++ b/api/controllers/console/app/app_import.py @@ -1,7 +1,6 @@ from flask_restx import Resource, marshal_with, reqparse from sqlalchemy.orm import Session -from controllers.console import api from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( account_initialization_required, @@ -35,7 +34,7 @@ parser = ( @console_ns.route("/apps/imports") class AppImportApi(Resource): - @api.expect(parser) + @console_ns.expect(parser) @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py index 8170ba271a..86446f1164 100644 --- a/api/controllers/console/app/audio.py +++ b/api/controllers/console/app/audio.py @@ -5,7 +5,7 @@ from flask_restx import Resource, fields, reqparse from werkzeug.exceptions import InternalServerError import services -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.error import ( AppUnavailableError, AudioTooLargeError, @@ -36,16 +36,16 @@ logger = logging.getLogger(__name__) @console_ns.route("/apps//audio-to-text") class ChatMessageAudioApi(Resource): - @api.doc("chat_message_audio_transcript") - @api.doc(description="Transcript audio to text for chat messages") - @api.doc(params={"app_id": "App ID"}) - @api.response( + @console_ns.doc("chat_message_audio_transcript") + @console_ns.doc(description="Transcript audio to text for chat 
messages") + @console_ns.doc(params={"app_id": "App ID"}) + @console_ns.response( 200, "Audio transcription successful", - api.model("AudioTranscriptResponse", {"text": fields.String(description="Transcribed text from audio")}), + console_ns.model("AudioTranscriptResponse", {"text": fields.String(description="Transcribed text from audio")}), ) - @api.response(400, "Bad request - No audio uploaded or unsupported type") - @api.response(413, "Audio file too large") + @console_ns.response(400, "Bad request - No audio uploaded or unsupported type") + @console_ns.response(413, "Audio file too large") @setup_required @login_required @account_initialization_required @@ -89,11 +89,11 @@ class ChatMessageAudioApi(Resource): @console_ns.route("/apps//text-to-audio") class ChatMessageTextApi(Resource): - @api.doc("chat_message_text_to_speech") - @api.doc(description="Convert text to speech for chat messages") - @api.doc(params={"app_id": "App ID"}) - @api.expect( - api.model( + @console_ns.doc("chat_message_text_to_speech") + @console_ns.doc(description="Convert text to speech for chat messages") + @console_ns.doc(params={"app_id": "App ID"}) + @console_ns.expect( + console_ns.model( "TextToSpeechRequest", { "message_id": fields.String(description="Message ID"), @@ -103,8 +103,8 @@ class ChatMessageTextApi(Resource): }, ) ) - @api.response(200, "Text to speech conversion successful") - @api.response(400, "Bad request - Invalid parameters") + @console_ns.response(200, "Text to speech conversion successful") + @console_ns.response(400, "Bad request - Invalid parameters") @get_app_model @setup_required @login_required @@ -156,12 +156,16 @@ class ChatMessageTextApi(Resource): @console_ns.route("/apps//text-to-audio/voices") class TextModesApi(Resource): - @api.doc("get_text_to_speech_voices") - @api.doc(description="Get available TTS voices for a specific language") - @api.doc(params={"app_id": "App ID"}) - @api.expect(api.parser().add_argument("language", type=str, required=True, 
location="args", help="Language code")) - @api.response(200, "TTS voices retrieved successfully", fields.List(fields.Raw(description="Available voices"))) - @api.response(400, "Invalid language parameter") + @console_ns.doc("get_text_to_speech_voices") + @console_ns.doc(description="Get available TTS voices for a specific language") + @console_ns.doc(params={"app_id": "App ID"}) + @console_ns.expect( + console_ns.parser().add_argument("language", type=str, required=True, location="args", help="Language code") + ) + @console_ns.response( + 200, "TTS voices retrieved successfully", fields.List(fields.Raw(description="Available voices")) + ) + @console_ns.response(400, "Invalid language parameter") @get_app_model @setup_required @login_required diff --git a/api/controllers/console/app/completion.py b/api/controllers/console/app/completion.py index d7bc3cc20d..031a95e178 100644 --- a/api/controllers/console/app/completion.py +++ b/api/controllers/console/app/completion.py @@ -5,7 +5,7 @@ from flask_restx import Resource, fields, reqparse from werkzeug.exceptions import InternalServerError, NotFound import services -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.error import ( AppUnavailableError, CompletionRequestError, @@ -40,11 +40,11 @@ logger = logging.getLogger(__name__) # define completion message api for user @console_ns.route("/apps//completion-messages") class CompletionMessageApi(Resource): - @api.doc("create_completion_message") - @api.doc(description="Generate completion message for debugging") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("create_completion_message") + @console_ns.doc(description="Generate completion message for debugging") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "CompletionMessageRequest", { "inputs": fields.Raw(required=True, description="Input variables"), @@ 
-56,9 +56,9 @@ class CompletionMessageApi(Resource): }, ) ) - @api.response(200, "Completion generated successfully") - @api.response(400, "Invalid request parameters") - @api.response(404, "App not found") + @console_ns.response(200, "Completion generated successfully") + @console_ns.response(400, "Invalid request parameters") + @console_ns.response(404, "App not found") @setup_required @login_required @account_initialization_required @@ -110,10 +110,10 @@ class CompletionMessageApi(Resource): @console_ns.route("/apps//completion-messages//stop") class CompletionMessageStopApi(Resource): - @api.doc("stop_completion_message") - @api.doc(description="Stop a running completion message generation") - @api.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"}) - @api.response(200, "Task stopped successfully") + @console_ns.doc("stop_completion_message") + @console_ns.doc(description="Stop a running completion message generation") + @console_ns.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"}) + @console_ns.response(200, "Task stopped successfully") @setup_required @login_required @account_initialization_required @@ -128,11 +128,11 @@ class CompletionMessageStopApi(Resource): @console_ns.route("/apps//chat-messages") class ChatMessageApi(Resource): - @api.doc("create_chat_message") - @api.doc(description="Generate chat message for debugging") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("create_chat_message") + @console_ns.doc(description="Generate chat message for debugging") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "ChatMessageRequest", { "inputs": fields.Raw(required=True, description="Input variables"), @@ -146,9 +146,9 @@ class ChatMessageApi(Resource): }, ) ) - @api.response(200, "Chat message generated successfully") - @api.response(400, "Invalid request parameters") - @api.response(404, "App or conversation not found") + 
@console_ns.response(200, "Chat message generated successfully") + @console_ns.response(400, "Invalid request parameters") + @console_ns.response(404, "App or conversation not found") @setup_required @login_required @account_initialization_required @@ -209,10 +209,10 @@ class ChatMessageApi(Resource): @console_ns.route("/apps//chat-messages//stop") class ChatMessageStopApi(Resource): - @api.doc("stop_chat_message") - @api.doc(description="Stop a running chat message generation") - @api.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"}) - @api.response(200, "Task stopped successfully") + @console_ns.doc("stop_chat_message") + @console_ns.doc(description="Stop a running chat message generation") + @console_ns.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"}) + @console_ns.response(200, "Task stopped successfully") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index 57b6c314f3..e102300438 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -6,7 +6,7 @@ from sqlalchemy import func, or_ from sqlalchemy.orm import joinedload from werkzeug.exceptions import NotFound -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.app.entities.app_invoke_entities import InvokeFrom @@ -28,11 +28,11 @@ from services.errors.conversation import ConversationNotExistsError @console_ns.route("/apps//completion-conversations") class CompletionConversationApi(Resource): - @api.doc("list_completion_conversations") - @api.doc(description="Get completion conversations with pagination and filtering") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - 
api.parser() + @console_ns.doc("list_completion_conversations") + @console_ns.doc(description="Get completion conversations with pagination and filtering") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.parser() .add_argument("keyword", type=str, location="args", help="Search keyword") .add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)") .add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)") @@ -47,8 +47,8 @@ class CompletionConversationApi(Resource): .add_argument("page", type=int, location="args", default=1, help="Page number") .add_argument("limit", type=int, location="args", default=20, help="Page size (1-100)") ) - @api.response(200, "Success", conversation_pagination_fields) - @api.response(403, "Insufficient permissions") + @console_ns.response(200, "Success", conversation_pagination_fields) + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -122,12 +122,12 @@ class CompletionConversationApi(Resource): @console_ns.route("/apps//completion-conversations/") class CompletionConversationDetailApi(Resource): - @api.doc("get_completion_conversation") - @api.doc(description="Get completion conversation details with messages") - @api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"}) - @api.response(200, "Success", conversation_message_detail_fields) - @api.response(403, "Insufficient permissions") - @api.response(404, "Conversation not found") + @console_ns.doc("get_completion_conversation") + @console_ns.doc(description="Get completion conversation details with messages") + @console_ns.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"}) + @console_ns.response(200, "Success", conversation_message_detail_fields) + @console_ns.response(403, "Insufficient permissions") + @console_ns.response(404, "Conversation not found") @setup_required 
@login_required @account_initialization_required @@ -139,12 +139,12 @@ class CompletionConversationDetailApi(Resource): return _get_conversation(app_model, conversation_id) - @api.doc("delete_completion_conversation") - @api.doc(description="Delete a completion conversation") - @api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"}) - @api.response(204, "Conversation deleted successfully") - @api.response(403, "Insufficient permissions") - @api.response(404, "Conversation not found") + @console_ns.doc("delete_completion_conversation") + @console_ns.doc(description="Delete a completion conversation") + @console_ns.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"}) + @console_ns.response(204, "Conversation deleted successfully") + @console_ns.response(403, "Insufficient permissions") + @console_ns.response(404, "Conversation not found") @setup_required @login_required @account_initialization_required @@ -164,11 +164,11 @@ class CompletionConversationDetailApi(Resource): @console_ns.route("/apps//chat-conversations") class ChatConversationApi(Resource): - @api.doc("list_chat_conversations") - @api.doc(description="Get chat conversations with pagination, filtering and summary") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.parser() + @console_ns.doc("list_chat_conversations") + @console_ns.doc(description="Get chat conversations with pagination, filtering and summary") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.parser() .add_argument("keyword", type=str, location="args", help="Search keyword") .add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)") .add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)") @@ -192,8 +192,8 @@ class ChatConversationApi(Resource): help="Sort field and direction", ) ) - @api.response(200, "Success", conversation_with_summary_pagination_fields) - 
@api.response(403, "Insufficient permissions") + @console_ns.response(200, "Success", conversation_with_summary_pagination_fields) + @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @@ -322,12 +322,12 @@ class ChatConversationApi(Resource): @console_ns.route("/apps//chat-conversations/") class ChatConversationDetailApi(Resource): - @api.doc("get_chat_conversation") - @api.doc(description="Get chat conversation details") - @api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"}) - @api.response(200, "Success", conversation_detail_fields) - @api.response(403, "Insufficient permissions") - @api.response(404, "Conversation not found") + @console_ns.doc("get_chat_conversation") + @console_ns.doc(description="Get chat conversation details") + @console_ns.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"}) + @console_ns.response(200, "Success", conversation_detail_fields) + @console_ns.response(403, "Insufficient permissions") + @console_ns.response(404, "Conversation not found") @setup_required @login_required @account_initialization_required @@ -339,12 +339,12 @@ class ChatConversationDetailApi(Resource): return _get_conversation(app_model, conversation_id) - @api.doc("delete_chat_conversation") - @api.doc(description="Delete a chat conversation") - @api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"}) - @api.response(204, "Conversation deleted successfully") - @api.response(403, "Insufficient permissions") - @api.response(404, "Conversation not found") + @console_ns.doc("delete_chat_conversation") + @console_ns.doc(description="Delete a chat conversation") + @console_ns.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"}) + @console_ns.response(204, "Conversation deleted successfully") + @console_ns.response(403, "Insufficient permissions") + @console_ns.response(404, "Conversation not 
found") @setup_required @login_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) diff --git a/api/controllers/console/app/conversation_variables.py b/api/controllers/console/app/conversation_variables.py index d4c0b5697f..15ea004143 100644 --- a/api/controllers/console/app/conversation_variables.py +++ b/api/controllers/console/app/conversation_variables.py @@ -2,7 +2,7 @@ from flask_restx import Resource, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db @@ -14,15 +14,15 @@ from models.model import AppMode @console_ns.route("/apps//conversation-variables") class ConversationVariablesApi(Resource): - @api.doc("get_conversation_variables") - @api.doc(description="Get conversation variables for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.parser().add_argument( + @console_ns.doc("get_conversation_variables") + @console_ns.doc(description="Get conversation variables for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.parser().add_argument( "conversation_id", type=str, location="args", help="Conversation ID to filter variables" ) ) - @api.response(200, "Conversation variables retrieved successfully", paginated_conversation_variable_fields) + @console_ns.response(200, "Conversation variables retrieved successfully", paginated_conversation_variable_fields) @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py index 54a101946c..cf8acda018 100644 --- a/api/controllers/console/app/generator.py +++ 
b/api/controllers/console/app/generator.py @@ -2,7 +2,7 @@ from collections.abc import Sequence from flask_restx import Resource, fields, reqparse -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.error import ( CompletionRequestError, ProviderModelCurrentlyNotSupportError, @@ -24,10 +24,10 @@ from services.workflow_service import WorkflowService @console_ns.route("/rule-generate") class RuleGenerateApi(Resource): - @api.doc("generate_rule_config") - @api.doc(description="Generate rule configuration using LLM") - @api.expect( - api.model( + @console_ns.doc("generate_rule_config") + @console_ns.doc(description="Generate rule configuration using LLM") + @console_ns.expect( + console_ns.model( "RuleGenerateRequest", { "instruction": fields.String(required=True, description="Rule generation instruction"), @@ -36,9 +36,9 @@ class RuleGenerateApi(Resource): }, ) ) - @api.response(200, "Rule configuration generated successfully") - @api.response(400, "Invalid request parameters") - @api.response(402, "Provider quota exceeded") + @console_ns.response(200, "Rule configuration generated successfully") + @console_ns.response(400, "Invalid request parameters") + @console_ns.response(402, "Provider quota exceeded") @setup_required @login_required @account_initialization_required @@ -73,10 +73,10 @@ class RuleGenerateApi(Resource): @console_ns.route("/rule-code-generate") class RuleCodeGenerateApi(Resource): - @api.doc("generate_rule_code") - @api.doc(description="Generate code rules using LLM") - @api.expect( - api.model( + @console_ns.doc("generate_rule_code") + @console_ns.doc(description="Generate code rules using LLM") + @console_ns.expect( + console_ns.model( "RuleCodeGenerateRequest", { "instruction": fields.String(required=True, description="Code generation instruction"), @@ -88,9 +88,9 @@ class RuleCodeGenerateApi(Resource): }, ) ) - @api.response(200, "Code rules generated successfully") - 
@api.response(400, "Invalid request parameters") - @api.response(402, "Provider quota exceeded") + @console_ns.response(200, "Code rules generated successfully") + @console_ns.response(400, "Invalid request parameters") + @console_ns.response(402, "Provider quota exceeded") @setup_required @login_required @account_initialization_required @@ -126,10 +126,10 @@ class RuleCodeGenerateApi(Resource): @console_ns.route("/rule-structured-output-generate") class RuleStructuredOutputGenerateApi(Resource): - @api.doc("generate_structured_output") - @api.doc(description="Generate structured output rules using LLM") - @api.expect( - api.model( + @console_ns.doc("generate_structured_output") + @console_ns.doc(description="Generate structured output rules using LLM") + @console_ns.expect( + console_ns.model( "StructuredOutputGenerateRequest", { "instruction": fields.String(required=True, description="Structured output generation instruction"), @@ -137,9 +137,9 @@ class RuleStructuredOutputGenerateApi(Resource): }, ) ) - @api.response(200, "Structured output generated successfully") - @api.response(400, "Invalid request parameters") - @api.response(402, "Provider quota exceeded") + @console_ns.response(200, "Structured output generated successfully") + @console_ns.response(400, "Invalid request parameters") + @console_ns.response(402, "Provider quota exceeded") @setup_required @login_required @account_initialization_required @@ -172,10 +172,10 @@ class RuleStructuredOutputGenerateApi(Resource): @console_ns.route("/instruction-generate") class InstructionGenerateApi(Resource): - @api.doc("generate_instruction") - @api.doc(description="Generate instruction for workflow nodes or general use") - @api.expect( - api.model( + @console_ns.doc("generate_instruction") + @console_ns.doc(description="Generate instruction for workflow nodes or general use") + @console_ns.expect( + console_ns.model( "InstructionGenerateRequest", { "flow_id": fields.String(required=True, 
description="Workflow/Flow ID"), @@ -188,9 +188,9 @@ class InstructionGenerateApi(Resource): }, ) ) - @api.response(200, "Instruction generated successfully") - @api.response(400, "Invalid request parameters or flow/workflow not found") - @api.response(402, "Provider quota exceeded") + @console_ns.response(200, "Instruction generated successfully") + @console_ns.response(400, "Invalid request parameters or flow/workflow not found") + @console_ns.response(402, "Provider quota exceeded") @setup_required @login_required @account_initialization_required @@ -283,10 +283,10 @@ class InstructionGenerateApi(Resource): @console_ns.route("/instruction-generate/template") class InstructionGenerationTemplateApi(Resource): - @api.doc("get_instruction_template") - @api.doc(description="Get instruction generation template") - @api.expect( - api.model( + @console_ns.doc("get_instruction_template") + @console_ns.doc(description="Get instruction generation template") + @console_ns.expect( + console_ns.model( "InstructionTemplateRequest", { "instruction": fields.String(required=True, description="Template instruction"), @@ -294,8 +294,8 @@ class InstructionGenerationTemplateApi(Resource): }, ) ) - @api.response(200, "Template retrieved successfully") - @api.response(400, "Invalid request parameters") + @console_ns.response(200, "Template retrieved successfully") + @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/mcp_server.py b/api/controllers/console/app/mcp_server.py index 3700c6b1d0..7454d87068 100644 --- a/api/controllers/console/app/mcp_server.py +++ b/api/controllers/console/app/mcp_server.py @@ -4,7 +4,7 @@ from enum import StrEnum from flask_restx import Resource, fields, marshal_with, reqparse from werkzeug.exceptions import NotFound -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps 
import get_app_model from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from extensions.ext_database import db @@ -20,10 +20,10 @@ class AppMCPServerStatus(StrEnum): @console_ns.route("/apps//server") class AppMCPServerController(Resource): - @api.doc("get_app_mcp_server") - @api.doc(description="Get MCP server configuration for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "MCP server configuration retrieved successfully", app_server_fields) + @console_ns.doc("get_app_mcp_server") + @console_ns.doc(description="Get MCP server configuration for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "MCP server configuration retrieved successfully", app_server_fields) @login_required @account_initialization_required @setup_required @@ -33,11 +33,11 @@ class AppMCPServerController(Resource): server = db.session.query(AppMCPServer).where(AppMCPServer.app_id == app_model.id).first() return server - @api.doc("create_app_mcp_server") - @api.doc(description="Create MCP server configuration for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("create_app_mcp_server") + @console_ns.doc(description="Create MCP server configuration for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "MCPServerCreateRequest", { "description": fields.String(description="Server description"), @@ -45,8 +45,8 @@ class AppMCPServerController(Resource): }, ) ) - @api.response(201, "MCP server configuration created successfully", app_server_fields) - @api.response(403, "Insufficient permissions") + @console_ns.response(201, "MCP server configuration created successfully", app_server_fields) + @console_ns.response(403, "Insufficient permissions") @account_initialization_required @get_app_model @login_required @@ -79,11 +79,11 @@ 
class AppMCPServerController(Resource): db.session.commit() return server - @api.doc("update_app_mcp_server") - @api.doc(description="Update MCP server configuration for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("update_app_mcp_server") + @console_ns.doc(description="Update MCP server configuration for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "MCPServerUpdateRequest", { "id": fields.String(required=True, description="Server ID"), @@ -93,9 +93,9 @@ class AppMCPServerController(Resource): }, ) ) - @api.response(200, "MCP server configuration updated successfully", app_server_fields) - @api.response(403, "Insufficient permissions") - @api.response(404, "Server not found") + @console_ns.response(200, "MCP server configuration updated successfully", app_server_fields) + @console_ns.response(403, "Insufficient permissions") + @console_ns.response(404, "Server not found") @get_app_model @login_required @setup_required @@ -134,12 +134,12 @@ class AppMCPServerController(Resource): @console_ns.route("/apps//server/refresh") class AppMCPServerRefreshController(Resource): - @api.doc("refresh_app_mcp_server") - @api.doc(description="Refresh MCP server configuration and regenerate server code") - @api.doc(params={"server_id": "Server ID"}) - @api.response(200, "MCP server refreshed successfully", app_server_fields) - @api.response(403, "Insufficient permissions") - @api.response(404, "Server not found") + @console_ns.doc("refresh_app_mcp_server") + @console_ns.doc(description="Refresh MCP server configuration and regenerate server code") + @console_ns.doc(params={"server_id": "Server ID"}) + @console_ns.response(200, "MCP server refreshed successfully", app_server_fields) + @console_ns.response(403, "Insufficient permissions") + @console_ns.response(404, "Server not found") @setup_required @login_required @account_initialization_required 
diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index 3f66278940..b6672c88e0 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -5,7 +5,7 @@ from flask_restx.inputs import int_range from sqlalchemy import exists, select from werkzeug.exceptions import InternalServerError, NotFound -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.error import ( CompletionRequestError, ProviderModelCurrentlyNotSupportError, @@ -43,17 +43,17 @@ class ChatMessageListApi(Resource): "data": fields.List(fields.Nested(message_detail_fields)), } - @api.doc("list_chat_messages") - @api.doc(description="Get chat messages for a conversation with pagination") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.parser() + @console_ns.doc("list_chat_messages") + @console_ns.doc(description="Get chat messages for a conversation with pagination") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.parser() .add_argument("conversation_id", type=str, required=True, location="args", help="Conversation ID") .add_argument("first_id", type=str, location="args", help="First message ID for pagination") .add_argument("limit", type=int, location="args", default=20, help="Number of messages to return (1-100)") ) - @api.response(200, "Success", message_infinite_scroll_pagination_fields) - @api.response(404, "Conversation not found") + @console_ns.response(200, "Success", message_infinite_scroll_pagination_fields) + @console_ns.response(404, "Conversation not found") @login_required @account_initialization_required @setup_required @@ -132,11 +132,11 @@ class ChatMessageListApi(Resource): @console_ns.route("/apps//feedbacks") class MessageFeedbackApi(Resource): - @api.doc("create_message_feedback") - @api.doc(description="Create or update message feedback (like/dislike)") - 
@api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("create_message_feedback") + @console_ns.doc(description="Create or update message feedback (like/dislike)") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "MessageFeedbackRequest", { "message_id": fields.String(required=True, description="Message ID"), @@ -144,9 +144,9 @@ class MessageFeedbackApi(Resource): }, ) ) - @api.response(200, "Feedback updated successfully") - @api.response(404, "Message not found") - @api.response(403, "Insufficient permissions") + @console_ns.response(200, "Feedback updated successfully") + @console_ns.response(404, "Message not found") + @console_ns.response(403, "Insufficient permissions") @get_app_model @setup_required @login_required @@ -194,13 +194,13 @@ class MessageFeedbackApi(Resource): @console_ns.route("/apps//annotations/count") class MessageAnnotationCountApi(Resource): - @api.doc("get_annotation_count") - @api.doc(description="Get count of message annotations for the app") - @api.doc(params={"app_id": "Application ID"}) - @api.response( + @console_ns.doc("get_annotation_count") + @console_ns.doc(description="Get count of message annotations for the app") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response( 200, "Annotation count retrieved successfully", - api.model("AnnotationCountResponse", {"count": fields.Integer(description="Number of annotations")}), + console_ns.model("AnnotationCountResponse", {"count": fields.Integer(description="Number of annotations")}), ) @get_app_model @setup_required @@ -214,15 +214,17 @@ class MessageAnnotationCountApi(Resource): @console_ns.route("/apps//chat-messages//suggested-questions") class MessageSuggestedQuestionApi(Resource): - @api.doc("get_message_suggested_questions") - @api.doc(description="Get suggested questions for a message") - @api.doc(params={"app_id": "Application ID", "message_id": "Message ID"}) - 
@api.response( + @console_ns.doc("get_message_suggested_questions") + @console_ns.doc(description="Get suggested questions for a message") + @console_ns.doc(params={"app_id": "Application ID", "message_id": "Message ID"}) + @console_ns.response( 200, "Suggested questions retrieved successfully", - api.model("SuggestedQuestionsResponse", {"data": fields.List(fields.String(description="Suggested question"))}), + console_ns.model( + "SuggestedQuestionsResponse", {"data": fields.List(fields.String(description="Suggested question"))} + ), ) - @api.response(404, "Message or conversation not found") + @console_ns.response(404, "Message or conversation not found") @setup_required @login_required @account_initialization_required @@ -258,11 +260,11 @@ class MessageSuggestedQuestionApi(Resource): @console_ns.route("/apps//messages/") class MessageApi(Resource): - @api.doc("get_message") - @api.doc(description="Get message details by ID") - @api.doc(params={"app_id": "Application ID", "message_id": "Message ID"}) - @api.response(200, "Message retrieved successfully", message_detail_fields) - @api.response(404, "Message not found") + @console_ns.doc("get_message") + @console_ns.doc(description="Get message details by ID") + @console_ns.doc(params={"app_id": "Application ID", "message_id": "Message ID"}) + @console_ns.response(200, "Message retrieved successfully", message_detail_fields) + @console_ns.response(404, "Message not found") @get_app_model @setup_required @login_required diff --git a/api/controllers/console/app/model_config.py b/api/controllers/console/app/model_config.py index 91e2cfd60e..a85e54fb51 100644 --- a/api/controllers/console/app/model_config.py +++ b/api/controllers/console/app/model_config.py @@ -4,7 +4,7 @@ from typing import cast from flask import request from flask_restx import Resource, fields -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from 
controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.agent.entities import AgentToolEntity @@ -20,11 +20,11 @@ from services.app_model_config_service import AppModelConfigService @console_ns.route("/apps//model-config") class ModelConfigResource(Resource): - @api.doc("update_app_model_config") - @api.doc(description="Update application model configuration") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("update_app_model_config") + @console_ns.doc(description="Update application model configuration") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "ModelConfigRequest", { "provider": fields.String(description="Model provider"), @@ -42,9 +42,9 @@ class ModelConfigResource(Resource): }, ) ) - @api.response(200, "Model configuration updated successfully") - @api.response(400, "Invalid configuration") - @api.response(404, "App not found") + @console_ns.response(200, "Model configuration updated successfully") + @console_ns.response(400, "Invalid configuration") + @console_ns.response(404, "App not found") @setup_required @login_required @edit_permission_required diff --git a/api/controllers/console/app/ops_trace.py b/api/controllers/console/app/ops_trace.py index 1d80314774..19c1a11258 100644 --- a/api/controllers/console/app/ops_trace.py +++ b/api/controllers/console/app/ops_trace.py @@ -1,7 +1,7 @@ from flask_restx import Resource, fields, reqparse from werkzeug.exceptions import BadRequest -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist from controllers.console.wraps import account_initialization_required, setup_required from libs.login import login_required @@ -14,18 +14,18 @@ class TraceAppConfigApi(Resource): Manage trace app configurations """ - 
@api.doc("get_trace_app_config") - @api.doc(description="Get tracing configuration for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.parser().add_argument( + @console_ns.doc("get_trace_app_config") + @console_ns.doc(description="Get tracing configuration for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.parser().add_argument( "tracing_provider", type=str, required=True, location="args", help="Tracing provider name" ) ) - @api.response( + @console_ns.response( 200, "Tracing configuration retrieved successfully", fields.Raw(description="Tracing configuration data") ) - @api.response(400, "Invalid request parameters") + @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required @@ -41,11 +41,11 @@ class TraceAppConfigApi(Resource): except Exception as e: raise BadRequest(str(e)) - @api.doc("create_trace_app_config") - @api.doc(description="Create a new tracing configuration for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("create_trace_app_config") + @console_ns.doc(description="Create a new tracing configuration for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "TraceConfigCreateRequest", { "tracing_provider": fields.String(required=True, description="Tracing provider name"), @@ -53,10 +53,10 @@ class TraceAppConfigApi(Resource): }, ) ) - @api.response( + @console_ns.response( 201, "Tracing configuration created successfully", fields.Raw(description="Created configuration data") ) - @api.response(400, "Invalid request parameters or configuration already exists") + @console_ns.response(400, "Invalid request parameters or configuration already exists") @setup_required @login_required @account_initialization_required @@ -81,11 +81,11 @@ class TraceAppConfigApi(Resource): except 
Exception as e: raise BadRequest(str(e)) - @api.doc("update_trace_app_config") - @api.doc(description="Update an existing tracing configuration for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("update_trace_app_config") + @console_ns.doc(description="Update an existing tracing configuration for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "TraceConfigUpdateRequest", { "tracing_provider": fields.String(required=True, description="Tracing provider name"), @@ -93,8 +93,8 @@ class TraceAppConfigApi(Resource): }, ) ) - @api.response(200, "Tracing configuration updated successfully", fields.Raw(description="Success response")) - @api.response(400, "Invalid request parameters or configuration not found") + @console_ns.response(200, "Tracing configuration updated successfully", fields.Raw(description="Success response")) + @console_ns.response(400, "Invalid request parameters or configuration not found") @setup_required @login_required @account_initialization_required @@ -117,16 +117,16 @@ class TraceAppConfigApi(Resource): except Exception as e: raise BadRequest(str(e)) - @api.doc("delete_trace_app_config") - @api.doc(description="Delete an existing tracing configuration for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.parser().add_argument( + @console_ns.doc("delete_trace_app_config") + @console_ns.doc(description="Delete an existing tracing configuration for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.parser().add_argument( "tracing_provider", type=str, required=True, location="args", help="Tracing provider name" ) ) - @api.response(204, "Tracing configuration deleted successfully") - @api.response(400, "Invalid request parameters or configuration not found") + @console_ns.response(204, "Tracing configuration deleted successfully") + 
@console_ns.response(400, "Invalid request parameters or configuration not found") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py index b8edbf77c7..b2f1997620 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -2,7 +2,7 @@ from flask_restx import Resource, fields, marshal_with, reqparse from werkzeug.exceptions import NotFound from constants.languages import supported_language -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( account_initialization_required, @@ -48,11 +48,11 @@ def parse_app_site_args(): @console_ns.route("/apps//site") class AppSite(Resource): - @api.doc("update_app_site") - @api.doc(description="Update application site configuration") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("update_app_site") + @console_ns.doc(description="Update application site configuration") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "AppSiteRequest", { "title": fields.String(description="Site title"), @@ -76,9 +76,9 @@ class AppSite(Resource): }, ) ) - @api.response(200, "Site configuration updated successfully", app_site_fields) - @api.response(403, "Insufficient permissions") - @api.response(404, "App not found") + @console_ns.response(200, "Site configuration updated successfully", app_site_fields) + @console_ns.response(403, "Insufficient permissions") + @console_ns.response(404, "App not found") @setup_required @login_required @edit_permission_required @@ -123,12 +123,12 @@ class AppSite(Resource): @console_ns.route("/apps//site/access-token-reset") class AppSiteAccessTokenReset(Resource): - @api.doc("reset_app_site_access_token") - @api.doc(description="Reset access token for 
application site") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Access token reset successfully", app_site_fields) - @api.response(403, "Insufficient permissions (admin/owner required)") - @api.response(404, "App or site not found") + @console_ns.doc("reset_app_site_access_token") + @console_ns.doc(description="Reset access token for application site") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Access token reset successfully", app_site_fields) + @console_ns.response(403, "Insufficient permissions (admin/owner required)") + @console_ns.response(404, "App or site not found") @setup_required @login_required @is_admin_or_owner_required diff --git a/api/controllers/console/app/statistic.py b/api/controllers/console/app/statistic.py index b4bd05e891..c8f54c638e 100644 --- a/api/controllers/console/app/statistic.py +++ b/api/controllers/console/app/statistic.py @@ -4,7 +4,7 @@ import sqlalchemy as sa from flask import abort, jsonify from flask_restx import Resource, fields, reqparse -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from core.app.entities.app_invoke_entities import InvokeFrom @@ -17,15 +17,15 @@ from models import AppMode @console_ns.route("/apps//statistics/daily-messages") class DailyMessageStatistic(Resource): - @api.doc("get_daily_message_statistics") - @api.doc(description="Get daily message statistics for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.parser() + @console_ns.doc("get_daily_message_statistics") + @console_ns.doc(description="Get daily message statistics for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.parser() .add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD 
HH:MM)") .add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)") ) - @api.response( + @console_ns.response( 200, "Daily message statistics retrieved successfully", fields.List(fields.Raw(description="Daily message count data")), @@ -90,11 +90,11 @@ parser = ( @console_ns.route("/apps//statistics/daily-conversations") class DailyConversationStatistic(Resource): - @api.doc("get_daily_conversation_statistics") - @api.doc(description="Get daily conversation statistics for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect(parser) - @api.response( + @console_ns.doc("get_daily_conversation_statistics") + @console_ns.doc(description="Get daily conversation statistics for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect(parser) + @console_ns.response( 200, "Daily conversation statistics retrieved successfully", fields.List(fields.Raw(description="Daily conversation count data")), @@ -146,11 +146,11 @@ WHERE @console_ns.route("/apps//statistics/daily-end-users") class DailyTerminalsStatistic(Resource): - @api.doc("get_daily_terminals_statistics") - @api.doc(description="Get daily terminal/end-user statistics for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect(parser) - @api.response( + @console_ns.doc("get_daily_terminals_statistics") + @console_ns.doc(description="Get daily terminal/end-user statistics for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect(parser) + @console_ns.response( 200, "Daily terminal statistics retrieved successfully", fields.List(fields.Raw(description="Daily terminal count data")), @@ -203,11 +203,11 @@ WHERE @console_ns.route("/apps//statistics/token-costs") class DailyTokenCostStatistic(Resource): - @api.doc("get_daily_token_cost_statistics") - @api.doc(description="Get daily token cost statistics for an application") - @api.doc(params={"app_id": "Application ID"}) - 
@api.expect(parser) - @api.response( + @console_ns.doc("get_daily_token_cost_statistics") + @console_ns.doc(description="Get daily token cost statistics for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect(parser) + @console_ns.response( 200, "Daily token cost statistics retrieved successfully", fields.List(fields.Raw(description="Daily token cost data")), @@ -263,11 +263,11 @@ WHERE @console_ns.route("/apps//statistics/average-session-interactions") class AverageSessionInteractionStatistic(Resource): - @api.doc("get_average_session_interaction_statistics") - @api.doc(description="Get average session interaction statistics for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect(parser) - @api.response( + @console_ns.doc("get_average_session_interaction_statistics") + @console_ns.doc(description="Get average session interaction statistics for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect(parser) + @console_ns.response( 200, "Average session interaction statistics retrieved successfully", fields.List(fields.Raw(description="Average session interaction data")), @@ -339,11 +339,11 @@ ORDER BY @console_ns.route("/apps//statistics/user-satisfaction-rate") class UserSatisfactionRateStatistic(Resource): - @api.doc("get_user_satisfaction_rate_statistics") - @api.doc(description="Get user satisfaction rate statistics for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect(parser) - @api.response( + @console_ns.doc("get_user_satisfaction_rate_statistics") + @console_ns.doc(description="Get user satisfaction rate statistics for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect(parser) + @console_ns.response( 200, "User satisfaction rate statistics retrieved successfully", fields.List(fields.Raw(description="User satisfaction rate data")), @@ -405,11 +405,11 @@ WHERE 
@console_ns.route("/apps//statistics/average-response-time") class AverageResponseTimeStatistic(Resource): - @api.doc("get_average_response_time_statistics") - @api.doc(description="Get average response time statistics for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect(parser) - @api.response( + @console_ns.doc("get_average_response_time_statistics") + @console_ns.doc(description="Get average response time statistics for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect(parser) + @console_ns.response( 200, "Average response time statistics retrieved successfully", fields.List(fields.Raw(description="Average response time data")), @@ -462,11 +462,11 @@ WHERE @console_ns.route("/apps//statistics/tokens-per-second") class TokensPerSecondStatistic(Resource): - @api.doc("get_tokens_per_second_statistics") - @api.doc(description="Get tokens per second statistics for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect(parser) - @api.response( + @console_ns.doc("get_tokens_per_second_statistics") + @console_ns.doc(description="Get tokens per second statistics for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect(parser) + @console_ns.response( 200, "Tokens per second statistics retrieved successfully", fields.List(fields.Raw(description="Tokens per second data")), diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index 2f6808f11d..9b5a4e895c 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -9,7 +9,7 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync from 
controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required @@ -70,11 +70,11 @@ def _parse_file(workflow: Workflow, files: list[dict] | None = None) -> Sequence @console_ns.route("/apps//workflows/draft") class DraftWorkflowApi(Resource): - @api.doc("get_draft_workflow") - @api.doc(description="Get draft workflow for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Draft workflow retrieved successfully", workflow_fields) - @api.response(404, "Draft workflow not found") + @console_ns.doc("get_draft_workflow") + @console_ns.doc(description="Get draft workflow for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Draft workflow retrieved successfully", workflow_fields) + @console_ns.response(404, "Draft workflow not found") @setup_required @login_required @account_initialization_required @@ -99,10 +99,10 @@ class DraftWorkflowApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - @api.doc("sync_draft_workflow") - @api.doc(description="Sync draft workflow configuration") - @api.expect( - api.model( + @console_ns.doc("sync_draft_workflow") + @console_ns.doc(description="Sync draft workflow configuration") + @console_ns.expect( + console_ns.model( "SyncDraftWorkflowRequest", { "graph": fields.Raw(required=True, description="Workflow graph configuration"), @@ -113,10 +113,10 @@ class DraftWorkflowApi(Resource): }, ) ) - @api.response( + @console_ns.response( 200, "Draft workflow synced successfully", - api.model( + console_ns.model( "SyncDraftWorkflowResponse", { "result": fields.String, @@ -125,8 +125,8 @@ class DraftWorkflowApi(Resource): }, ), ) - @api.response(400, "Invalid workflow configuration") - @api.response(403, "Permission denied") + @console_ns.response(400, "Invalid workflow configuration") + 
@console_ns.response(403, "Permission denied") @edit_permission_required def post(self, app_model: App): """ @@ -198,11 +198,11 @@ class DraftWorkflowApi(Resource): @console_ns.route("/apps//advanced-chat/workflows/draft/run") class AdvancedChatDraftWorkflowRunApi(Resource): - @api.doc("run_advanced_chat_draft_workflow") - @api.doc(description="Run draft workflow for advanced chat application") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("run_advanced_chat_draft_workflow") + @console_ns.doc(description="Run draft workflow for advanced chat application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "AdvancedChatWorkflowRunRequest", { "query": fields.String(required=True, description="User query"), @@ -212,9 +212,9 @@ class AdvancedChatDraftWorkflowRunApi(Resource): }, ) ) - @api.response(200, "Workflow run started successfully") - @api.response(400, "Invalid request parameters") - @api.response(403, "Permission denied") + @console_ns.response(200, "Workflow run started successfully") + @console_ns.response(400, "Invalid request parameters") + @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @@ -262,11 +262,11 @@ class AdvancedChatDraftWorkflowRunApi(Resource): @console_ns.route("/apps//advanced-chat/workflows/draft/iteration/nodes//run") class AdvancedChatDraftRunIterationNodeApi(Resource): - @api.doc("run_advanced_chat_draft_iteration_node") - @api.doc(description="Run draft workflow iteration node for advanced chat") - @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) - @api.expect( - api.model( + @console_ns.doc("run_advanced_chat_draft_iteration_node") + @console_ns.doc(description="Run draft workflow iteration node for advanced chat") + @console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) + @console_ns.expect( + console_ns.model( "IterationNodeRunRequest", { 
"task_id": fields.String(required=True, description="Task ID"), @@ -274,9 +274,9 @@ class AdvancedChatDraftRunIterationNodeApi(Resource): }, ) ) - @api.response(200, "Iteration node run started successfully") - @api.response(403, "Permission denied") - @api.response(404, "Node not found") + @console_ns.response(200, "Iteration node run started successfully") + @console_ns.response(403, "Permission denied") + @console_ns.response(404, "Node not found") @setup_required @login_required @account_initialization_required @@ -309,11 +309,11 @@ class AdvancedChatDraftRunIterationNodeApi(Resource): @console_ns.route("/apps//workflows/draft/iteration/nodes//run") class WorkflowDraftRunIterationNodeApi(Resource): - @api.doc("run_workflow_draft_iteration_node") - @api.doc(description="Run draft workflow iteration node") - @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) - @api.expect( - api.model( + @console_ns.doc("run_workflow_draft_iteration_node") + @console_ns.doc(description="Run draft workflow iteration node") + @console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) + @console_ns.expect( + console_ns.model( "WorkflowIterationNodeRunRequest", { "task_id": fields.String(required=True, description="Task ID"), @@ -321,9 +321,9 @@ class WorkflowDraftRunIterationNodeApi(Resource): }, ) ) - @api.response(200, "Workflow iteration node run started successfully") - @api.response(403, "Permission denied") - @api.response(404, "Node not found") + @console_ns.response(200, "Workflow iteration node run started successfully") + @console_ns.response(403, "Permission denied") + @console_ns.response(404, "Node not found") @setup_required @login_required @account_initialization_required @@ -356,11 +356,11 @@ class WorkflowDraftRunIterationNodeApi(Resource): @console_ns.route("/apps//advanced-chat/workflows/draft/loop/nodes//run") class AdvancedChatDraftRunLoopNodeApi(Resource): - @api.doc("run_advanced_chat_draft_loop_node") - 
@api.doc(description="Run draft workflow loop node for advanced chat") - @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) - @api.expect( - api.model( + @console_ns.doc("run_advanced_chat_draft_loop_node") + @console_ns.doc(description="Run draft workflow loop node for advanced chat") + @console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) + @console_ns.expect( + console_ns.model( "LoopNodeRunRequest", { "task_id": fields.String(required=True, description="Task ID"), @@ -368,9 +368,9 @@ class AdvancedChatDraftRunLoopNodeApi(Resource): }, ) ) - @api.response(200, "Loop node run started successfully") - @api.response(403, "Permission denied") - @api.response(404, "Node not found") + @console_ns.response(200, "Loop node run started successfully") + @console_ns.response(403, "Permission denied") + @console_ns.response(404, "Node not found") @setup_required @login_required @account_initialization_required @@ -403,11 +403,11 @@ class AdvancedChatDraftRunLoopNodeApi(Resource): @console_ns.route("/apps//workflows/draft/loop/nodes//run") class WorkflowDraftRunLoopNodeApi(Resource): - @api.doc("run_workflow_draft_loop_node") - @api.doc(description="Run draft workflow loop node") - @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) - @api.expect( - api.model( + @console_ns.doc("run_workflow_draft_loop_node") + @console_ns.doc(description="Run draft workflow loop node") + @console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) + @console_ns.expect( + console_ns.model( "WorkflowLoopNodeRunRequest", { "task_id": fields.String(required=True, description="Task ID"), @@ -415,9 +415,9 @@ class WorkflowDraftRunLoopNodeApi(Resource): }, ) ) - @api.response(200, "Workflow loop node run started successfully") - @api.response(403, "Permission denied") - @api.response(404, "Node not found") + @console_ns.response(200, "Workflow loop node run started successfully") + @console_ns.response(403, "Permission denied") + 
@console_ns.response(404, "Node not found") @setup_required @login_required @account_initialization_required @@ -450,11 +450,11 @@ class WorkflowDraftRunLoopNodeApi(Resource): @console_ns.route("/apps//workflows/draft/run") class DraftWorkflowRunApi(Resource): - @api.doc("run_draft_workflow") - @api.doc(description="Run draft workflow") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("run_draft_workflow") + @console_ns.doc(description="Run draft workflow") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "DraftWorkflowRunRequest", { "inputs": fields.Raw(required=True, description="Input variables"), @@ -462,8 +462,8 @@ class DraftWorkflowRunApi(Resource): }, ) ) - @api.response(200, "Draft workflow run started successfully") - @api.response(403, "Permission denied") + @console_ns.response(200, "Draft workflow run started successfully") + @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @@ -501,12 +501,12 @@ class DraftWorkflowRunApi(Resource): @console_ns.route("/apps//workflow-runs/tasks//stop") class WorkflowTaskStopApi(Resource): - @api.doc("stop_workflow_task") - @api.doc(description="Stop running workflow task") - @api.doc(params={"app_id": "Application ID", "task_id": "Task ID"}) - @api.response(200, "Task stopped successfully") - @api.response(404, "Task not found") - @api.response(403, "Permission denied") + @console_ns.doc("stop_workflow_task") + @console_ns.doc(description="Stop running workflow task") + @console_ns.doc(params={"app_id": "Application ID", "task_id": "Task ID"}) + @console_ns.response(200, "Task stopped successfully") + @console_ns.response(404, "Task not found") + @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @@ -528,20 +528,20 @@ class WorkflowTaskStopApi(Resource): 
@console_ns.route("/apps//workflows/draft/nodes//run") class DraftWorkflowNodeRunApi(Resource): - @api.doc("run_draft_workflow_node") - @api.doc(description="Run draft workflow node") - @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) - @api.expect( - api.model( + @console_ns.doc("run_draft_workflow_node") + @console_ns.doc(description="Run draft workflow node") + @console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) + @console_ns.expect( + console_ns.model( "DraftWorkflowNodeRunRequest", { "inputs": fields.Raw(description="Input variables"), }, ) ) - @api.response(200, "Node run started successfully", workflow_run_node_execution_fields) - @api.response(403, "Permission denied") - @api.response(404, "Node not found") + @console_ns.response(200, "Node run started successfully", workflow_run_node_execution_fields) + @console_ns.response(403, "Permission denied") + @console_ns.response(404, "Node not found") @setup_required @login_required @account_initialization_required @@ -595,11 +595,11 @@ parser_publish = ( @console_ns.route("/apps//workflows/publish") class PublishedWorkflowApi(Resource): - @api.doc("get_published_workflow") - @api.doc(description="Get published workflow for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Published workflow retrieved successfully", workflow_fields) - @api.response(404, "Published workflow not found") + @console_ns.doc("get_published_workflow") + @console_ns.doc(description="Get published workflow for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Published workflow retrieved successfully", workflow_fields) + @console_ns.response(404, "Published workflow not found") @setup_required @login_required @account_initialization_required @@ -617,7 +617,7 @@ class PublishedWorkflowApi(Resource): # return workflow, if not found, return None return workflow - @api.expect(parser_publish) + 
@console_ns.expect(parser_publish) @setup_required @login_required @account_initialization_required @@ -666,10 +666,10 @@ class PublishedWorkflowApi(Resource): @console_ns.route("/apps//workflows/default-workflow-block-configs") class DefaultBlockConfigsApi(Resource): - @api.doc("get_default_block_configs") - @api.doc(description="Get default block configurations for workflow") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Default block configurations retrieved successfully") + @console_ns.doc("get_default_block_configs") + @console_ns.doc(description="Get default block configurations for workflow") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Default block configurations retrieved successfully") @setup_required @login_required @account_initialization_required @@ -689,12 +689,12 @@ parser_block = reqparse.RequestParser().add_argument("q", type=str, location="ar @console_ns.route("/apps//workflows/default-workflow-block-configs/") class DefaultBlockConfigApi(Resource): - @api.doc("get_default_block_config") - @api.doc(description="Get default block configuration by type") - @api.doc(params={"app_id": "Application ID", "block_type": "Block type"}) - @api.response(200, "Default block configuration retrieved successfully") - @api.response(404, "Block type not found") - @api.expect(parser_block) + @console_ns.doc("get_default_block_config") + @console_ns.doc(description="Get default block configuration by type") + @console_ns.doc(params={"app_id": "Application ID", "block_type": "Block type"}) + @console_ns.response(200, "Default block configuration retrieved successfully") + @console_ns.response(404, "Block type not found") + @console_ns.expect(parser_block) @setup_required @login_required @account_initialization_required @@ -731,13 +731,13 @@ parser_convert = ( @console_ns.route("/apps//convert-to-workflow") class ConvertToWorkflowApi(Resource): - @api.expect(parser_convert) - 
@api.doc("convert_to_workflow") - @api.doc(description="Convert application to workflow mode") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Application converted to workflow successfully") - @api.response(400, "Application cannot be converted") - @api.response(403, "Permission denied") + @console_ns.expect(parser_convert) + @console_ns.doc("convert_to_workflow") + @console_ns.doc(description="Convert application to workflow mode") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Application converted to workflow successfully") + @console_ns.response(400, "Application cannot be converted") + @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @@ -777,11 +777,11 @@ parser_workflows = ( @console_ns.route("/apps//workflows") class PublishedAllWorkflowApi(Resource): - @api.expect(parser_workflows) - @api.doc("get_all_published_workflows") - @api.doc(description="Get all published workflows for an application") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Published workflows retrieved successfully", workflow_pagination_fields) + @console_ns.expect(parser_workflows) + @console_ns.doc("get_all_published_workflows") + @console_ns.doc(description="Get all published workflows for an application") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Published workflows retrieved successfully", workflow_pagination_fields) @setup_required @login_required @account_initialization_required @@ -826,11 +826,11 @@ class PublishedAllWorkflowApi(Resource): @console_ns.route("/apps//workflows/") class WorkflowByIdApi(Resource): - @api.doc("update_workflow_by_id") - @api.doc(description="Update workflow by ID") - @api.doc(params={"app_id": "Application ID", "workflow_id": "Workflow ID"}) - @api.expect( - api.model( + @console_ns.doc("update_workflow_by_id") + @console_ns.doc(description="Update workflow by ID") + 
@console_ns.doc(params={"app_id": "Application ID", "workflow_id": "Workflow ID"}) + @console_ns.expect( + console_ns.model( "UpdateWorkflowRequest", { "environment_variables": fields.List(fields.Raw, description="Environment variables"), @@ -838,9 +838,9 @@ class WorkflowByIdApi(Resource): }, ) ) - @api.response(200, "Workflow updated successfully", workflow_fields) - @api.response(404, "Workflow not found") - @api.response(403, "Permission denied") + @console_ns.response(200, "Workflow updated successfully", workflow_fields) + @console_ns.response(404, "Workflow not found") + @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @@ -926,12 +926,12 @@ class WorkflowByIdApi(Resource): @console_ns.route("/apps//workflows/draft/nodes//last-run") class DraftWorkflowNodeLastRunApi(Resource): - @api.doc("get_draft_workflow_node_last_run") - @api.doc(description="Get last run result for draft workflow node") - @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) - @api.response(200, "Node last run retrieved successfully", workflow_run_node_execution_fields) - @api.response(404, "Node last run not found") - @api.response(403, "Permission denied") + @console_ns.doc("get_draft_workflow_node_last_run") + @console_ns.doc(description="Get last run result for draft workflow node") + @console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) + @console_ns.response(200, "Node last run retrieved successfully", workflow_run_node_execution_fields) + @console_ns.response(404, "Node last run not found") + @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @@ -959,20 +959,20 @@ class DraftWorkflowTriggerRunApi(Resource): Path: /apps//workflows/draft/trigger/run """ - @api.doc("poll_draft_workflow_trigger_run") - @api.doc(description="Poll for trigger events and execute full workflow when event arrives") - @api.doc(params={"app_id": 
"Application ID"}) - @api.expect( - api.model( + @console_ns.doc("poll_draft_workflow_trigger_run") + @console_ns.doc(description="Poll for trigger events and execute full workflow when event arrives") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "DraftWorkflowTriggerRunRequest", { "node_id": fields.String(required=True, description="Node ID"), }, ) ) - @api.response(200, "Trigger event received and workflow executed successfully") - @api.response(403, "Permission denied") - @api.response(500, "Internal server error") + @console_ns.response(200, "Trigger event received and workflow executed successfully") + @console_ns.response(403, "Permission denied") + @console_ns.response(500, "Internal server error") @setup_required @login_required @account_initialization_required @@ -1033,12 +1033,12 @@ class DraftWorkflowTriggerNodeApi(Resource): Path: /apps//workflows/draft/nodes//trigger/run """ - @api.doc("poll_draft_workflow_trigger_node") - @api.doc(description="Poll for trigger events and execute single node when event arrives") - @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) - @api.response(200, "Trigger event received and node executed successfully") - @api.response(403, "Permission denied") - @api.response(500, "Internal server error") + @console_ns.doc("poll_draft_workflow_trigger_node") + @console_ns.doc(description="Poll for trigger events and execute single node when event arrives") + @console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) + @console_ns.response(200, "Trigger event received and node executed successfully") + @console_ns.response(403, "Permission denied") + @console_ns.response(500, "Internal server error") @setup_required @login_required @account_initialization_required @@ -1112,20 +1112,20 @@ class DraftWorkflowTriggerRunAllApi(Resource): Path: /apps//workflows/draft/trigger/run-all """ - @api.doc("draft_workflow_trigger_run_all") - 
@api.doc(description="Full workflow debug when the start node is a trigger") - @api.doc(params={"app_id": "Application ID"}) - @api.expect( - api.model( + @console_ns.doc("draft_workflow_trigger_run_all") + @console_ns.doc(description="Full workflow debug when the start node is a trigger") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.expect( + console_ns.model( "DraftWorkflowTriggerRunAllRequest", { "node_ids": fields.List(fields.String, required=True, description="Node IDs"), }, ) ) - @api.response(200, "Workflow executed successfully") - @api.response(403, "Permission denied") - @api.response(500, "Internal server error") + @console_ns.response(200, "Workflow executed successfully") + @console_ns.response(403, "Permission denied") + @console_ns.response(500, "Internal server error") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py index d7ecc7c91b..fc1fa9cb13 100644 --- a/api/controllers/console/app/workflow_app_log.py +++ b/api/controllers/console/app/workflow_app_log.py @@ -3,7 +3,7 @@ from flask_restx import Resource, marshal_with, reqparse from flask_restx.inputs import int_range from sqlalchemy.orm import Session -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from core.workflow.enums import WorkflowExecutionStatus @@ -17,10 +17,10 @@ from services.workflow_app_service import WorkflowAppService @console_ns.route("/apps//workflow-app-logs") class WorkflowAppLogApi(Resource): - @api.doc("get_workflow_app_logs") - @api.doc(description="Get workflow application execution logs") - @api.doc(params={"app_id": "Application ID"}) - @api.doc( + @console_ns.doc("get_workflow_app_logs") + @console_ns.doc(description="Get workflow 
application execution logs") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.doc( params={ "keyword": "Search keyword for filtering logs", "status": "Filter by execution status (succeeded, failed, stopped, partial-succeeded)", @@ -33,7 +33,7 @@ class WorkflowAppLogApi(Resource): "limit": "Number of items per page (1-100)", } ) - @api.response(200, "Workflow app logs retrieved successfully", workflow_app_log_pagination_fields) + @console_ns.response(200, "Workflow app logs retrieved successfully", workflow_app_log_pagination_fields) @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/workflow_draft_variable.py b/api/controllers/console/app/workflow_draft_variable.py index ca97d8520c..007061ae7a 100644 --- a/api/controllers/console/app/workflow_draft_variable.py +++ b/api/controllers/console/app/workflow_draft_variable.py @@ -7,7 +7,7 @@ from flask import Response from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse from sqlalchemy.orm import Session -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.error import ( DraftWorkflowNotExist, ) @@ -170,12 +170,14 @@ def _api_prerequisite(f: Callable[P, R]): @console_ns.route("/apps//workflows/draft/variables") class WorkflowVariableCollectionApi(Resource): - @api.expect(_create_pagination_parser()) - @api.doc("get_workflow_variables") - @api.doc(description="Get draft workflow variables") - @api.doc(params={"app_id": "Application ID"}) - @api.doc(params={"page": "Page number (1-100000)", "limit": "Number of items per page (1-100)"}) - @api.response(200, "Workflow variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS) + @console_ns.expect(_create_pagination_parser()) + @console_ns.doc("get_workflow_variables") + @console_ns.doc(description="Get draft workflow variables") + @console_ns.doc(params={"app_id": "Application 
ID"}) + @console_ns.doc(params={"page": "Page number (1-100000)", "limit": "Number of items per page (1-100)"}) + @console_ns.response( + 200, "Workflow variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS + ) @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS) def get(self, app_model: App): @@ -204,9 +206,9 @@ class WorkflowVariableCollectionApi(Resource): return workflow_vars - @api.doc("delete_workflow_variables") - @api.doc(description="Delete all draft workflow variables") - @api.response(204, "Workflow variables deleted successfully") + @console_ns.doc("delete_workflow_variables") + @console_ns.doc(description="Delete all draft workflow variables") + @console_ns.response(204, "Workflow variables deleted successfully") @_api_prerequisite def delete(self, app_model: App): draft_var_srv = WorkflowDraftVariableService( @@ -237,10 +239,10 @@ def validate_node_id(node_id: str) -> NoReturn | None: @console_ns.route("/apps//workflows/draft/nodes//variables") class NodeVariableCollectionApi(Resource): - @api.doc("get_node_variables") - @api.doc(description="Get variables for a specific node") - @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) - @api.response(200, "Node variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @console_ns.doc("get_node_variables") + @console_ns.doc(description="Get variables for a specific node") + @console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) + @console_ns.response(200, "Node variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) def get(self, app_model: App, node_id: str): @@ -253,9 +255,9 @@ class NodeVariableCollectionApi(Resource): return node_vars - @api.doc("delete_node_variables") - @api.doc(description="Delete all variables for a specific node") - @api.response(204, "Node variables deleted successfully") + 
@console_ns.doc("delete_node_variables") + @console_ns.doc(description="Delete all variables for a specific node") + @console_ns.response(204, "Node variables deleted successfully") @_api_prerequisite def delete(self, app_model: App, node_id: str): validate_node_id(node_id) @@ -270,11 +272,11 @@ class VariableApi(Resource): _PATCH_NAME_FIELD = "name" _PATCH_VALUE_FIELD = "value" - @api.doc("get_variable") - @api.doc(description="Get a specific workflow variable") - @api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"}) - @api.response(200, "Variable retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS) - @api.response(404, "Variable not found") + @console_ns.doc("get_variable") + @console_ns.doc(description="Get a specific workflow variable") + @console_ns.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"}) + @console_ns.response(200, "Variable retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS) + @console_ns.response(404, "Variable not found") @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS) def get(self, app_model: App, variable_id: str): @@ -288,10 +290,10 @@ class VariableApi(Resource): raise NotFoundError(description=f"variable not found, id={variable_id}") return variable - @api.doc("update_variable") - @api.doc(description="Update a workflow variable") - @api.expect( - api.model( + @console_ns.doc("update_variable") + @console_ns.doc(description="Update a workflow variable") + @console_ns.expect( + console_ns.model( "UpdateVariableRequest", { "name": fields.String(description="Variable name"), @@ -299,8 +301,8 @@ class VariableApi(Resource): }, ) ) - @api.response(200, "Variable updated successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS) - @api.response(404, "Variable not found") + @console_ns.response(200, "Variable updated successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS) + @console_ns.response(404, "Variable not found") @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS) def 
patch(self, app_model: App, variable_id: str): @@ -364,10 +366,10 @@ class VariableApi(Resource): db.session.commit() return variable - @api.doc("delete_variable") - @api.doc(description="Delete a workflow variable") - @api.response(204, "Variable deleted successfully") - @api.response(404, "Variable not found") + @console_ns.doc("delete_variable") + @console_ns.doc(description="Delete a workflow variable") + @console_ns.response(204, "Variable deleted successfully") + @console_ns.response(404, "Variable not found") @_api_prerequisite def delete(self, app_model: App, variable_id: str): draft_var_srv = WorkflowDraftVariableService( @@ -385,12 +387,12 @@ class VariableApi(Resource): @console_ns.route("/apps//workflows/draft/variables//reset") class VariableResetApi(Resource): - @api.doc("reset_variable") - @api.doc(description="Reset a workflow variable to its default value") - @api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"}) - @api.response(200, "Variable reset successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS) - @api.response(204, "Variable reset (no content)") - @api.response(404, "Variable not found") + @console_ns.doc("reset_variable") + @console_ns.doc(description="Reset a workflow variable to its default value") + @console_ns.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"}) + @console_ns.response(200, "Variable reset successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS) + @console_ns.response(204, "Variable reset (no content)") + @console_ns.response(404, "Variable not found") @_api_prerequisite def put(self, app_model: App, variable_id: str): draft_var_srv = WorkflowDraftVariableService( @@ -433,11 +435,11 @@ def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList: @console_ns.route("/apps//workflows/draft/conversation-variables") class ConversationVariableCollectionApi(Resource): - @api.doc("get_conversation_variables") - @api.doc(description="Get conversation variables for workflow") - 
@api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Conversation variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) - @api.response(404, "Draft workflow not found") + @console_ns.doc("get_conversation_variables") + @console_ns.doc(description="Get conversation variables for workflow") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Conversation variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @console_ns.response(404, "Draft workflow not found") @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) def get(self, app_model: App): @@ -455,10 +457,10 @@ class ConversationVariableCollectionApi(Resource): @console_ns.route("/apps//workflows/draft/system-variables") class SystemVariableCollectionApi(Resource): - @api.doc("get_system_variables") - @api.doc(description="Get system variables for workflow") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "System variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @console_ns.doc("get_system_variables") + @console_ns.doc(description="Get system variables for workflow") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "System variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) @_api_prerequisite @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) def get(self, app_model: App): @@ -467,11 +469,11 @@ class SystemVariableCollectionApi(Resource): @console_ns.route("/apps//workflows/draft/environment-variables") class EnvironmentVariableCollectionApi(Resource): - @api.doc("get_environment_variables") - @api.doc(description="Get environment variables for workflow") - @api.doc(params={"app_id": "Application ID"}) - @api.response(200, "Environment variables retrieved successfully") - @api.response(404, "Draft workflow not found") + @console_ns.doc("get_environment_variables") + @console_ns.doc(description="Get environment 
variables for workflow") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.response(200, "Environment variables retrieved successfully") + @console_ns.response(404, "Draft workflow not found") @_api_prerequisite def get(self, app_model: App): """ diff --git a/api/controllers/console/app/workflow_run.py b/api/controllers/console/app/workflow_run.py index 23c228efbe..51f7445ce0 100644 --- a/api/controllers/console/app/workflow_run.py +++ b/api/controllers/console/app/workflow_run.py @@ -3,7 +3,7 @@ from typing import cast from flask_restx import Resource, marshal_with, reqparse from flask_restx.inputs import int_range -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from fields.workflow_run_fields import ( @@ -90,13 +90,17 @@ def _parse_workflow_run_count_args(): @console_ns.route("/apps//advanced-chat/workflow-runs") class AdvancedChatAppWorkflowRunListApi(Resource): - @api.doc("get_advanced_chat_workflow_runs") - @api.doc(description="Get advanced chat workflow run list") - @api.doc(params={"app_id": "Application ID"}) - @api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"}) - @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) - @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. 
Default: debugging"}) - @api.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields) + @console_ns.doc("get_advanced_chat_workflow_runs") + @console_ns.doc(description="Get advanced chat workflow run list") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"}) + @console_ns.doc( + params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"} + ) + @console_ns.doc( + params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"} + ) + @console_ns.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields) @setup_required @login_required @account_initialization_required @@ -125,11 +129,13 @@ class AdvancedChatAppWorkflowRunListApi(Resource): @console_ns.route("/apps//advanced-chat/workflow-runs/count") class AdvancedChatAppWorkflowRunCountApi(Resource): - @api.doc("get_advanced_chat_workflow_runs_count") - @api.doc(description="Get advanced chat workflow runs count statistics") - @api.doc(params={"app_id": "Application ID"}) - @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) - @api.doc( + @console_ns.doc("get_advanced_chat_workflow_runs_count") + @console_ns.doc(description="Get advanced chat workflow runs count statistics") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.doc( + params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"} + ) + @console_ns.doc( params={ "time_range": ( "Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), " @@ -137,8 +143,10 @@ class AdvancedChatAppWorkflowRunCountApi(Resource): ) } ) - @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. 
Default: debugging"}) - @api.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields) + @console_ns.doc( + params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"} + ) + @console_ns.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields) @setup_required @login_required @account_initialization_required @@ -170,13 +178,17 @@ class AdvancedChatAppWorkflowRunCountApi(Resource): @console_ns.route("/apps//workflow-runs") class WorkflowRunListApi(Resource): - @api.doc("get_workflow_runs") - @api.doc(description="Get workflow run list") - @api.doc(params={"app_id": "Application ID"}) - @api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"}) - @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) - @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}) - @api.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields) + @console_ns.doc("get_workflow_runs") + @console_ns.doc(description="Get workflow run list") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"}) + @console_ns.doc( + params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"} + ) + @console_ns.doc( + params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. 
Default: debugging"} + ) + @console_ns.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields) @setup_required @login_required @account_initialization_required @@ -205,11 +217,13 @@ class WorkflowRunListApi(Resource): @console_ns.route("/apps//workflow-runs/count") class WorkflowRunCountApi(Resource): - @api.doc("get_workflow_runs_count") - @api.doc(description="Get workflow runs count statistics") - @api.doc(params={"app_id": "Application ID"}) - @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) - @api.doc( + @console_ns.doc("get_workflow_runs_count") + @console_ns.doc(description="Get workflow runs count statistics") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.doc( + params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"} + ) + @console_ns.doc( params={ "time_range": ( "Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), " @@ -217,8 +231,10 @@ class WorkflowRunCountApi(Resource): ) } ) - @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}) - @api.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields) + @console_ns.doc( + params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. 
Default: debugging"} + ) + @console_ns.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields) @setup_required @login_required @account_initialization_required @@ -250,11 +266,11 @@ class WorkflowRunCountApi(Resource): @console_ns.route("/apps//workflow-runs/") class WorkflowRunDetailApi(Resource): - @api.doc("get_workflow_run_detail") - @api.doc(description="Get workflow run detail") - @api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"}) - @api.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_fields) - @api.response(404, "Workflow run not found") + @console_ns.doc("get_workflow_run_detail") + @console_ns.doc(description="Get workflow run detail") + @console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"}) + @console_ns.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_fields) + @console_ns.response(404, "Workflow run not found") @setup_required @login_required @account_initialization_required @@ -274,11 +290,11 @@ class WorkflowRunDetailApi(Resource): @console_ns.route("/apps//workflow-runs//node-executions") class WorkflowRunNodeExecutionListApi(Resource): - @api.doc("get_workflow_run_node_executions") - @api.doc(description="Get workflow run node execution list") - @api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"}) - @api.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_fields) - @api.response(404, "Workflow run not found") + @console_ns.doc("get_workflow_run_node_executions") + @console_ns.doc(description="Get workflow run node execution list") + @console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"}) + @console_ns.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_fields) + @console_ns.response(404, "Workflow run not found") @setup_required @login_required @account_initialization_required diff --git 
a/api/controllers/console/app/workflow_statistic.py b/api/controllers/console/app/workflow_statistic.py index ef5205c1ee..4a873e5ec1 100644 --- a/api/controllers/console/app/workflow_statistic.py +++ b/api/controllers/console/app/workflow_statistic.py @@ -2,7 +2,7 @@ from flask import abort, jsonify from flask_restx import Resource, reqparse from sqlalchemy.orm import sessionmaker -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db @@ -21,11 +21,13 @@ class WorkflowDailyRunsStatistic(Resource): session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) - @api.doc("get_workflow_daily_runs_statistic") - @api.doc(description="Get workflow daily runs statistics") - @api.doc(params={"app_id": "Application ID"}) - @api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"}) - @api.response(200, "Daily runs statistics retrieved successfully") + @console_ns.doc("get_workflow_daily_runs_statistic") + @console_ns.doc(description="Get workflow daily runs statistics") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.doc( + params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"} + ) + @console_ns.response(200, "Daily runs statistics retrieved successfully") @get_app_model @setup_required @login_required @@ -66,11 +68,13 @@ class WorkflowDailyTerminalsStatistic(Resource): session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) - @api.doc("get_workflow_daily_terminals_statistic") - @api.doc(description="Get workflow 
daily terminals statistics") - @api.doc(params={"app_id": "Application ID"}) - @api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"}) - @api.response(200, "Daily terminals statistics retrieved successfully") + @console_ns.doc("get_workflow_daily_terminals_statistic") + @console_ns.doc(description="Get workflow daily terminals statistics") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.doc( + params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"} + ) + @console_ns.response(200, "Daily terminals statistics retrieved successfully") @get_app_model @setup_required @login_required @@ -111,11 +115,13 @@ class WorkflowDailyTokenCostStatistic(Resource): session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) - @api.doc("get_workflow_daily_token_cost_statistic") - @api.doc(description="Get workflow daily token cost statistics") - @api.doc(params={"app_id": "Application ID"}) - @api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"}) - @api.response(200, "Daily token cost statistics retrieved successfully") + @console_ns.doc("get_workflow_daily_token_cost_statistic") + @console_ns.doc(description="Get workflow daily token cost statistics") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.doc( + params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"} + ) + @console_ns.response(200, "Daily token cost statistics retrieved successfully") @get_app_model @setup_required @login_required @@ -156,11 +162,13 @@ class WorkflowAverageAppInteractionStatistic(Resource): session_maker = sessionmaker(bind=db.engine, expire_on_commit=False) self._workflow_run_repo = 
DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) - @api.doc("get_workflow_average_app_interaction_statistic") - @api.doc(description="Get workflow average app interaction statistics") - @api.doc(params={"app_id": "Application ID"}) - @api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"}) - @api.response(200, "Average app interaction statistics retrieved successfully") + @console_ns.doc("get_workflow_average_app_interaction_statistic") + @console_ns.doc(description="Get workflow average app interaction statistics") + @console_ns.doc(params={"app_id": "Application ID"}) + @console_ns.doc( + params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"} + ) + @console_ns.response(200, "Average app interaction statistics retrieved successfully") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/app/workflow_trigger.py b/api/controllers/console/app/workflow_trigger.py index 785813c5f0..c3ea60ae3a 100644 --- a/api/controllers/console/app/workflow_trigger.py +++ b/api/controllers/console/app/workflow_trigger.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound from configs import dify_config -from controllers.console import api +from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from extensions.ext_database import db @@ -139,6 +139,6 @@ class AppTriggerEnableApi(Resource): return trigger -api.add_resource(WebhookTriggerApi, "/apps//workflows/triggers/webhook") -api.add_resource(AppTriggersApi, "/apps//triggers") -api.add_resource(AppTriggerEnableApi, "/apps//trigger-enable") +console_ns.add_resource(WebhookTriggerApi, "/apps//workflows/triggers/webhook") +console_ns.add_resource(AppTriggersApi, 
"/apps//triggers") +console_ns.add_resource(AppTriggerEnableApi, "/apps//trigger-enable") diff --git a/api/controllers/console/auth/activate.py b/api/controllers/console/auth/activate.py index 2eeef079a1..a11b741040 100644 --- a/api/controllers/console/auth/activate.py +++ b/api/controllers/console/auth/activate.py @@ -2,7 +2,7 @@ from flask import request from flask_restx import Resource, fields, reqparse from constants.languages import supported_language -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.error import AlreadyActivateError from extensions.ext_database import db from libs.datetime_utils import naive_utc_now @@ -20,13 +20,13 @@ active_check_parser = ( @console_ns.route("/activate/check") class ActivateCheckApi(Resource): - @api.doc("check_activation_token") - @api.doc(description="Check if activation token is valid") - @api.expect(active_check_parser) - @api.response( + @console_ns.doc("check_activation_token") + @console_ns.doc(description="Check if activation token is valid") + @console_ns.expect(active_check_parser) + @console_ns.response( 200, "Success", - api.model( + console_ns.model( "ActivationCheckResponse", { "is_valid": fields.Boolean(description="Whether token is valid"), @@ -69,13 +69,13 @@ active_parser = ( @console_ns.route("/activate") class ActivateApi(Resource): - @api.doc("activate_account") - @api.doc(description="Activate account with invitation token") - @api.expect(active_parser) - @api.response( + @console_ns.doc("activate_account") + @console_ns.doc(description="Activate account with invitation token") + @console_ns.expect(active_parser) + @console_ns.response( 200, "Account activated successfully", - api.model( + console_ns.model( "ActivationResponse", { "result": fields.String(description="Operation result"), @@ -83,7 +83,7 @@ class ActivateApi(Resource): }, ), ) - @api.response(400, "Already activated or invalid token") + @console_ns.response(400, "Already 
activated or invalid token") def post(self): args = active_parser.parse_args() diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py index a27932ccd8..cd547caf20 100644 --- a/api/controllers/console/auth/data_source_oauth.py +++ b/api/controllers/console/auth/data_source_oauth.py @@ -5,7 +5,7 @@ from flask import current_app, redirect, request from flask_restx import Resource, fields from configs import dify_config -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import is_admin_or_owner_required from libs.login import login_required from libs.oauth_data_source import NotionOAuth @@ -29,19 +29,19 @@ def get_oauth_providers(): @console_ns.route("/oauth/data-source/") class OAuthDataSource(Resource): - @api.doc("oauth_data_source") - @api.doc(description="Get OAuth authorization URL for data source provider") - @api.doc(params={"provider": "Data source provider name (notion)"}) - @api.response( + @console_ns.doc("oauth_data_source") + @console_ns.doc(description="Get OAuth authorization URL for data source provider") + @console_ns.doc(params={"provider": "Data source provider name (notion)"}) + @console_ns.response( 200, "Authorization URL or internal setup success", - api.model( + console_ns.model( "OAuthDataSourceResponse", {"data": fields.Raw(description="Authorization URL or 'internal' for internal setup")}, ), ) - @api.response(400, "Invalid provider") - @api.response(403, "Admin privileges required") + @console_ns.response(400, "Invalid provider") + @console_ns.response(403, "Admin privileges required") @is_admin_or_owner_required def get(self, provider: str): # The role of the current user in the table must be admin or owner @@ -63,17 +63,17 @@ class OAuthDataSource(Resource): @console_ns.route("/oauth/data-source/callback/") class OAuthDataSourceCallback(Resource): - @api.doc("oauth_data_source_callback") - 
@api.doc(description="Handle OAuth callback from data source provider") - @api.doc( + @console_ns.doc("oauth_data_source_callback") + @console_ns.doc(description="Handle OAuth callback from data source provider") + @console_ns.doc( params={ "provider": "Data source provider name (notion)", "code": "Authorization code from OAuth provider", "error": "Error message from OAuth provider", } ) - @api.response(302, "Redirect to console with result") - @api.response(400, "Invalid provider") + @console_ns.response(302, "Redirect to console with result") + @console_ns.response(400, "Invalid provider") def get(self, provider: str): OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers() with current_app.app_context(): @@ -94,17 +94,17 @@ class OAuthDataSourceCallback(Resource): @console_ns.route("/oauth/data-source/binding/") class OAuthDataSourceBinding(Resource): - @api.doc("oauth_data_source_binding") - @api.doc(description="Bind OAuth data source with authorization code") - @api.doc( + @console_ns.doc("oauth_data_source_binding") + @console_ns.doc(description="Bind OAuth data source with authorization code") + @console_ns.doc( params={"provider": "Data source provider name (notion)", "code": "Authorization code from OAuth provider"} ) - @api.response( + @console_ns.response( 200, "Data source binding success", - api.model("OAuthDataSourceBindingResponse", {"result": fields.String(description="Operation result")}), + console_ns.model("OAuthDataSourceBindingResponse", {"result": fields.String(description="Operation result")}), ) - @api.response(400, "Invalid provider or code") + @console_ns.response(400, "Invalid provider or code") def get(self, provider: str): OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers() with current_app.app_context(): @@ -128,15 +128,15 @@ class OAuthDataSourceBinding(Resource): @console_ns.route("/oauth/data-source///sync") class OAuthDataSourceSync(Resource): - @api.doc("oauth_data_source_sync") - @api.doc(description="Sync data from OAuth data 
source") - @api.doc(params={"provider": "Data source provider name (notion)", "binding_id": "Data source binding ID"}) - @api.response( + @console_ns.doc("oauth_data_source_sync") + @console_ns.doc(description="Sync data from OAuth data source") + @console_ns.doc(params={"provider": "Data source provider name (notion)", "binding_id": "Data source binding ID"}) + @console_ns.response( 200, "Data source sync success", - api.model("OAuthDataSourceSyncResponse", {"result": fields.String(description="Operation result")}), + console_ns.model("OAuthDataSourceSyncResponse", {"result": fields.String(description="Operation result")}), ) - @api.response(400, "Invalid provider or sync failed") + @console_ns.response(400, "Invalid provider or sync failed") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py index 6be6ad51fe..ee561bdd30 100644 --- a/api/controllers/console/auth/forgot_password.py +++ b/api/controllers/console/auth/forgot_password.py @@ -6,7 +6,7 @@ from flask_restx import Resource, fields, reqparse from sqlalchemy import select from sqlalchemy.orm import Session -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.auth.error import ( EmailCodeError, EmailPasswordResetLimitError, @@ -27,10 +27,10 @@ from services.feature_service import FeatureService @console_ns.route("/forgot-password") class ForgotPasswordSendEmailApi(Resource): - @api.doc("send_forgot_password_email") - @api.doc(description="Send password reset email") - @api.expect( - api.model( + @console_ns.doc("send_forgot_password_email") + @console_ns.doc(description="Send password reset email") + @console_ns.expect( + console_ns.model( "ForgotPasswordEmailRequest", { "email": fields.String(required=True, description="Email address"), @@ -38,10 +38,10 @@ class ForgotPasswordSendEmailApi(Resource): }, ) ) - 
@api.response( + @console_ns.response( 200, "Email sent successfully", - api.model( + console_ns.model( "ForgotPasswordEmailResponse", { "result": fields.String(description="Operation result"), @@ -50,7 +50,7 @@ class ForgotPasswordSendEmailApi(Resource): }, ), ) - @api.response(400, "Invalid email or rate limit exceeded") + @console_ns.response(400, "Invalid email or rate limit exceeded") @setup_required @email_password_login_enabled def post(self): @@ -85,10 +85,10 @@ class ForgotPasswordSendEmailApi(Resource): @console_ns.route("/forgot-password/validity") class ForgotPasswordCheckApi(Resource): - @api.doc("check_forgot_password_code") - @api.doc(description="Verify password reset code") - @api.expect( - api.model( + @console_ns.doc("check_forgot_password_code") + @console_ns.doc(description="Verify password reset code") + @console_ns.expect( + console_ns.model( "ForgotPasswordCheckRequest", { "email": fields.String(required=True, description="Email address"), @@ -97,10 +97,10 @@ class ForgotPasswordCheckApi(Resource): }, ) ) - @api.response( + @console_ns.response( 200, "Code verified successfully", - api.model( + console_ns.model( "ForgotPasswordCheckResponse", { "is_valid": fields.Boolean(description="Whether code is valid"), @@ -109,7 +109,7 @@ class ForgotPasswordCheckApi(Resource): }, ), ) - @api.response(400, "Invalid code or token") + @console_ns.response(400, "Invalid code or token") @setup_required @email_password_login_enabled def post(self): @@ -152,10 +152,10 @@ class ForgotPasswordCheckApi(Resource): @console_ns.route("/forgot-password/resets") class ForgotPasswordResetApi(Resource): - @api.doc("reset_password") - @api.doc(description="Reset password with verification token") - @api.expect( - api.model( + @console_ns.doc("reset_password") + @console_ns.doc(description="Reset password with verification token") + @console_ns.expect( + console_ns.model( "ForgotPasswordResetRequest", { "token": fields.String(required=True, description="Verification 
token"), @@ -164,12 +164,12 @@ class ForgotPasswordResetApi(Resource): }, ) ) - @api.response( + @console_ns.response( 200, "Password reset successfully", - api.model("ForgotPasswordResetResponse", {"result": fields.String(description="Operation result")}), + console_ns.model("ForgotPasswordResetResponse", {"result": fields.String(description="Operation result")}), ) - @api.response(400, "Invalid token or password mismatch") + @console_ns.response(400, "Invalid token or password mismatch") @setup_required @email_password_login_enabled def post(self): diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index 29653b32ec..7ad1e56373 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -26,7 +26,7 @@ from services.errors.account import AccountNotFoundError, AccountRegisterError from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkSpaceNotFoundError from services.feature_service import FeatureService -from .. import api, console_ns +from .. 
import console_ns logger = logging.getLogger(__name__) @@ -56,11 +56,13 @@ def get_oauth_providers(): @console_ns.route("/oauth/login/") class OAuthLogin(Resource): - @api.doc("oauth_login") - @api.doc(description="Initiate OAuth login process") - @api.doc(params={"provider": "OAuth provider name (github/google)", "invite_token": "Optional invitation token"}) - @api.response(302, "Redirect to OAuth authorization URL") - @api.response(400, "Invalid provider") + @console_ns.doc("oauth_login") + @console_ns.doc(description="Initiate OAuth login process") + @console_ns.doc( + params={"provider": "OAuth provider name (github/google)", "invite_token": "Optional invitation token"} + ) + @console_ns.response(302, "Redirect to OAuth authorization URL") + @console_ns.response(400, "Invalid provider") def get(self, provider: str): invite_token = request.args.get("invite_token") or None OAUTH_PROVIDERS = get_oauth_providers() @@ -75,17 +77,17 @@ class OAuthLogin(Resource): @console_ns.route("/oauth/authorize/") class OAuthCallback(Resource): - @api.doc("oauth_callback") - @api.doc(description="Handle OAuth callback and complete login process") - @api.doc( + @console_ns.doc("oauth_callback") + @console_ns.doc(description="Handle OAuth callback and complete login process") + @console_ns.doc( params={ "provider": "OAuth provider name (github/google)", "code": "Authorization code from OAuth provider", "state": "Optional state parameter (used for invite token)", } ) - @api.response(302, "Redirect to console with access token") - @api.response(400, "OAuth process failed") + @console_ns.response(302, "Redirect to console with access token") + @console_ns.response(400, "OAuth process failed") def get(self, provider: str): OAUTH_PROVIDERS = get_oauth_providers() with current_app.app_context(): diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py index 6efb4564ca..4fef1ba40d 100644 --- a/api/controllers/console/billing/billing.py +++ 
b/api/controllers/console/billing/billing.py @@ -3,7 +3,7 @@ import base64 from flask_restx import Resource, fields, reqparse from werkzeug.exceptions import BadRequest -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required from enums.cloud_plan import CloudPlan from libs.login import current_account_with_tenant, login_required @@ -48,17 +48,17 @@ class Invoices(Resource): @console_ns.route("/billing/partners//tenants") class PartnerTenants(Resource): - @api.doc("sync_partner_tenants_bindings") - @api.doc(description="Sync partner tenants bindings") - @api.doc(params={"partner_key": "Partner key"}) - @api.expect( - api.model( + @console_ns.doc("sync_partner_tenants_bindings") + @console_ns.doc(description="Sync partner tenants bindings") + @console_ns.doc(params={"partner_key": "Partner key"}) + @console_ns.expect( + console_ns.model( "SyncPartnerTenantsBindingsRequest", {"click_id": fields.String(required=True, description="Click Id from partner referral link")}, ) ) - @api.response(200, "Tenants synced to partner successfully") - @api.response(400, "Invalid partner information") + @console_ns.response(200, "Tenants synced to partner successfully") + @console_ns.response(400, "Invalid partner information") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 3aac571300..54761413f4 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -7,7 +7,7 @@ from werkzeug.exceptions import Forbidden, NotFound import services from configs import dify_config -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.apikey import api_key_fields, api_key_list from controllers.console.app.error import 
ProviderNotInitializeError from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError @@ -119,9 +119,9 @@ def _get_retrieval_methods_by_vector_type(vector_type: str | None, is_mock: bool @console_ns.route("/datasets") class DatasetListApi(Resource): - @api.doc("get_datasets") - @api.doc(description="Get list of datasets") - @api.doc( + @console_ns.doc("get_datasets") + @console_ns.doc(description="Get list of datasets") + @console_ns.doc( params={ "page": "Page number (default: 1)", "limit": "Number of items per page (default: 20)", @@ -131,7 +131,7 @@ class DatasetListApi(Resource): "include_all": "Include all datasets (default: false)", } ) - @api.response(200, "Datasets retrieved successfully") + @console_ns.response(200, "Datasets retrieved successfully") @setup_required @login_required @account_initialization_required @@ -184,10 +184,10 @@ class DatasetListApi(Resource): response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page} return response, 200 - @api.doc("create_dataset") - @api.doc(description="Create a new dataset") - @api.expect( - api.model( + @console_ns.doc("create_dataset") + @console_ns.doc(description="Create a new dataset") + @console_ns.expect( + console_ns.model( "CreateDatasetRequest", { "name": fields.String(required=True, description="Dataset name (1-40 characters)"), @@ -200,8 +200,8 @@ class DatasetListApi(Resource): }, ) ) - @api.response(201, "Dataset created successfully") - @api.response(400, "Invalid request parameters") + @console_ns.response(201, "Dataset created successfully") + @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required @@ -279,12 +279,12 @@ class DatasetListApi(Resource): @console_ns.route("/datasets/") class DatasetApi(Resource): - @api.doc("get_dataset") - @api.doc(description="Get dataset details") - @api.doc(params={"dataset_id": "Dataset ID"}) 
- @api.response(200, "Dataset retrieved successfully", dataset_detail_fields) - @api.response(404, "Dataset not found") - @api.response(403, "Permission denied") + @console_ns.doc("get_dataset") + @console_ns.doc(description="Get dataset details") + @console_ns.doc(params={"dataset_id": "Dataset ID"}) + @console_ns.response(200, "Dataset retrieved successfully", dataset_detail_fields) + @console_ns.response(404, "Dataset not found") + @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @@ -328,10 +328,10 @@ class DatasetApi(Resource): return data, 200 - @api.doc("update_dataset") - @api.doc(description="Update dataset details") - @api.expect( - api.model( + @console_ns.doc("update_dataset") + @console_ns.doc(description="Update dataset details") + @console_ns.expect( + console_ns.model( "UpdateDatasetRequest", { "name": fields.String(description="Dataset name"), @@ -342,9 +342,9 @@ class DatasetApi(Resource): }, ) ) - @api.response(200, "Dataset updated successfully", dataset_detail_fields) - @api.response(404, "Dataset not found") - @api.response(403, "Permission denied") + @console_ns.response(200, "Dataset updated successfully", dataset_detail_fields) + @console_ns.response(404, "Dataset not found") + @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @@ -488,10 +488,10 @@ class DatasetApi(Resource): @console_ns.route("/datasets//use-check") class DatasetUseCheckApi(Resource): - @api.doc("check_dataset_use") - @api.doc(description="Check if dataset is in use") - @api.doc(params={"dataset_id": "Dataset ID"}) - @api.response(200, "Dataset use status retrieved successfully") + @console_ns.doc("check_dataset_use") + @console_ns.doc(description="Check if dataset is in use") + @console_ns.doc(params={"dataset_id": "Dataset ID"}) + @console_ns.response(200, "Dataset use status retrieved successfully") @setup_required @login_required 
@account_initialization_required @@ -504,10 +504,10 @@ class DatasetUseCheckApi(Resource): @console_ns.route("/datasets//queries") class DatasetQueryApi(Resource): - @api.doc("get_dataset_queries") - @api.doc(description="Get dataset query history") - @api.doc(params={"dataset_id": "Dataset ID"}) - @api.response(200, "Query history retrieved successfully", dataset_query_detail_fields) + @console_ns.doc("get_dataset_queries") + @console_ns.doc(description="Get dataset query history") + @console_ns.doc(params={"dataset_id": "Dataset ID"}) + @console_ns.response(200, "Query history retrieved successfully", dataset_query_detail_fields) @setup_required @login_required @account_initialization_required @@ -540,9 +540,9 @@ class DatasetQueryApi(Resource): @console_ns.route("/datasets/indexing-estimate") class DatasetIndexingEstimateApi(Resource): - @api.doc("estimate_dataset_indexing") - @api.doc(description="Estimate dataset indexing cost") - @api.response(200, "Indexing estimate calculated successfully") + @console_ns.doc("estimate_dataset_indexing") + @console_ns.doc(description="Estimate dataset indexing cost") + @console_ns.response(200, "Indexing estimate calculated successfully") @setup_required @login_required @account_initialization_required @@ -650,10 +650,10 @@ class DatasetIndexingEstimateApi(Resource): @console_ns.route("/datasets//related-apps") class DatasetRelatedAppListApi(Resource): - @api.doc("get_dataset_related_apps") - @api.doc(description="Get applications related to dataset") - @api.doc(params={"dataset_id": "Dataset ID"}) - @api.response(200, "Related apps retrieved successfully", related_app_list) + @console_ns.doc("get_dataset_related_apps") + @console_ns.doc(description="Get applications related to dataset") + @console_ns.doc(params={"dataset_id": "Dataset ID"}) + @console_ns.response(200, "Related apps retrieved successfully", related_app_list) @setup_required @login_required @account_initialization_required @@ -683,10 +683,10 @@ class 
DatasetRelatedAppListApi(Resource): @console_ns.route("/datasets//indexing-status") class DatasetIndexingStatusApi(Resource): - @api.doc("get_dataset_indexing_status") - @api.doc(description="Get dataset indexing status") - @api.doc(params={"dataset_id": "Dataset ID"}) - @api.response(200, "Indexing status retrieved successfully") + @console_ns.doc("get_dataset_indexing_status") + @console_ns.doc(description="Get dataset indexing status") + @console_ns.doc(params={"dataset_id": "Dataset ID"}) + @console_ns.response(200, "Indexing status retrieved successfully") @setup_required @login_required @account_initialization_required @@ -738,9 +738,9 @@ class DatasetApiKeyApi(Resource): token_prefix = "dataset-" resource_type = "dataset" - @api.doc("get_dataset_api_keys") - @api.doc(description="Get dataset API keys") - @api.response(200, "API keys retrieved successfully", api_key_list) + @console_ns.doc("get_dataset_api_keys") + @console_ns.doc(description="Get dataset API keys") + @console_ns.response(200, "API keys retrieved successfully", api_key_list) @setup_required @login_required @account_initialization_required @@ -767,7 +767,7 @@ class DatasetApiKeyApi(Resource): ) if current_key_count >= self.max_keys: - api.abort( + console_ns.abort( 400, message=f"Cannot create more than {self.max_keys} API keys for this resource type.", code="max_keys_exceeded", @@ -787,10 +787,10 @@ class DatasetApiKeyApi(Resource): class DatasetApiDeleteApi(Resource): resource_type = "dataset" - @api.doc("delete_dataset_api_key") - @api.doc(description="Delete dataset API key") - @api.doc(params={"api_key_id": "API key ID"}) - @api.response(204, "API key deleted successfully") + @console_ns.doc("delete_dataset_api_key") + @console_ns.doc(description="Delete dataset API key") + @console_ns.doc(params={"api_key_id": "API key ID"}) + @console_ns.response(204, "API key deleted successfully") @setup_required @login_required @is_admin_or_owner_required @@ -809,7 +809,7 @@ class 
DatasetApiDeleteApi(Resource): ) if key is None: - api.abort(404, message="API key not found") + console_ns.abort(404, message="API key not found") db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete() db.session.commit() @@ -832,9 +832,9 @@ class DatasetEnableApiApi(Resource): @console_ns.route("/datasets/api-base-info") class DatasetApiBaseUrlApi(Resource): - @api.doc("get_dataset_api_base_info") - @api.doc(description="Get dataset API base information") - @api.response(200, "API base info retrieved successfully") + @console_ns.doc("get_dataset_api_base_info") + @console_ns.doc(description="Get dataset API base information") + @console_ns.response(200, "API base info retrieved successfully") @setup_required @login_required @account_initialization_required @@ -844,9 +844,9 @@ class DatasetApiBaseUrlApi(Resource): @console_ns.route("/datasets/retrieval-setting") class DatasetRetrievalSettingApi(Resource): - @api.doc("get_dataset_retrieval_setting") - @api.doc(description="Get dataset retrieval settings") - @api.response(200, "Retrieval settings retrieved successfully") + @console_ns.doc("get_dataset_retrieval_setting") + @console_ns.doc(description="Get dataset retrieval settings") + @console_ns.response(200, "Retrieval settings retrieved successfully") @setup_required @login_required @account_initialization_required @@ -857,10 +857,10 @@ class DatasetRetrievalSettingApi(Resource): @console_ns.route("/datasets/retrieval-setting/") class DatasetRetrievalSettingMockApi(Resource): - @api.doc("get_dataset_retrieval_setting_mock") - @api.doc(description="Get mock dataset retrieval settings by vector type") - @api.doc(params={"vector_type": "Vector store type"}) - @api.response(200, "Mock retrieval settings retrieved successfully") + @console_ns.doc("get_dataset_retrieval_setting_mock") + @console_ns.doc(description="Get mock dataset retrieval settings by vector type") + @console_ns.doc(params={"vector_type": "Vector store type"}) + 
@console_ns.response(200, "Mock retrieval settings retrieved successfully") @setup_required @login_required @account_initialization_required @@ -870,11 +870,11 @@ class DatasetRetrievalSettingMockApi(Resource): @console_ns.route("/datasets//error-docs") class DatasetErrorDocs(Resource): - @api.doc("get_dataset_error_docs") - @api.doc(description="Get dataset error documents") - @api.doc(params={"dataset_id": "Dataset ID"}) - @api.response(200, "Error documents retrieved successfully") - @api.response(404, "Dataset not found") + @console_ns.doc("get_dataset_error_docs") + @console_ns.doc(description="Get dataset error documents") + @console_ns.doc(params={"dataset_id": "Dataset ID"}) + @console_ns.response(200, "Error documents retrieved successfully") + @console_ns.response(404, "Dataset not found") @setup_required @login_required @account_initialization_required @@ -890,12 +890,12 @@ class DatasetErrorDocs(Resource): @console_ns.route("/datasets//permission-part-users") class DatasetPermissionUserListApi(Resource): - @api.doc("get_dataset_permission_users") - @api.doc(description="Get dataset permission user list") - @api.doc(params={"dataset_id": "Dataset ID"}) - @api.response(200, "Permission users retrieved successfully") - @api.response(404, "Dataset not found") - @api.response(403, "Permission denied") + @console_ns.doc("get_dataset_permission_users") + @console_ns.doc(description="Get dataset permission user list") + @console_ns.doc(params={"dataset_id": "Dataset ID"}) + @console_ns.response(200, "Permission users retrieved successfully") + @console_ns.response(404, "Dataset not found") + @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @@ -919,11 +919,11 @@ class DatasetPermissionUserListApi(Resource): @console_ns.route("/datasets//auto-disable-logs") class DatasetAutoDisableLogApi(Resource): - @api.doc("get_dataset_auto_disable_logs") - @api.doc(description="Get dataset auto disable logs") - 
@api.doc(params={"dataset_id": "Dataset ID"}) - @api.response(200, "Auto disable logs retrieved successfully") - @api.response(404, "Dataset not found") + @console_ns.doc("get_dataset_auto_disable_logs") + @console_ns.doc(description="Get dataset auto disable logs") + @console_ns.doc(params={"dataset_id": "Dataset ID"}) + @console_ns.response(200, "Auto disable logs retrieved successfully") + @console_ns.response(404, "Dataset not found") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 92c85b4951..b5761c9ada 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -11,7 +11,7 @@ from sqlalchemy import asc, desc, select from werkzeug.exceptions import Forbidden, NotFound import services -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.error import ( ProviderModelCurrentlyNotSupportError, ProviderNotInitializeError, @@ -104,10 +104,10 @@ class DocumentResource(Resource): @console_ns.route("/datasets/process-rule") class GetProcessRuleApi(Resource): - @api.doc("get_process_rule") - @api.doc(description="Get dataset document processing rules") - @api.doc(params={"document_id": "Document ID (optional)"}) - @api.response(200, "Process rules retrieved successfully") + @console_ns.doc("get_process_rule") + @console_ns.doc(description="Get dataset document processing rules") + @console_ns.doc(params={"document_id": "Document ID (optional)"}) + @console_ns.response(200, "Process rules retrieved successfully") @setup_required @login_required @account_initialization_required @@ -152,9 +152,9 @@ class GetProcessRuleApi(Resource): @console_ns.route("/datasets//documents") class DatasetDocumentListApi(Resource): - @api.doc("get_dataset_documents") - @api.doc(description="Get documents in a 
dataset") - @api.doc( + @console_ns.doc("get_dataset_documents") + @console_ns.doc(description="Get documents in a dataset") + @console_ns.doc( params={ "dataset_id": "Dataset ID", "page": "Page number (default: 1)", @@ -165,7 +165,7 @@ class DatasetDocumentListApi(Resource): "status": "Filter documents by display status", } ) - @api.response(200, "Documents retrieved successfully") + @console_ns.response(200, "Documents retrieved successfully") @setup_required @login_required @account_initialization_required @@ -357,10 +357,10 @@ class DatasetDocumentListApi(Resource): @console_ns.route("/datasets/init") class DatasetInitApi(Resource): - @api.doc("init_dataset") - @api.doc(description="Initialize dataset with documents") - @api.expect( - api.model( + @console_ns.doc("init_dataset") + @console_ns.doc(description="Initialize dataset with documents") + @console_ns.expect( + console_ns.model( "DatasetInitRequest", { "upload_file_id": fields.String(required=True, description="Upload file ID"), @@ -370,8 +370,8 @@ class DatasetInitApi(Resource): }, ) ) - @api.response(201, "Dataset initialized successfully", dataset_and_document_fields) - @api.response(400, "Invalid request parameters") + @console_ns.response(201, "Dataset initialized successfully", dataset_and_document_fields) + @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required @@ -446,12 +446,12 @@ class DatasetInitApi(Resource): @console_ns.route("/datasets//documents//indexing-estimate") class DocumentIndexingEstimateApi(DocumentResource): - @api.doc("estimate_document_indexing") - @api.doc(description="Estimate document indexing cost") - @api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) - @api.response(200, "Indexing estimate calculated successfully") - @api.response(404, "Document not found") - @api.response(400, "Document already finished") + @console_ns.doc("estimate_document_indexing") + 
@console_ns.doc(description="Estimate document indexing cost") + @console_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) + @console_ns.response(200, "Indexing estimate calculated successfully") + @console_ns.response(404, "Document not found") + @console_ns.response(400, "Document already finished") @setup_required @login_required @account_initialization_required @@ -661,11 +661,11 @@ class DocumentBatchIndexingStatusApi(DocumentResource): @console_ns.route("/datasets//documents//indexing-status") class DocumentIndexingStatusApi(DocumentResource): - @api.doc("get_document_indexing_status") - @api.doc(description="Get document indexing status") - @api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) - @api.response(200, "Indexing status retrieved successfully") - @api.response(404, "Document not found") + @console_ns.doc("get_document_indexing_status") + @console_ns.doc(description="Get document indexing status") + @console_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) + @console_ns.response(200, "Indexing status retrieved successfully") + @console_ns.response(404, "Document not found") @setup_required @login_required @account_initialization_required @@ -711,17 +711,17 @@ class DocumentIndexingStatusApi(DocumentResource): class DocumentApi(DocumentResource): METADATA_CHOICES = {"all", "only", "without"} - @api.doc("get_document") - @api.doc(description="Get document details") - @api.doc( + @console_ns.doc("get_document") + @console_ns.doc(description="Get document details") + @console_ns.doc( params={ "dataset_id": "Dataset ID", "document_id": "Document ID", "metadata": "Metadata inclusion (all/only/without)", } ) - @api.response(200, "Document retrieved successfully") - @api.response(404, "Document not found") + @console_ns.response(200, "Document retrieved successfully") + @console_ns.response(404, "Document not found") @setup_required @login_required @account_initialization_required @@ 
-832,14 +832,14 @@ class DocumentApi(DocumentResource): @console_ns.route("/datasets//documents//processing/") class DocumentProcessingApi(DocumentResource): - @api.doc("update_document_processing") - @api.doc(description="Update document processing status (pause/resume)") - @api.doc( + @console_ns.doc("update_document_processing") + @console_ns.doc(description="Update document processing status (pause/resume)") + @console_ns.doc( params={"dataset_id": "Dataset ID", "document_id": "Document ID", "action": "Action to perform (pause/resume)"} ) - @api.response(200, "Processing status updated successfully") - @api.response(404, "Document not found") - @api.response(400, "Invalid action") + @console_ns.response(200, "Processing status updated successfully") + @console_ns.response(404, "Document not found") + @console_ns.response(400, "Invalid action") @setup_required @login_required @account_initialization_required @@ -877,11 +877,11 @@ class DocumentProcessingApi(DocumentResource): @console_ns.route("/datasets//documents//metadata") class DocumentMetadataApi(DocumentResource): - @api.doc("update_document_metadata") - @api.doc(description="Update document metadata") - @api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) - @api.expect( - api.model( + @console_ns.doc("update_document_metadata") + @console_ns.doc(description="Update document metadata") + @console_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) + @console_ns.expect( + console_ns.model( "UpdateDocumentMetadataRequest", { "doc_type": fields.String(description="Document type"), @@ -889,9 +889,9 @@ class DocumentMetadataApi(DocumentResource): }, ) ) - @api.response(200, "Document metadata updated successfully") - @api.response(404, "Document not found") - @api.response(403, "Permission denied") + @console_ns.response(200, "Document metadata updated successfully") + @console_ns.response(404, "Document not found") + @console_ns.response(403, "Permission denied") 
@setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py index fe96a8199a..f48f384e94 100644 --- a/api/controllers/console/datasets/external.py +++ b/api/controllers/console/datasets/external.py @@ -3,7 +3,7 @@ from flask_restx import Resource, fields, marshal, reqparse from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.datasets.error import DatasetNameDuplicateError from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from fields.dataset_fields import dataset_detail_fields @@ -22,16 +22,16 @@ def _validate_name(name: str) -> str: @console_ns.route("/datasets/external-knowledge-api") class ExternalApiTemplateListApi(Resource): - @api.doc("get_external_api_templates") - @api.doc(description="Get external knowledge API templates") - @api.doc( + @console_ns.doc("get_external_api_templates") + @console_ns.doc(description="Get external knowledge API templates") + @console_ns.doc( params={ "page": "Page number (default: 1)", "limit": "Number of items per page (default: 20)", "keyword": "Search keyword", } ) - @api.response(200, "External API templates retrieved successfully") + @console_ns.response(200, "External API templates retrieved successfully") @setup_required @login_required @account_initialization_required @@ -95,11 +95,11 @@ class ExternalApiTemplateListApi(Resource): @console_ns.route("/datasets/external-knowledge-api/") class ExternalApiTemplateApi(Resource): - @api.doc("get_external_api_template") - @api.doc(description="Get external knowledge API template details") - @api.doc(params={"external_knowledge_api_id": "External knowledge API ID"}) - @api.response(200, "External API template retrieved successfully") - @api.response(404, 
"Template not found") + @console_ns.doc("get_external_api_template") + @console_ns.doc(description="Get external knowledge API template details") + @console_ns.doc(params={"external_knowledge_api_id": "External knowledge API ID"}) + @console_ns.response(200, "External API template retrieved successfully") + @console_ns.response(404, "Template not found") @setup_required @login_required @account_initialization_required @@ -163,10 +163,10 @@ class ExternalApiTemplateApi(Resource): @console_ns.route("/datasets/external-knowledge-api//use-check") class ExternalApiUseCheckApi(Resource): - @api.doc("check_external_api_usage") - @api.doc(description="Check if external knowledge API is being used") - @api.doc(params={"external_knowledge_api_id": "External knowledge API ID"}) - @api.response(200, "Usage check completed successfully") + @console_ns.doc("check_external_api_usage") + @console_ns.doc(description="Check if external knowledge API is being used") + @console_ns.doc(params={"external_knowledge_api_id": "External knowledge API ID"}) + @console_ns.response(200, "Usage check completed successfully") @setup_required @login_required @account_initialization_required @@ -181,10 +181,10 @@ class ExternalApiUseCheckApi(Resource): @console_ns.route("/datasets/external") class ExternalDatasetCreateApi(Resource): - @api.doc("create_external_dataset") - @api.doc(description="Create external knowledge dataset") - @api.expect( - api.model( + @console_ns.doc("create_external_dataset") + @console_ns.doc(description="Create external knowledge dataset") + @console_ns.expect( + console_ns.model( "CreateExternalDatasetRequest", { "external_knowledge_api_id": fields.String(required=True, description="External knowledge API ID"), @@ -194,9 +194,9 @@ class ExternalDatasetCreateApi(Resource): }, ) ) - @api.response(201, "External dataset created successfully", dataset_detail_fields) - @api.response(400, "Invalid parameters") - @api.response(403, "Permission denied") + 
@console_ns.response(201, "External dataset created successfully", dataset_detail_fields) + @console_ns.response(400, "Invalid parameters") + @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @@ -239,11 +239,11 @@ class ExternalDatasetCreateApi(Resource): @console_ns.route("/datasets//external-hit-testing") class ExternalKnowledgeHitTestingApi(Resource): - @api.doc("test_external_knowledge_retrieval") - @api.doc(description="Test external knowledge retrieval for dataset") - @api.doc(params={"dataset_id": "Dataset ID"}) - @api.expect( - api.model( + @console_ns.doc("test_external_knowledge_retrieval") + @console_ns.doc(description="Test external knowledge retrieval for dataset") + @console_ns.doc(params={"dataset_id": "Dataset ID"}) + @console_ns.expect( + console_ns.model( "ExternalHitTestingRequest", { "query": fields.String(required=True, description="Query text for testing"), @@ -252,9 +252,9 @@ class ExternalKnowledgeHitTestingApi(Resource): }, ) ) - @api.response(200, "External hit testing completed successfully") - @api.response(404, "Dataset not found") - @api.response(400, "Invalid parameters") + @console_ns.response(200, "External hit testing completed successfully") + @console_ns.response(404, "Dataset not found") + @console_ns.response(400, "Invalid parameters") @setup_required @login_required @account_initialization_required @@ -297,10 +297,10 @@ class ExternalKnowledgeHitTestingApi(Resource): @console_ns.route("/test/retrieval") class BedrockRetrievalApi(Resource): # this api is only for internal testing - @api.doc("bedrock_retrieval_test") - @api.doc(description="Bedrock retrieval test (internal use only)") - @api.expect( - api.model( + @console_ns.doc("bedrock_retrieval_test") + @console_ns.doc(description="Bedrock retrieval test (internal use only)") + @console_ns.expect( + console_ns.model( "BedrockRetrievalTestRequest", { "retrieval_setting": fields.Raw(required=True, 
description="Retrieval settings"), @@ -309,7 +309,7 @@ class BedrockRetrievalApi(Resource): }, ) ) - @api.response(200, "Bedrock retrieval test completed") + @console_ns.response(200, "Bedrock retrieval test completed") def post(self): parser = ( reqparse.RequestParser() diff --git a/api/controllers/console/datasets/hit_testing.py b/api/controllers/console/datasets/hit_testing.py index abaca88090..7ba2eeb7dd 100644 --- a/api/controllers/console/datasets/hit_testing.py +++ b/api/controllers/console/datasets/hit_testing.py @@ -1,6 +1,6 @@ from flask_restx import Resource, fields -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase from controllers.console.wraps import ( account_initialization_required, @@ -12,11 +12,11 @@ from libs.login import login_required @console_ns.route("/datasets//hit-testing") class HitTestingApi(Resource, DatasetsHitTestingBase): - @api.doc("test_dataset_retrieval") - @api.doc(description="Test dataset knowledge retrieval") - @api.doc(params={"dataset_id": "Dataset ID"}) - @api.expect( - api.model( + @console_ns.doc("test_dataset_retrieval") + @console_ns.doc(description="Test dataset knowledge retrieval") + @console_ns.doc(params={"dataset_id": "Dataset ID"}) + @console_ns.expect( + console_ns.model( "HitTestingRequest", { "query": fields.String(required=True, description="Query text for testing"), @@ -26,9 +26,9 @@ class HitTestingApi(Resource, DatasetsHitTestingBase): }, ) ) - @api.response(200, "Hit testing completed successfully") - @api.response(404, "Dataset not found") - @api.response(400, "Invalid parameters") + @console_ns.response(200, "Hit testing completed successfully") + @console_ns.response(404, "Dataset not found") + @console_ns.response(400, "Invalid parameters") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py 
b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py index f83ee69beb..cf9e5d2990 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py @@ -3,7 +3,7 @@ from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden, NotFound from configs import dify_config -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.utils.encoders import jsonable_encoder @@ -130,7 +130,7 @@ parser_datasource = ( @console_ns.route("/auth/plugin/datasource/") class DatasourceAuth(Resource): - @api.expect(parser_datasource) + @console_ns.expect(parser_datasource) @setup_required @login_required @account_initialization_required @@ -176,7 +176,7 @@ parser_datasource_delete = reqparse.RequestParser().add_argument( @console_ns.route("/auth/plugin/datasource//delete") class DatasourceAuthDeleteApi(Resource): - @api.expect(parser_datasource_delete) + @console_ns.expect(parser_datasource_delete) @setup_required @login_required @account_initialization_required @@ -209,7 +209,7 @@ parser_datasource_update = ( @console_ns.route("/auth/plugin/datasource//update") class DatasourceAuthUpdateApi(Resource): - @api.expect(parser_datasource_update) + @console_ns.expect(parser_datasource_update) @setup_required @login_required @account_initialization_required @@ -267,7 +267,7 @@ parser_datasource_custom = ( @console_ns.route("/auth/plugin/datasource//custom-client") class DatasourceAuthOauthCustomClient(Resource): - @api.expect(parser_datasource_custom) + @console_ns.expect(parser_datasource_custom) @setup_required @login_required @account_initialization_required @@ -306,7 +306,7 @@ parser_default = 
reqparse.RequestParser().add_argument("id", type=str, required= @console_ns.route("/auth/plugin/datasource//default") class DatasourceAuthDefaultApi(Resource): - @api.expect(parser_default) + @console_ns.expect(parser_default) @setup_required @login_required @account_initialization_required @@ -334,7 +334,7 @@ parser_update_name = ( @console_ns.route("/auth/plugin/datasource//update-name") class DatasourceUpdateProviderNameApi(Resource): - @api.expect(parser_update_name) + @console_ns.expect(parser_update_name) @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py index 5e3b3428eb..42387557d6 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py @@ -4,7 +4,7 @@ from flask_restx import ( # type: ignore from pydantic import BaseModel from werkzeug.exceptions import Forbidden -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import account_initialization_required, setup_required from libs.login import current_user, login_required @@ -26,7 +26,7 @@ console_ns.schema_model(Parser.__name__, Parser.model_json_schema(ref_template=D @console_ns.route("/rag/pipelines//workflows/published/datasource/nodes//preview") class DataSourceContentPreviewApi(Resource): - @api.expect(console_ns.models[Parser.__name__], validate=True) + @console_ns.expect(console_ns.models[Parser.__name__], validate=True) @setup_required @login_required @account_initialization_required @@ -38,7 +38,7 @@ class DataSourceContentPreviewApi(Resource): if not isinstance(current_user, Account): raise Forbidden() - args = Parser.model_validate(api.payload) + args = 
Parser.model_validate(console_ns.payload) inputs = args.inputs datasource_type = args.datasource_type diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py index bc8d4fbf81..a0dc692c4e 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py @@ -9,7 +9,7 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.app.error import ( ConversationCompletedError, DraftWorkflowNotExist, @@ -153,7 +153,7 @@ parser_run = reqparse.RequestParser().add_argument("inputs", type=dict, location @console_ns.route("/rag/pipelines//workflows/draft/iteration/nodes//run") class RagPipelineDraftRunIterationNodeApi(Resource): - @api.expect(parser_run) + @console_ns.expect(parser_run) @setup_required @login_required @account_initialization_required @@ -187,7 +187,7 @@ class RagPipelineDraftRunIterationNodeApi(Resource): @console_ns.route("/rag/pipelines//workflows/draft/loop/nodes//run") class RagPipelineDraftRunLoopNodeApi(Resource): - @api.expect(parser_run) + @console_ns.expect(parser_run) @setup_required @login_required @account_initialization_required @@ -230,7 +230,7 @@ parser_draft_run = ( @console_ns.route("/rag/pipelines//workflows/draft/run") class DraftRagPipelineRunApi(Resource): - @api.expect(parser_draft_run) + @console_ns.expect(parser_draft_run) @setup_required @login_required @account_initialization_required @@ -273,7 +273,7 @@ parser_published_run = ( @console_ns.route("/rag/pipelines//workflows/published/run") class PublishedRagPipelineRunApi(Resource): - @api.expect(parser_published_run) + @console_ns.expect(parser_published_run) @setup_required @login_required 
@account_initialization_required @@ -397,7 +397,7 @@ parser_rag_run = ( @console_ns.route("/rag/pipelines//workflows/published/datasource/nodes//run") class RagPipelinePublishedDatasourceNodeRunApi(Resource): - @api.expect(parser_rag_run) + @console_ns.expect(parser_rag_run) @setup_required @login_required @account_initialization_required @@ -437,7 +437,7 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource): @console_ns.route("/rag/pipelines//workflows/draft/datasource/nodes//run") class RagPipelineDraftDatasourceNodeRunApi(Resource): - @api.expect(parser_rag_run) + @console_ns.expect(parser_rag_run) @setup_required @login_required @edit_permission_required @@ -482,7 +482,7 @@ parser_run_api = reqparse.RequestParser().add_argument( @console_ns.route("/rag/pipelines//workflows/draft/nodes//run") class RagPipelineDraftNodeRunApi(Resource): - @api.expect(parser_run_api) + @console_ns.expect(parser_run_api) @setup_required @login_required @edit_permission_required @@ -607,7 +607,7 @@ parser_default = reqparse.RequestParser().add_argument("q", type=str, location=" @console_ns.route("/rag/pipelines//workflows/default-workflow-block-configs/") class DefaultRagPipelineBlockConfigApi(Resource): - @api.expect(parser_default) + @console_ns.expect(parser_default) @setup_required @login_required @account_initialization_required @@ -644,7 +644,7 @@ parser_wf = ( @console_ns.route("/rag/pipelines//workflows") class PublishedAllRagPipelineApi(Resource): - @api.expect(parser_wf) + @console_ns.expect(parser_wf) @setup_required @login_required @account_initialization_required @@ -696,7 +696,7 @@ parser_wf_id = ( @console_ns.route("/rag/pipelines//workflows/") class RagPipelineByIdApi(Resource): - @api.expect(parser_wf_id) + @console_ns.expect(parser_wf_id) @setup_required @login_required @account_initialization_required @@ -754,7 +754,7 @@ parser_parameters = reqparse.RequestParser().add_argument("node_id", type=str, r 
@console_ns.route("/rag/pipelines//workflows/published/processing/parameters") class PublishedRagPipelineSecondStepApi(Resource): - @api.expect(parser_parameters) + @console_ns.expect(parser_parameters) @setup_required @login_required @account_initialization_required @@ -777,7 +777,7 @@ class PublishedRagPipelineSecondStepApi(Resource): @console_ns.route("/rag/pipelines//workflows/published/pre-processing/parameters") class PublishedRagPipelineFirstStepApi(Resource): - @api.expect(parser_parameters) + @console_ns.expect(parser_parameters) @setup_required @login_required @account_initialization_required @@ -800,7 +800,7 @@ class PublishedRagPipelineFirstStepApi(Resource): @console_ns.route("/rag/pipelines//workflows/draft/pre-processing/parameters") class DraftRagPipelineFirstStepApi(Resource): - @api.expect(parser_parameters) + @console_ns.expect(parser_parameters) @setup_required @login_required @account_initialization_required @@ -823,7 +823,7 @@ class DraftRagPipelineFirstStepApi(Resource): @console_ns.route("/rag/pipelines//workflows/draft/processing/parameters") class DraftRagPipelineSecondStepApi(Resource): - @api.expect(parser_parameters) + @console_ns.expect(parser_parameters) @setup_required @login_required @account_initialization_required @@ -854,7 +854,7 @@ parser_wf_run = ( @console_ns.route("/rag/pipelines//workflow-runs") class RagPipelineWorkflowRunListApi(Resource): - @api.expect(parser_wf_run) + @console_ns.expect(parser_wf_run) @setup_required @login_required @account_initialization_required @@ -975,7 +975,7 @@ parser_var = ( @console_ns.route("/rag/pipelines//workflows/draft/datasource/variables-inspect") class RagPipelineDatasourceVariableApi(Resource): - @api.expect(parser_var) + @console_ns.expect(parser_var) @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/datasets/website.py b/api/controllers/console/datasets/website.py index fe6eaaa0de..b2998a8d3e 100644 --- 
a/api/controllers/console/datasets/website.py +++ b/api/controllers/console/datasets/website.py @@ -1,6 +1,6 @@ from flask_restx import Resource, fields, reqparse -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.datasets.error import WebsiteCrawlError from controllers.console.wraps import account_initialization_required, setup_required from libs.login import login_required @@ -9,10 +9,10 @@ from services.website_service import WebsiteCrawlApiRequest, WebsiteCrawlStatusA @console_ns.route("/website/crawl") class WebsiteCrawlApi(Resource): - @api.doc("crawl_website") - @api.doc(description="Crawl website content") - @api.expect( - api.model( + @console_ns.doc("crawl_website") + @console_ns.doc(description="Crawl website content") + @console_ns.expect( + console_ns.model( "WebsiteCrawlRequest", { "provider": fields.String( @@ -25,8 +25,8 @@ class WebsiteCrawlApi(Resource): }, ) ) - @api.response(200, "Website crawl initiated successfully") - @api.response(400, "Invalid crawl parameters") + @console_ns.response(200, "Website crawl initiated successfully") + @console_ns.response(400, "Invalid crawl parameters") @setup_required @login_required @account_initialization_required @@ -62,12 +62,12 @@ class WebsiteCrawlApi(Resource): @console_ns.route("/website/crawl/status/") class WebsiteCrawlStatusApi(Resource): - @api.doc("get_crawl_status") - @api.doc(description="Get website crawl status") - @api.doc(params={"job_id": "Crawl job ID", "provider": "Crawl provider (firecrawl/watercrawl/jinareader)"}) - @api.response(200, "Crawl status retrieved successfully") - @api.response(404, "Crawl job not found") - @api.response(400, "Invalid provider") + @console_ns.doc("get_crawl_status") + @console_ns.doc(description="Get website crawl status") + @console_ns.doc(params={"job_id": "Crawl job ID", "provider": "Crawl provider (firecrawl/watercrawl/jinareader)"}) + @console_ns.response(200, "Crawl status retrieved 
successfully") + @console_ns.response(404, "Crawl job not found") + @console_ns.response(400, "Invalid provider") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/datasets/wraps.py b/api/controllers/console/datasets/wraps.py index a8c1298e3e..3ef1341abc 100644 --- a/api/controllers/console/datasets/wraps.py +++ b/api/controllers/console/datasets/wraps.py @@ -1,44 +1,40 @@ from collections.abc import Callable from functools import wraps +from typing import ParamSpec, TypeVar from controllers.console.datasets.error import PipelineNotFoundError from extensions.ext_database import db from libs.login import current_account_with_tenant from models.dataset import Pipeline +P = ParamSpec("P") +R = TypeVar("R") -def get_rag_pipeline( - view: Callable | None = None, -): - def decorator(view_func): - @wraps(view_func) - def decorated_view(*args, **kwargs): - if not kwargs.get("pipeline_id"): - raise ValueError("missing pipeline_id in path parameters") - _, current_tenant_id = current_account_with_tenant() +def get_rag_pipeline(view_func: Callable[P, R]): + @wraps(view_func) + def decorated_view(*args: P.args, **kwargs: P.kwargs): + if not kwargs.get("pipeline_id"): + raise ValueError("missing pipeline_id in path parameters") - pipeline_id = kwargs.get("pipeline_id") - pipeline_id = str(pipeline_id) + _, current_tenant_id = current_account_with_tenant() - del kwargs["pipeline_id"] + pipeline_id = kwargs.get("pipeline_id") + pipeline_id = str(pipeline_id) - pipeline = ( - db.session.query(Pipeline) - .where(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_tenant_id) - .first() - ) + del kwargs["pipeline_id"] - if not pipeline: - raise PipelineNotFoundError() + pipeline = ( + db.session.query(Pipeline) + .where(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_tenant_id) + .first() + ) - kwargs["pipeline"] = pipeline + if not pipeline: + raise PipelineNotFoundError() - return view_func(*args, **kwargs) + 
kwargs["pipeline"] = pipeline - return decorated_view + return view_func(*args, **kwargs) - if view is None: - return decorator - else: - return decorator(view) + return decorated_view diff --git a/api/controllers/console/explore/recommended_app.py b/api/controllers/console/explore/recommended_app.py index 11c7a1bc18..5a9c3ef133 100644 --- a/api/controllers/console/explore/recommended_app.py +++ b/api/controllers/console/explore/recommended_app.py @@ -1,7 +1,7 @@ from flask_restx import Resource, fields, marshal_with, reqparse from constants.languages import languages -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required from libs.helper import AppIconUrlField from libs.login import current_user, login_required @@ -40,7 +40,7 @@ parser_apps = reqparse.RequestParser().add_argument("language", type=str, locati @console_ns.route("/explore/apps") class RecommendedAppListApi(Resource): - @api.expect(parser_apps) + @console_ns.expect(parser_apps) @login_required @account_initialization_required @marshal_with(recommended_app_list_fields) diff --git a/api/controllers/console/extension.py b/api/controllers/console/extension.py index a1d36def0d..6f92b9744f 100644 --- a/api/controllers/console/extension.py +++ b/api/controllers/console/extension.py @@ -1,7 +1,7 @@ from flask_restx import Resource, fields, marshal_with, reqparse from constants import HIDDEN_VALUE -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from fields.api_based_extension_fields import api_based_extension_fields from libs.login import current_account_with_tenant, login_required @@ -12,15 +12,17 @@ from services.code_based_extension_service import CodeBasedExtensionService @console_ns.route("/code-based-extension") class CodeBasedExtensionAPI(Resource): - 
@api.doc("get_code_based_extension") - @api.doc(description="Get code-based extension data by module name") - @api.expect( - api.parser().add_argument("module", type=str, required=True, location="args", help="Extension module name") + @console_ns.doc("get_code_based_extension") + @console_ns.doc(description="Get code-based extension data by module name") + @console_ns.expect( + console_ns.parser().add_argument( + "module", type=str, required=True, location="args", help="Extension module name" + ) ) - @api.response( + @console_ns.response( 200, "Success", - api.model( + console_ns.model( "CodeBasedExtensionResponse", {"module": fields.String(description="Module name"), "data": fields.Raw(description="Extension data")}, ), @@ -37,9 +39,9 @@ class CodeBasedExtensionAPI(Resource): @console_ns.route("/api-based-extension") class APIBasedExtensionAPI(Resource): - @api.doc("get_api_based_extensions") - @api.doc(description="Get all API-based extensions for current tenant") - @api.response(200, "Success", fields.List(fields.Nested(api_based_extension_fields))) + @console_ns.doc("get_api_based_extensions") + @console_ns.doc(description="Get all API-based extensions for current tenant") + @console_ns.response(200, "Success", fields.List(fields.Nested(api_based_extension_fields))) @setup_required @login_required @account_initialization_required @@ -48,10 +50,10 @@ class APIBasedExtensionAPI(Resource): _, tenant_id = current_account_with_tenant() return APIBasedExtensionService.get_all_by_tenant_id(tenant_id) - @api.doc("create_api_based_extension") - @api.doc(description="Create a new API-based extension") - @api.expect( - api.model( + @console_ns.doc("create_api_based_extension") + @console_ns.doc(description="Create a new API-based extension") + @console_ns.expect( + console_ns.model( "CreateAPIBasedExtensionRequest", { "name": fields.String(required=True, description="Extension name"), @@ -60,13 +62,13 @@ class APIBasedExtensionAPI(Resource): }, ) ) - @api.response(201, 
"Extension created successfully", api_based_extension_fields) + @console_ns.response(201, "Extension created successfully", api_based_extension_fields) @setup_required @login_required @account_initialization_required @marshal_with(api_based_extension_fields) def post(self): - args = api.payload + args = console_ns.payload _, current_tenant_id = current_account_with_tenant() extension_data = APIBasedExtension( @@ -81,10 +83,10 @@ class APIBasedExtensionAPI(Resource): @console_ns.route("/api-based-extension/") class APIBasedExtensionDetailAPI(Resource): - @api.doc("get_api_based_extension") - @api.doc(description="Get API-based extension by ID") - @api.doc(params={"id": "Extension ID"}) - @api.response(200, "Success", api_based_extension_fields) + @console_ns.doc("get_api_based_extension") + @console_ns.doc(description="Get API-based extension by ID") + @console_ns.doc(params={"id": "Extension ID"}) + @console_ns.response(200, "Success", api_based_extension_fields) @setup_required @login_required @account_initialization_required @@ -95,11 +97,11 @@ class APIBasedExtensionDetailAPI(Resource): return APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id) - @api.doc("update_api_based_extension") - @api.doc(description="Update API-based extension") - @api.doc(params={"id": "Extension ID"}) - @api.expect( - api.model( + @console_ns.doc("update_api_based_extension") + @console_ns.doc(description="Update API-based extension") + @console_ns.doc(params={"id": "Extension ID"}) + @console_ns.expect( + console_ns.model( "UpdateAPIBasedExtensionRequest", { "name": fields.String(required=True, description="Extension name"), @@ -108,7 +110,7 @@ class APIBasedExtensionDetailAPI(Resource): }, ) ) - @api.response(200, "Extension updated successfully", api_based_extension_fields) + @console_ns.response(200, "Extension updated successfully", api_based_extension_fields) @setup_required @login_required @account_initialization_required @@ -119,7 +121,7 @@ class 
APIBasedExtensionDetailAPI(Resource): extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(current_tenant_id, api_based_extension_id) - args = api.payload + args = console_ns.payload extension_data_from_db.name = args["name"] extension_data_from_db.api_endpoint = args["api_endpoint"] @@ -129,10 +131,10 @@ class APIBasedExtensionDetailAPI(Resource): return APIBasedExtensionService.save(extension_data_from_db) - @api.doc("delete_api_based_extension") - @api.doc(description="Delete API-based extension") - @api.doc(params={"id": "Extension ID"}) - @api.response(204, "Extension deleted successfully") + @console_ns.doc("delete_api_based_extension") + @console_ns.doc(description="Delete API-based extension") + @console_ns.doc(params={"id": "Extension ID"}) + @console_ns.response(204, "Extension deleted successfully") @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/feature.py b/api/controllers/console/feature.py index 39bcf3424c..6951c906e9 100644 --- a/api/controllers/console/feature.py +++ b/api/controllers/console/feature.py @@ -3,18 +3,18 @@ from flask_restx import Resource, fields from libs.login import current_account_with_tenant, login_required from services.feature_service import FeatureService -from . import api, console_ns +from . 
import console_ns from .wraps import account_initialization_required, cloud_utm_record, setup_required @console_ns.route("/features") class FeatureApi(Resource): - @api.doc("get_tenant_features") - @api.doc(description="Get feature configuration for current tenant") - @api.response( + @console_ns.doc("get_tenant_features") + @console_ns.doc(description="Get feature configuration for current tenant") + @console_ns.response( 200, "Success", - api.model("FeatureResponse", {"features": fields.Raw(description="Feature configuration object")}), + console_ns.model("FeatureResponse", {"features": fields.Raw(description="Feature configuration object")}), ) @setup_required @login_required @@ -29,12 +29,14 @@ class FeatureApi(Resource): @console_ns.route("/system-features") class SystemFeatureApi(Resource): - @api.doc("get_system_features") - @api.doc(description="Get system-wide feature configuration") - @api.response( + @console_ns.doc("get_system_features") + @console_ns.doc(description="Get system-wide feature configuration") + @console_ns.response( 200, "Success", - api.model("SystemFeatureResponse", {"features": fields.Raw(description="System feature configuration object")}), + console_ns.model( + "SystemFeatureResponse", {"features": fields.Raw(description="System feature configuration object")} + ), ) def get(self): """Get system-wide feature configuration""" diff --git a/api/controllers/console/init_validate.py b/api/controllers/console/init_validate.py index f219425d07..f27fa26983 100644 --- a/api/controllers/console/init_validate.py +++ b/api/controllers/console/init_validate.py @@ -11,19 +11,19 @@ from libs.helper import StrLen from models.model import DifySetup from services.account_service import TenantService -from . import api, console_ns +from . 
import console_ns from .error import AlreadySetupError, InitValidateFailedError from .wraps import only_edition_self_hosted @console_ns.route("/init") class InitValidateAPI(Resource): - @api.doc("get_init_status") - @api.doc(description="Get initialization validation status") - @api.response( + @console_ns.doc("get_init_status") + @console_ns.doc(description="Get initialization validation status") + @console_ns.response( 200, "Success", - model=api.model( + model=console_ns.model( "InitStatusResponse", {"status": fields.String(description="Initialization status", enum=["finished", "not_started"])}, ), @@ -35,20 +35,20 @@ class InitValidateAPI(Resource): return {"status": "finished"} return {"status": "not_started"} - @api.doc("validate_init_password") - @api.doc(description="Validate initialization password for self-hosted edition") - @api.expect( - api.model( + @console_ns.doc("validate_init_password") + @console_ns.doc(description="Validate initialization password for self-hosted edition") + @console_ns.expect( + console_ns.model( "InitValidateRequest", {"password": fields.String(required=True, description="Initialization password", max_length=30)}, ) ) - @api.response( + @console_ns.response( 201, "Success", - model=api.model("InitValidateResponse", {"result": fields.String(description="Operation result")}), + model=console_ns.model("InitValidateResponse", {"result": fields.String(description="Operation result")}), ) - @api.response(400, "Already setup or validation failed") + @console_ns.response(400, "Already setup or validation failed") @only_edition_self_hosted def post(self): """Validate initialization password""" diff --git a/api/controllers/console/ping.py b/api/controllers/console/ping.py index 29f49b99de..25a3d80522 100644 --- a/api/controllers/console/ping.py +++ b/api/controllers/console/ping.py @@ -1,16 +1,16 @@ from flask_restx import Resource, fields -from . import api, console_ns +from . 
import console_ns @console_ns.route("/ping") class PingApi(Resource): - @api.doc("health_check") - @api.doc(description="Health check endpoint for connection testing") - @api.response( + @console_ns.doc("health_check") + @console_ns.doc(description="Health check endpoint for connection testing") + @console_ns.response( 200, "Success", - api.model("PingResponse", {"result": fields.String(description="Health check result", example="pong")}), + console_ns.model("PingResponse", {"result": fields.String(description="Health check result", example="pong")}), ) def get(self): """Health check endpoint for connection testing""" diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py index 47c7ecde9a..49a4df1b5a 100644 --- a/api/controllers/console/remote_files.py +++ b/api/controllers/console/remote_files.py @@ -10,7 +10,6 @@ from controllers.common.errors import ( RemoteFileUploadError, UnsupportedFileTypeError, ) -from controllers.console import api from core.file import helpers as file_helpers from core.helper import ssrf_proxy from extensions.ext_database import db @@ -42,7 +41,7 @@ parser_upload = reqparse.RequestParser().add_argument("url", type=str, required= @console_ns.route("/remote-files/upload") class RemoteFileUploadApi(Resource): - @api.expect(parser_upload) + @console_ns.expect(parser_upload) @marshal_with(file_fields_with_signed_url) def post(self): args = parser_upload.parse_args() diff --git a/api/controllers/console/setup.py b/api/controllers/console/setup.py index 22929c851e..0c2a4d797b 100644 --- a/api/controllers/console/setup.py +++ b/api/controllers/console/setup.py @@ -7,7 +7,7 @@ from libs.password import valid_password from models.model import DifySetup, db from services.account_service import RegisterService, TenantService -from . import api, console_ns +from . 
import console_ns from .error import AlreadySetupError, NotInitValidateError from .init_validate import get_init_validate_status from .wraps import only_edition_self_hosted @@ -15,12 +15,12 @@ from .wraps import only_edition_self_hosted @console_ns.route("/setup") class SetupApi(Resource): - @api.doc("get_setup_status") - @api.doc(description="Get system setup status") - @api.response( + @console_ns.doc("get_setup_status") + @console_ns.doc(description="Get system setup status") + @console_ns.response( 200, "Success", - api.model( + console_ns.model( "SetupStatusResponse", { "step": fields.String(description="Setup step status", enum=["not_started", "finished"]), @@ -40,10 +40,10 @@ class SetupApi(Resource): return {"step": "not_started"} return {"step": "finished"} - @api.doc("setup_system") - @api.doc(description="Initialize system setup with admin account") - @api.expect( - api.model( + @console_ns.doc("setup_system") + @console_ns.doc(description="Initialize system setup with admin account") + @console_ns.expect( + console_ns.model( "SetupRequest", { "email": fields.String(required=True, description="Admin email address"), @@ -53,8 +53,10 @@ class SetupApi(Resource): }, ) ) - @api.response(201, "Success", api.model("SetupResponse", {"result": fields.String(description="Setup result")})) - @api.response(400, "Already setup or validation failed") + @console_ns.response( + 201, "Success", console_ns.model("SetupResponse", {"result": fields.String(description="Setup result")}) + ) + @console_ns.response(400, "Already setup or validation failed") @only_edition_self_hosted def post(self): """Initialize system setup with admin account""" diff --git a/api/controllers/console/tag/tags.py b/api/controllers/console/tag/tags.py index ee032756eb..17cfc3ff4b 100644 --- a/api/controllers/console/tag/tags.py +++ b/api/controllers/console/tag/tags.py @@ -2,7 +2,7 @@ from flask import request from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions 
import Forbidden -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from fields.tag_fields import dataset_tag_fields from libs.login import current_account_with_tenant, login_required @@ -43,7 +43,7 @@ class TagListApi(Resource): return tags, 200 - @api.expect(parser_tags) + @console_ns.expect(parser_tags) @setup_required @login_required @account_initialization_required @@ -68,7 +68,7 @@ parser_tag_id = reqparse.RequestParser().add_argument( @console_ns.route("/tags/") class TagUpdateDeleteApi(Resource): - @api.expect(parser_tag_id) + @console_ns.expect(parser_tag_id) @setup_required @login_required @account_initialization_required @@ -110,7 +110,7 @@ parser_create = ( @console_ns.route("/tag-bindings/create") class TagBindingCreateApi(Resource): - @api.expect(parser_create) + @console_ns.expect(parser_create) @setup_required @login_required @account_initialization_required @@ -136,7 +136,7 @@ parser_remove = ( @console_ns.route("/tag-bindings/remove") class TagBindingDeleteApi(Resource): - @api.expect(parser_remove) + @console_ns.expect(parser_remove) @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/version.py b/api/controllers/console/version.py index 104a205fc8..6c5505f42a 100644 --- a/api/controllers/console/version.py +++ b/api/controllers/console/version.py @@ -7,7 +7,7 @@ from packaging import version from configs import dify_config -from . import api, console_ns +from . 
import console_ns logger = logging.getLogger(__name__) @@ -18,13 +18,13 @@ parser = reqparse.RequestParser().add_argument( @console_ns.route("/version") class VersionApi(Resource): - @api.doc("check_version_update") - @api.doc(description="Check for application version updates") - @api.expect(parser) - @api.response( + @console_ns.doc("check_version_update") + @console_ns.doc(description="Check for application version updates") + @console_ns.expect(parser) + @console_ns.response( 200, "Success", - api.model( + console_ns.model( "VersionResponse", { "version": fields.String(description="Latest version number"), diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 0833b39f41..838cd3ee95 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -8,7 +8,7 @@ from sqlalchemy.orm import Session from configs import dify_config from constants.languages import supported_language -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.auth.error import ( EmailAlreadyInUseError, EmailChangeLimitError, @@ -55,7 +55,7 @@ def _init_parser(): @console_ns.route("/account/init") class AccountInitApi(Resource): - @api.expect(_init_parser()) + @console_ns.expect(_init_parser()) @setup_required @login_required def post(self): @@ -115,7 +115,7 @@ parser_name = reqparse.RequestParser().add_argument("name", type=str, required=T @console_ns.route("/account/name") class AccountNameApi(Resource): - @api.expect(parser_name) + @console_ns.expect(parser_name) @setup_required @login_required @account_initialization_required @@ -138,7 +138,7 @@ parser_avatar = reqparse.RequestParser().add_argument("avatar", type=str, requir @console_ns.route("/account/avatar") class AccountAvatarApi(Resource): - @api.expect(parser_avatar) + @console_ns.expect(parser_avatar) @setup_required @login_required @account_initialization_required @@ 
-159,7 +159,7 @@ parser_interface = reqparse.RequestParser().add_argument( @console_ns.route("/account/interface-language") class AccountInterfaceLanguageApi(Resource): - @api.expect(parser_interface) + @console_ns.expect(parser_interface) @setup_required @login_required @account_initialization_required @@ -180,7 +180,7 @@ parser_theme = reqparse.RequestParser().add_argument( @console_ns.route("/account/interface-theme") class AccountInterfaceThemeApi(Resource): - @api.expect(parser_theme) + @console_ns.expect(parser_theme) @setup_required @login_required @account_initialization_required @@ -199,7 +199,7 @@ parser_timezone = reqparse.RequestParser().add_argument("timezone", type=str, re @console_ns.route("/account/timezone") class AccountTimezoneApi(Resource): - @api.expect(parser_timezone) + @console_ns.expect(parser_timezone) @setup_required @login_required @account_initialization_required @@ -227,7 +227,7 @@ parser_pw = ( @console_ns.route("/account/password") class AccountPasswordApi(Resource): - @api.expect(parser_pw) + @console_ns.expect(parser_pw) @setup_required @login_required @account_initialization_required @@ -325,7 +325,7 @@ parser_delete = ( @console_ns.route("/account/delete") class AccountDeleteApi(Resource): - @api.expect(parser_delete) + @console_ns.expect(parser_delete) @setup_required @login_required @account_initialization_required @@ -351,7 +351,7 @@ parser_feedback = ( @console_ns.route("/account/delete/feedback") class AccountDeleteUpdateFeedbackApi(Resource): - @api.expect(parser_feedback) + @console_ns.expect(parser_feedback) @setup_required def post(self): args = parser_feedback.parse_args() @@ -396,7 +396,7 @@ class EducationApi(Resource): "allow_refresh": fields.Boolean, } - @api.expect(parser_edu) + @console_ns.expect(parser_edu) @setup_required @login_required @account_initialization_required @@ -441,7 +441,7 @@ class EducationAutoCompleteApi(Resource): "has_next": fields.Boolean, } - @api.expect(parser_autocomplete) + 
@console_ns.expect(parser_autocomplete) @setup_required @login_required @account_initialization_required @@ -465,7 +465,7 @@ parser_change_email = ( @console_ns.route("/account/change-email") class ChangeEmailSendEmailApi(Resource): - @api.expect(parser_change_email) + @console_ns.expect(parser_change_email) @enable_change_email @setup_required @login_required @@ -517,7 +517,7 @@ parser_validity = ( @console_ns.route("/account/change-email/validity") class ChangeEmailCheckApi(Resource): - @api.expect(parser_validity) + @console_ns.expect(parser_validity) @enable_change_email @setup_required @login_required @@ -563,7 +563,7 @@ parser_reset = ( @console_ns.route("/account/change-email/reset") class ChangeEmailResetApi(Resource): - @api.expect(parser_reset) + @console_ns.expect(parser_reset) @enable_change_email @setup_required @login_required @@ -603,7 +603,7 @@ parser_check = reqparse.RequestParser().add_argument("email", type=email, requir @console_ns.route("/account/change-email/check-email-unique") class CheckEmailUnique(Resource): - @api.expect(parser_check) + @console_ns.expect(parser_check) @setup_required def post(self): args = parser_check.parse_args() diff --git a/api/controllers/console/workspace/agent_providers.py b/api/controllers/console/workspace/agent_providers.py index 0a8f49d2e5..9527fe782e 100644 --- a/api/controllers/console/workspace/agent_providers.py +++ b/api/controllers/console/workspace/agent_providers.py @@ -1,6 +1,6 @@ from flask_restx import Resource, fields -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder from libs.login import current_account_with_tenant, login_required @@ -9,9 +9,9 @@ from services.agent_service import AgentService @console_ns.route("/workspaces/current/agent-providers") class AgentProviderListApi(Resource): - 
@api.doc("list_agent_providers") - @api.doc(description="Get list of available agent providers") - @api.response( + @console_ns.doc("list_agent_providers") + @console_ns.doc(description="Get list of available agent providers") + @console_ns.response( 200, "Success", fields.List(fields.Raw(description="Agent provider information")), @@ -31,10 +31,10 @@ class AgentProviderListApi(Resource): @console_ns.route("/workspaces/current/agent-provider/") class AgentProviderApi(Resource): - @api.doc("get_agent_provider") - @api.doc(description="Get specific agent provider details") - @api.doc(params={"provider_name": "Agent provider name"}) - @api.response( + @console_ns.doc("get_agent_provider") + @console_ns.doc(description="Get specific agent provider details") + @console_ns.doc(params={"provider_name": "Agent provider name"}) + @console_ns.response( 200, "Success", fields.Raw(description="Agent provider details"), diff --git a/api/controllers/console/workspace/endpoint.py b/api/controllers/console/workspace/endpoint.py index ae870a630e..7216b5e0e7 100644 --- a/api/controllers/console/workspace/endpoint.py +++ b/api/controllers/console/workspace/endpoint.py @@ -1,6 +1,6 @@ from flask_restx import Resource, fields, reqparse -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.exc import PluginPermissionDeniedError @@ -10,10 +10,10 @@ from services.plugin.endpoint_service import EndpointService @console_ns.route("/workspaces/current/endpoints/create") class EndpointCreateApi(Resource): - @api.doc("create_endpoint") - @api.doc(description="Create a new plugin endpoint") - @api.expect( - api.model( + @console_ns.doc("create_endpoint") + @console_ns.doc(description="Create a new plugin endpoint") + @console_ns.expect( + console_ns.model( 
"EndpointCreateRequest", { "plugin_unique_identifier": fields.String(required=True, description="Plugin unique identifier"), @@ -22,12 +22,12 @@ class EndpointCreateApi(Resource): }, ) ) - @api.response( + @console_ns.response( 200, "Endpoint created successfully", - api.model("EndpointCreateResponse", {"success": fields.Boolean(description="Operation success")}), + console_ns.model("EndpointCreateResponse", {"success": fields.Boolean(description="Operation success")}), ) - @api.response(403, "Admin privileges required") + @console_ns.response(403, "Admin privileges required") @setup_required @login_required @is_admin_or_owner_required @@ -63,17 +63,19 @@ class EndpointCreateApi(Resource): @console_ns.route("/workspaces/current/endpoints/list") class EndpointListApi(Resource): - @api.doc("list_endpoints") - @api.doc(description="List plugin endpoints with pagination") - @api.expect( - api.parser() + @console_ns.doc("list_endpoints") + @console_ns.doc(description="List plugin endpoints with pagination") + @console_ns.expect( + console_ns.parser() .add_argument("page", type=int, required=True, location="args", help="Page number") .add_argument("page_size", type=int, required=True, location="args", help="Page size") ) - @api.response( + @console_ns.response( 200, "Success", - api.model("EndpointListResponse", {"endpoints": fields.List(fields.Raw(description="Endpoint information"))}), + console_ns.model( + "EndpointListResponse", {"endpoints": fields.List(fields.Raw(description="Endpoint information"))} + ), ) @setup_required @login_required @@ -105,18 +107,18 @@ class EndpointListApi(Resource): @console_ns.route("/workspaces/current/endpoints/list/plugin") class EndpointListForSinglePluginApi(Resource): - @api.doc("list_plugin_endpoints") - @api.doc(description="List endpoints for a specific plugin") - @api.expect( - api.parser() + @console_ns.doc("list_plugin_endpoints") + @console_ns.doc(description="List endpoints for a specific plugin") + @console_ns.expect( + 
console_ns.parser() .add_argument("page", type=int, required=True, location="args", help="Page number") .add_argument("page_size", type=int, required=True, location="args", help="Page size") .add_argument("plugin_id", type=str, required=True, location="args", help="Plugin ID") ) - @api.response( + @console_ns.response( 200, "Success", - api.model( + console_ns.model( "PluginEndpointListResponse", {"endpoints": fields.List(fields.Raw(description="Endpoint information"))} ), ) @@ -153,17 +155,19 @@ class EndpointListForSinglePluginApi(Resource): @console_ns.route("/workspaces/current/endpoints/delete") class EndpointDeleteApi(Resource): - @api.doc("delete_endpoint") - @api.doc(description="Delete a plugin endpoint") - @api.expect( - api.model("EndpointDeleteRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")}) + @console_ns.doc("delete_endpoint") + @console_ns.doc(description="Delete a plugin endpoint") + @console_ns.expect( + console_ns.model( + "EndpointDeleteRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")} + ) ) - @api.response( + @console_ns.response( 200, "Endpoint deleted successfully", - api.model("EndpointDeleteResponse", {"success": fields.Boolean(description="Operation success")}), + console_ns.model("EndpointDeleteResponse", {"success": fields.Boolean(description="Operation success")}), ) - @api.response(403, "Admin privileges required") + @console_ns.response(403, "Admin privileges required") @setup_required @login_required @is_admin_or_owner_required @@ -183,10 +187,10 @@ class EndpointDeleteApi(Resource): @console_ns.route("/workspaces/current/endpoints/update") class EndpointUpdateApi(Resource): - @api.doc("update_endpoint") - @api.doc(description="Update a plugin endpoint") - @api.expect( - api.model( + @console_ns.doc("update_endpoint") + @console_ns.doc(description="Update a plugin endpoint") + @console_ns.expect( + console_ns.model( "EndpointUpdateRequest", { "endpoint_id": 
fields.String(required=True, description="Endpoint ID"), @@ -195,12 +199,12 @@ class EndpointUpdateApi(Resource): }, ) ) - @api.response( + @console_ns.response( 200, "Endpoint updated successfully", - api.model("EndpointUpdateResponse", {"success": fields.Boolean(description="Operation success")}), + console_ns.model("EndpointUpdateResponse", {"success": fields.Boolean(description="Operation success")}), ) - @api.response(403, "Admin privileges required") + @console_ns.response(403, "Admin privileges required") @setup_required @login_required @is_admin_or_owner_required @@ -233,17 +237,19 @@ class EndpointUpdateApi(Resource): @console_ns.route("/workspaces/current/endpoints/enable") class EndpointEnableApi(Resource): - @api.doc("enable_endpoint") - @api.doc(description="Enable a plugin endpoint") - @api.expect( - api.model("EndpointEnableRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")}) + @console_ns.doc("enable_endpoint") + @console_ns.doc(description="Enable a plugin endpoint") + @console_ns.expect( + console_ns.model( + "EndpointEnableRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")} + ) ) - @api.response( + @console_ns.response( 200, "Endpoint enabled successfully", - api.model("EndpointEnableResponse", {"success": fields.Boolean(description="Operation success")}), + console_ns.model("EndpointEnableResponse", {"success": fields.Boolean(description="Operation success")}), ) - @api.response(403, "Admin privileges required") + @console_ns.response(403, "Admin privileges required") @setup_required @login_required @is_admin_or_owner_required @@ -263,17 +269,19 @@ class EndpointEnableApi(Resource): @console_ns.route("/workspaces/current/endpoints/disable") class EndpointDisableApi(Resource): - @api.doc("disable_endpoint") - @api.doc(description="Disable a plugin endpoint") - @api.expect( - api.model("EndpointDisableRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")}) + 
@console_ns.doc("disable_endpoint") + @console_ns.doc(description="Disable a plugin endpoint") + @console_ns.expect( + console_ns.model( + "EndpointDisableRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")} + ) ) - @api.response( + @console_ns.response( 200, "Endpoint disabled successfully", - api.model("EndpointDisableResponse", {"success": fields.Boolean(description="Operation success")}), + console_ns.model("EndpointDisableResponse", {"success": fields.Boolean(description="Operation success")}), ) - @api.response(403, "Admin privileges required") + @console_ns.response(403, "Admin privileges required") @setup_required @login_required @is_admin_or_owner_required diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py index 3ca453f1da..f17f8e4bcf 100644 --- a/api/controllers/console/workspace/members.py +++ b/api/controllers/console/workspace/members.py @@ -5,7 +5,7 @@ from flask_restx import Resource, marshal_with, reqparse import services from configs import dify_config -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.auth.error import ( CannotTransferOwnerToSelfError, EmailCodeError, @@ -60,7 +60,7 @@ parser_invite = ( class MemberInviteEmailApi(Resource): """Invite a new member by email.""" - @api.expect(parser_invite) + @console_ns.expect(parser_invite) @setup_required @login_required @account_initialization_required @@ -153,7 +153,7 @@ parser_update = reqparse.RequestParser().add_argument("role", type=str, required class MemberUpdateRoleApi(Resource): """Update member role.""" - @api.expect(parser_update) + @console_ns.expect(parser_update) @setup_required @login_required @account_initialization_required @@ -204,7 +204,7 @@ parser_send = reqparse.RequestParser().add_argument("language", type=str, requir class SendOwnerTransferEmailApi(Resource): """Send owner transfer email.""" - @api.expect(parser_send) + 
@console_ns.expect(parser_send) @setup_required @login_required @account_initialization_required @@ -247,7 +247,7 @@ parser_owner = ( @console_ns.route("/workspaces/current/members/owner-transfer-check") class OwnerTransferCheckApi(Resource): - @api.expect(parser_owner) + @console_ns.expect(parser_owner) @setup_required @login_required @account_initialization_required @@ -295,7 +295,7 @@ parser_owner_transfer = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/members//owner-transfer") class OwnerTransfer(Resource): - @api.expect(parser_owner_transfer) + @console_ns.expect(parser_owner_transfer) @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py index 05731b3832..8ca69121bf 100644 --- a/api/controllers/console/workspace/model_providers.py +++ b/api/controllers/console/workspace/model_providers.py @@ -3,7 +3,7 @@ import io from flask import send_file from flask_restx import Resource, reqparse -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError @@ -25,7 +25,7 @@ parser_model = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/model-providers") class ModelProviderListApi(Resource): - @api.expect(parser_model) + @console_ns.expect(parser_model) @setup_required @login_required @account_initialization_required @@ -64,7 +64,7 @@ parser_delete_cred = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/model-providers//credentials") class ModelProviderCredentialApi(Resource): - @api.expect(parser_cred) + @console_ns.expect(parser_cred) @setup_required @login_required 
@account_initialization_required @@ -81,7 +81,7 @@ class ModelProviderCredentialApi(Resource): return {"credentials": credentials} - @api.expect(parser_post_cred) + @console_ns.expect(parser_post_cred) @setup_required @login_required @is_admin_or_owner_required @@ -104,7 +104,7 @@ class ModelProviderCredentialApi(Resource): return {"result": "success"}, 201 - @api.expect(parser_put_cred) + @console_ns.expect(parser_put_cred) @setup_required @login_required @is_admin_or_owner_required @@ -129,7 +129,7 @@ class ModelProviderCredentialApi(Resource): return {"result": "success"} - @api.expect(parser_delete_cred) + @console_ns.expect(parser_delete_cred) @setup_required @login_required @is_admin_or_owner_required @@ -153,7 +153,7 @@ parser_switch = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/model-providers//credentials/switch") class ModelProviderCredentialSwitchApi(Resource): - @api.expect(parser_switch) + @console_ns.expect(parser_switch) @setup_required @login_required @is_admin_or_owner_required @@ -178,7 +178,7 @@ parser_validate = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/model-providers//credentials/validate") class ModelProviderValidateApi(Resource): - @api.expect(parser_validate) + @console_ns.expect(parser_validate) @setup_required @login_required @account_initialization_required @@ -240,7 +240,7 @@ parser_preferred = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/model-providers//preferred-provider-type") class PreferredProviderTypeUpdateApi(Resource): - @api.expect(parser_preferred) + @console_ns.expect(parser_preferred) @setup_required @login_required @is_admin_or_owner_required diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py index 79079f692e..2aca73806a 100644 --- a/api/controllers/console/workspace/models.py +++ b/api/controllers/console/workspace/models.py @@ -2,7 +2,7 @@ import logging from 
flask_restx import Resource, reqparse -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError @@ -30,7 +30,7 @@ parser_post_default = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/default-model") class DefaultModelApi(Resource): - @api.expect(parser_get_default) + @console_ns.expect(parser_get_default) @setup_required @login_required @account_initialization_required @@ -46,7 +46,7 @@ class DefaultModelApi(Resource): return jsonable_encoder({"data": default_model_entity}) - @api.expect(parser_post_default) + @console_ns.expect(parser_post_default) @setup_required @login_required @is_admin_or_owner_required @@ -127,7 +127,7 @@ class ModelProviderModelApi(Resource): return jsonable_encoder({"data": models}) - @api.expect(parser_post_models) + @console_ns.expect(parser_post_models) @setup_required @login_required @is_admin_or_owner_required @@ -173,7 +173,7 @@ class ModelProviderModelApi(Resource): return {"result": "success"}, 200 - @api.expect(parser_delete_models) + @console_ns.expect(parser_delete_models) @setup_required @login_required @is_admin_or_owner_required @@ -253,7 +253,7 @@ parser_delete_cred = ( @console_ns.route("/workspaces/current/model-providers//models/credentials") class ModelProviderModelCredentialApi(Resource): - @api.expect(parser_get_credentials) + @console_ns.expect(parser_get_credentials) @setup_required @login_required @account_initialization_required @@ -304,7 +304,7 @@ class ModelProviderModelCredentialApi(Resource): } ) - @api.expect(parser_post_cred) + @console_ns.expect(parser_post_cred) @setup_required @login_required @is_admin_or_owner_required @@ -336,7 +336,7 @@ class ModelProviderModelCredentialApi(Resource): 
return {"result": "success"}, 201 - @api.expect(parser_put_cred) + @console_ns.expect(parser_put_cred) @setup_required @login_required @is_admin_or_owner_required @@ -362,7 +362,7 @@ class ModelProviderModelCredentialApi(Resource): return {"result": "success"} - @api.expect(parser_delete_cred) + @console_ns.expect(parser_delete_cred) @setup_required @login_required @is_admin_or_owner_required @@ -400,7 +400,7 @@ parser_switch = ( @console_ns.route("/workspaces/current/model-providers//models/credentials/switch") class ModelProviderModelCredentialSwitchApi(Resource): - @api.expect(parser_switch) + @console_ns.expect(parser_switch) @setup_required @login_required @is_admin_or_owner_required @@ -439,7 +439,7 @@ parser_model_enable_disable = ( "/workspaces/current/model-providers//models/enable", endpoint="model-provider-model-enable" ) class ModelProviderModelEnableApi(Resource): - @api.expect(parser_model_enable_disable) + @console_ns.expect(parser_model_enable_disable) @setup_required @login_required @account_initialization_required @@ -460,7 +460,7 @@ class ModelProviderModelEnableApi(Resource): "/workspaces/current/model-providers//models/disable", endpoint="model-provider-model-disable" ) class ModelProviderModelDisableApi(Resource): - @api.expect(parser_model_enable_disable) + @console_ns.expect(parser_model_enable_disable) @setup_required @login_required @account_initialization_required @@ -494,7 +494,7 @@ parser_validate = ( @console_ns.route("/workspaces/current/model-providers//models/credentials/validate") class ModelProviderModelValidateApi(Resource): - @api.expect(parser_validate) + @console_ns.expect(parser_validate) @setup_required @login_required @account_initialization_required @@ -535,7 +535,7 @@ parser_parameter = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/model-providers//models/parameter-rules") class ModelProviderModelParameterRuleApi(Resource): - @api.expect(parser_parameter) + 
@console_ns.expect(parser_parameter) @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index deae418e96..e3345033f8 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -5,7 +5,7 @@ from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden from configs import dify_config -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.workspace import plugin_permission_required from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder @@ -46,7 +46,7 @@ parser_list = ( @console_ns.route("/workspaces/current/plugin/list") class PluginListApi(Resource): - @api.expect(parser_list) + @console_ns.expect(parser_list) @setup_required @login_required @account_initialization_required @@ -66,7 +66,7 @@ parser_latest = reqparse.RequestParser().add_argument("plugin_ids", type=list, r @console_ns.route("/workspaces/current/plugin/list/latest-versions") class PluginListLatestVersionsApi(Resource): - @api.expect(parser_latest) + @console_ns.expect(parser_latest) @setup_required @login_required @account_initialization_required @@ -86,7 +86,7 @@ parser_ids = reqparse.RequestParser().add_argument("plugin_ids", type=list, requ @console_ns.route("/workspaces/current/plugin/list/installations/ids") class PluginListInstallationsFromIdsApi(Resource): - @api.expect(parser_ids) + @console_ns.expect(parser_ids) @setup_required @login_required @account_initialization_required @@ -112,7 +112,7 @@ parser_icon = ( @console_ns.route("/workspaces/current/plugin/icon") class PluginIconApi(Resource): - @api.expect(parser_icon) + @console_ns.expect(parser_icon) @setup_required def get(self): args = parser_icon.parse_args() @@ -181,7 
+181,7 @@ parser_github = ( @console_ns.route("/workspaces/current/plugin/upload/github") class PluginUploadFromGithubApi(Resource): - @api.expect(parser_github) + @console_ns.expect(parser_github) @setup_required @login_required @account_initialization_required @@ -230,7 +230,7 @@ parser_pkg = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/plugin/install/pkg") class PluginInstallFromPkgApi(Resource): - @api.expect(parser_pkg) + @console_ns.expect(parser_pkg) @setup_required @login_required @account_initialization_required @@ -263,7 +263,7 @@ parser_githubapi = ( @console_ns.route("/workspaces/current/plugin/install/github") class PluginInstallFromGithubApi(Resource): - @api.expect(parser_githubapi) + @console_ns.expect(parser_githubapi) @setup_required @login_required @account_initialization_required @@ -294,7 +294,7 @@ parser_marketplace = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/plugin/install/marketplace") class PluginInstallFromMarketplaceApi(Resource): - @api.expect(parser_marketplace) + @console_ns.expect(parser_marketplace) @setup_required @login_required @account_initialization_required @@ -324,7 +324,7 @@ parser_pkgapi = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/plugin/marketplace/pkg") class PluginFetchMarketplacePkgApi(Resource): - @api.expect(parser_pkgapi) + @console_ns.expect(parser_pkgapi) @setup_required @login_required @account_initialization_required @@ -353,7 +353,7 @@ parser_fetch = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/plugin/fetch-manifest") class PluginFetchManifestApi(Resource): - @api.expect(parser_fetch) + @console_ns.expect(parser_fetch) @setup_required @login_required @account_initialization_required @@ -384,7 +384,7 @@ parser_tasks = ( @console_ns.route("/workspaces/current/plugin/tasks") class PluginFetchInstallTasksApi(Resource): - @api.expect(parser_tasks) + @console_ns.expect(parser_tasks) 
@setup_required @login_required @account_initialization_required @@ -471,7 +471,7 @@ parser_marketplace_api = ( @console_ns.route("/workspaces/current/plugin/upgrade/marketplace") class PluginUpgradeFromMarketplaceApi(Resource): - @api.expect(parser_marketplace_api) + @console_ns.expect(parser_marketplace_api) @setup_required @login_required @account_initialization_required @@ -503,7 +503,7 @@ parser_github_post = ( @console_ns.route("/workspaces/current/plugin/upgrade/github") class PluginUpgradeFromGithubApi(Resource): - @api.expect(parser_github_post) + @console_ns.expect(parser_github_post) @setup_required @login_required @account_initialization_required @@ -535,7 +535,7 @@ parser_uninstall = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/plugin/uninstall") class PluginUninstallApi(Resource): - @api.expect(parser_uninstall) + @console_ns.expect(parser_uninstall) @setup_required @login_required @account_initialization_required @@ -560,7 +560,7 @@ parser_change_post = ( @console_ns.route("/workspaces/current/plugin/permission/change") class PluginChangePermissionApi(Resource): - @api.expect(parser_change_post) + @console_ns.expect(parser_change_post) @setup_required @login_required @account_initialization_required @@ -618,7 +618,7 @@ parser_dynamic = ( @console_ns.route("/workspaces/current/plugin/parameters/dynamic-options") class PluginFetchDynamicSelectOptionsApi(Resource): - @api.expect(parser_dynamic) + @console_ns.expect(parser_dynamic) @setup_required @login_required @is_admin_or_owner_required @@ -655,7 +655,7 @@ parser_change = ( @console_ns.route("/workspaces/current/plugin/preferences/change") class PluginChangePreferencesApi(Resource): - @api.expect(parser_change) + @console_ns.expect(parser_change) @setup_required @login_required @account_initialization_required @@ -749,7 +749,7 @@ parser_exclude = reqparse.RequestParser().add_argument("plugin_id", type=str, re 
@console_ns.route("/workspaces/current/plugin/preferences/autoupgrade/exclude") class PluginAutoUpgradeExcludePluginApi(Resource): - @api.expect(parser_exclude) + @console_ns.expect(parser_exclude) @setup_required @login_required @account_initialization_required diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 917059bb4c..2c54aa5a20 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -10,7 +10,7 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden from configs import dify_config -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.wraps import ( account_initialization_required, enterprise_license_required, @@ -65,7 +65,7 @@ parser_tool = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/tool-providers") class ToolProviderListApi(Resource): - @api.expect(parser_tool) + @console_ns.expect(parser_tool) @setup_required @login_required @account_initialization_required @@ -113,7 +113,7 @@ parser_delete = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/tool-provider/builtin//delete") class ToolBuiltinProviderDeleteApi(Resource): - @api.expect(parser_delete) + @console_ns.expect(parser_delete) @setup_required @login_required @is_admin_or_owner_required @@ -140,7 +140,7 @@ parser_add = ( @console_ns.route("/workspaces/current/tool-provider/builtin//add") class ToolBuiltinProviderAddApi(Resource): - @api.expect(parser_add) + @console_ns.expect(parser_add) @setup_required @login_required @account_initialization_required @@ -174,7 +174,7 @@ parser_update = ( @console_ns.route("/workspaces/current/tool-provider/builtin//update") class ToolBuiltinProviderUpdateApi(Resource): - @api.expect(parser_update) + @console_ns.expect(parser_update) @setup_required @login_required 
@is_admin_or_owner_required @@ -236,7 +236,7 @@ parser_api_add = ( @console_ns.route("/workspaces/current/tool-provider/api/add") class ToolApiProviderAddApi(Resource): - @api.expect(parser_api_add) + @console_ns.expect(parser_api_add) @setup_required @login_required @is_admin_or_owner_required @@ -267,7 +267,7 @@ parser_remote = reqparse.RequestParser().add_argument("url", type=str, required= @console_ns.route("/workspaces/current/tool-provider/api/remote") class ToolApiProviderGetRemoteSchemaApi(Resource): - @api.expect(parser_remote) + @console_ns.expect(parser_remote) @setup_required @login_required @account_initialization_required @@ -292,7 +292,7 @@ parser_tools = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/tool-provider/api/tools") class ToolApiProviderListToolsApi(Resource): - @api.expect(parser_tools) + @console_ns.expect(parser_tools) @setup_required @login_required @account_initialization_required @@ -328,7 +328,7 @@ parser_api_update = ( @console_ns.route("/workspaces/current/tool-provider/api/update") class ToolApiProviderUpdateApi(Resource): - @api.expect(parser_api_update) + @console_ns.expect(parser_api_update) @setup_required @login_required @is_admin_or_owner_required @@ -362,7 +362,7 @@ parser_api_delete = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/tool-provider/api/delete") class ToolApiProviderDeleteApi(Resource): - @api.expect(parser_api_delete) + @console_ns.expect(parser_api_delete) @setup_required @login_required @is_admin_or_owner_required @@ -386,7 +386,7 @@ parser_get = reqparse.RequestParser().add_argument("provider", type=str, require @console_ns.route("/workspaces/current/tool-provider/api/get") class ToolApiProviderGetApi(Resource): - @api.expect(parser_get) + @console_ns.expect(parser_get) @setup_required @login_required @account_initialization_required @@ -426,7 +426,7 @@ parser_schema = reqparse.RequestParser().add_argument( 
@console_ns.route("/workspaces/current/tool-provider/api/schema") class ToolApiProviderSchemaApi(Resource): - @api.expect(parser_schema) + @console_ns.expect(parser_schema) @setup_required @login_required @account_initialization_required @@ -451,7 +451,7 @@ parser_pre = ( @console_ns.route("/workspaces/current/tool-provider/api/test/pre") class ToolApiProviderPreviousTestApi(Resource): - @api.expect(parser_pre) + @console_ns.expect(parser_pre) @setup_required @login_required @account_initialization_required @@ -484,7 +484,7 @@ parser_create = ( @console_ns.route("/workspaces/current/tool-provider/workflow/create") class ToolWorkflowProviderCreateApi(Resource): - @api.expect(parser_create) + @console_ns.expect(parser_create) @setup_required @login_required @is_admin_or_owner_required @@ -525,7 +525,7 @@ parser_workflow_update = ( @console_ns.route("/workspaces/current/tool-provider/workflow/update") class ToolWorkflowProviderUpdateApi(Resource): - @api.expect(parser_workflow_update) + @console_ns.expect(parser_workflow_update) @setup_required @login_required @is_admin_or_owner_required @@ -560,7 +560,7 @@ parser_workflow_delete = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/tool-provider/workflow/delete") class ToolWorkflowProviderDeleteApi(Resource): - @api.expect(parser_workflow_delete) + @console_ns.expect(parser_workflow_delete) @setup_required @login_required @is_admin_or_owner_required @@ -588,7 +588,7 @@ parser_wf_get = ( @console_ns.route("/workspaces/current/tool-provider/workflow/get") class ToolWorkflowProviderGetApi(Resource): - @api.expect(parser_wf_get) + @console_ns.expect(parser_wf_get) @setup_required @login_required @account_initialization_required @@ -624,7 +624,7 @@ parser_wf_tools = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/tool-provider/workflow/tools") class ToolWorkflowProviderListToolApi(Resource): - @api.expect(parser_wf_tools) + @console_ns.expect(parser_wf_tools) 
@setup_required @login_required @account_initialization_required @@ -813,7 +813,7 @@ parser_default_cred = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/tool-provider/builtin//default-credential") class ToolBuiltinProviderSetDefaultApi(Resource): - @api.expect(parser_default_cred) + @console_ns.expect(parser_default_cred) @setup_required @login_required @account_initialization_required @@ -834,7 +834,7 @@ parser_custom = ( @console_ns.route("/workspaces/current/tool-provider/builtin//oauth/custom-client") class ToolOAuthCustomClient(Resource): - @api.expect(parser_custom) + @console_ns.expect(parser_custom) @setup_required @login_required @is_admin_or_owner_required @@ -932,7 +932,7 @@ parser_mcp_delete = reqparse.RequestParser().add_argument( @console_ns.route("/workspaces/current/tool-provider/mcp") class ToolProviderMCPApi(Resource): - @api.expect(parser_mcp) + @console_ns.expect(parser_mcp) @setup_required @login_required @account_initialization_required @@ -962,7 +962,7 @@ class ToolProviderMCPApi(Resource): ) return jsonable_encoder(result) - @api.expect(parser_mcp_put) + @console_ns.expect(parser_mcp_put) @setup_required @login_required @account_initialization_required @@ -1001,7 +1001,7 @@ class ToolProviderMCPApi(Resource): ) return {"result": "success"} - @api.expect(parser_mcp_delete) + @console_ns.expect(parser_mcp_delete) @setup_required @login_required @account_initialization_required @@ -1024,7 +1024,7 @@ parser_auth = ( @console_ns.route("/workspaces/current/tool-provider/mcp/auth") class ToolMCPAuthApi(Resource): - @api.expect(parser_auth) + @console_ns.expect(parser_auth) @setup_required @login_required @account_initialization_required @@ -1142,7 +1142,7 @@ parser_cb = ( @console_ns.route("/mcp/oauth/callback") class ToolMCPCallbackApi(Resource): - @api.expect(parser_cb) + @console_ns.expect(parser_cb) def get(self): args = parser_cb.parse_args() state_key = args["state"] diff --git 
a/api/controllers/console/workspace/trigger_providers.py b/api/controllers/console/workspace/trigger_providers.py index b2abae0b3d..1bcd80c1a5 100644 --- a/api/controllers/console/workspace/trigger_providers.py +++ b/api/controllers/console/workspace/trigger_providers.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import BadRequest, Forbidden from configs import dify_config -from controllers.console import api +from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required from controllers.web.error import NotFoundError from core.model_runtime.utils.encoders import jsonable_encoder @@ -539,45 +539,49 @@ class TriggerOAuthClientManageApi(Resource): # Trigger Subscription -api.add_resource(TriggerProviderIconApi, "/workspaces/current/trigger-provider//icon") -api.add_resource(TriggerProviderListApi, "/workspaces/current/triggers") -api.add_resource(TriggerProviderInfoApi, "/workspaces/current/trigger-provider//info") -api.add_resource(TriggerSubscriptionListApi, "/workspaces/current/trigger-provider//subscriptions/list") -api.add_resource( +console_ns.add_resource(TriggerProviderIconApi, "/workspaces/current/trigger-provider//icon") +console_ns.add_resource(TriggerProviderListApi, "/workspaces/current/triggers") +console_ns.add_resource(TriggerProviderInfoApi, "/workspaces/current/trigger-provider//info") +console_ns.add_resource( + TriggerSubscriptionListApi, "/workspaces/current/trigger-provider//subscriptions/list" +) +console_ns.add_resource( TriggerSubscriptionDeleteApi, "/workspaces/current/trigger-provider//subscriptions/delete", ) # Trigger Subscription Builder -api.add_resource( +console_ns.add_resource( TriggerSubscriptionBuilderCreateApi, "/workspaces/current/trigger-provider//subscriptions/builder/create", ) -api.add_resource( +console_ns.add_resource( TriggerSubscriptionBuilderGetApi, 
"/workspaces/current/trigger-provider//subscriptions/builder/", ) -api.add_resource( +console_ns.add_resource( TriggerSubscriptionBuilderUpdateApi, "/workspaces/current/trigger-provider//subscriptions/builder/update/", ) -api.add_resource( +console_ns.add_resource( TriggerSubscriptionBuilderVerifyApi, "/workspaces/current/trigger-provider//subscriptions/builder/verify/", ) -api.add_resource( +console_ns.add_resource( TriggerSubscriptionBuilderBuildApi, "/workspaces/current/trigger-provider//subscriptions/builder/build/", ) -api.add_resource( +console_ns.add_resource( TriggerSubscriptionBuilderLogsApi, "/workspaces/current/trigger-provider//subscriptions/builder/logs/", ) # OAuth -api.add_resource( +console_ns.add_resource( TriggerOAuthAuthorizeApi, "/workspaces/current/trigger-provider//subscriptions/oauth/authorize" ) -api.add_resource(TriggerOAuthCallbackApi, "/oauth/plugin//trigger/callback") -api.add_resource(TriggerOAuthClientManageApi, "/workspaces/current/trigger-provider//oauth/client") +console_ns.add_resource(TriggerOAuthCallbackApi, "/oauth/plugin//trigger/callback") +console_ns.add_resource( + TriggerOAuthClientManageApi, "/workspaces/current/trigger-provider//oauth/client" +) diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py index 1548a18b90..37c7dc3040 100644 --- a/api/controllers/console/workspace/workspace.py +++ b/api/controllers/console/workspace/workspace.py @@ -13,7 +13,7 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) -from controllers.console import api, console_ns +from controllers.console import console_ns from controllers.console.admin import admin_required from controllers.console.error import AccountNotLinkTenantError from controllers.console.wraps import ( @@ -155,7 +155,7 @@ parser_switch = reqparse.RequestParser().add_argument("tenant_id", type=str, req @console_ns.route("/workspaces/switch") class SwitchWorkspaceApi(Resource): - 
@api.expect(parser_switch) + @console_ns.expect(parser_switch) @setup_required @login_required @account_initialization_required @@ -250,7 +250,7 @@ parser_info = reqparse.RequestParser().add_argument("name", type=str, required=T @console_ns.route("/workspaces/info") class WorkspaceInfoApi(Resource): - @api.expect(parser_info) + @console_ns.expect(parser_info) @setup_required @login_required @account_initialization_required From 3841e8578f387f7d38b2907666a3793eaa65cb1c Mon Sep 17 00:00:00 2001 From: Chen Jiaju <619507631@qq.com> Date: Mon, 24 Nov 2025 10:08:26 +0800 Subject: [PATCH 06/22] fix: use default values for optional workflow input variables (#28546) (#28527) Co-authored-by: Claude --- api/core/app/app_config/entities.py | 1 + api/core/app/apps/base_app_generator.py | 6 +- .../core/app/apps/test_base_app_generator.py | 215 ++++++++++++++++++ 3 files changed, 221 insertions(+), 1 deletion(-) diff --git a/api/core/app/app_config/entities.py b/api/core/app/app_config/entities.py index e836a46f8f..2aa36ddc49 100644 --- a/api/core/app/app_config/entities.py +++ b/api/core/app/app_config/entities.py @@ -112,6 +112,7 @@ class VariableEntity(BaseModel): type: VariableEntityType required: bool = False hide: bool = False + default: Any = None max_length: int | None = None options: Sequence[str] = Field(default_factory=list) allowed_file_types: Sequence[FileType] | None = Field(default_factory=list) diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py index 01d025aca8..85be05fb69 100644 --- a/api/core/app/apps/base_app_generator.py +++ b/api/core/app/apps/base_app_generator.py @@ -93,7 +93,11 @@ class BaseAppGenerator: if value is None: if variable_entity.required: raise ValueError(f"{variable_entity.variable} is required in input form") - return value + # Use default value and continue validation to ensure type conversion + value = variable_entity.default + # If default is also None, return None directly + if value is None: + 
return None if variable_entity.type in { VariableEntityType.TEXT_INPUT, diff --git a/api/tests/unit_tests/core/app/apps/test_base_app_generator.py b/api/tests/unit_tests/core/app/apps/test_base_app_generator.py index a6bf43ab0c..fdab39f133 100644 --- a/api/tests/unit_tests/core/app/apps/test_base_app_generator.py +++ b/api/tests/unit_tests/core/app/apps/test_base_app_generator.py @@ -50,3 +50,218 @@ def test_validate_input_with_none_for_required_variable(): ) assert str(exc_info.value) == "test_var is required in input form" + + +def test_validate_inputs_with_default_value(): + """Test that default values are used when input is None for optional variables""" + base_app_generator = BaseAppGenerator() + + # Test with string default value for TEXT_INPUT + var_string = VariableEntity( + variable="test_var", + label="test_var", + type=VariableEntityType.TEXT_INPUT, + required=False, + default="default_string", + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_string, + value=None, + ) + + assert result == "default_string" + + # Test with string default value for PARAGRAPH + var_paragraph = VariableEntity( + variable="test_paragraph", + label="test_paragraph", + type=VariableEntityType.PARAGRAPH, + required=False, + default="default paragraph text", + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_paragraph, + value=None, + ) + + assert result == "default paragraph text" + + # Test with SELECT default value + var_select = VariableEntity( + variable="test_select", + label="test_select", + type=VariableEntityType.SELECT, + required=False, + default="option1", + options=["option1", "option2", "option3"], + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_select, + value=None, + ) + + assert result == "option1" + + # Test with number default value (int) + var_number_int = VariableEntity( + variable="test_number_int", + label="test_number_int", + type=VariableEntityType.NUMBER, + required=False, + 
default=42, + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_number_int, + value=None, + ) + + assert result == 42 + + # Test with number default value (float) + var_number_float = VariableEntity( + variable="test_number_float", + label="test_number_float", + type=VariableEntityType.NUMBER, + required=False, + default=3.14, + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_number_float, + value=None, + ) + + assert result == 3.14 + + # Test with number default value as string (frontend sends as string) + var_number_string = VariableEntity( + variable="test_number_string", + label="test_number_string", + type=VariableEntityType.NUMBER, + required=False, + default="123", + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_number_string, + value=None, + ) + + assert result == 123 + assert isinstance(result, int) + + # Test with float number default value as string + var_number_float_string = VariableEntity( + variable="test_number_float_string", + label="test_number_float_string", + type=VariableEntityType.NUMBER, + required=False, + default="45.67", + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_number_float_string, + value=None, + ) + + assert result == 45.67 + assert isinstance(result, float) + + # Test with CHECKBOX default value (bool) + var_checkbox_true = VariableEntity( + variable="test_checkbox_true", + label="test_checkbox_true", + type=VariableEntityType.CHECKBOX, + required=False, + default=True, + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_checkbox_true, + value=None, + ) + + assert result is True + + var_checkbox_false = VariableEntity( + variable="test_checkbox_false", + label="test_checkbox_false", + type=VariableEntityType.CHECKBOX, + required=False, + default=False, + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_checkbox_false, + value=None, + ) + + assert result is False + + # Test with 
None as explicit default value + var_none_default = VariableEntity( + variable="test_none", + label="test_none", + type=VariableEntityType.TEXT_INPUT, + required=False, + default=None, + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_none_default, + value=None, + ) + + assert result is None + + # Test that actual input value takes precedence over default + result = base_app_generator._validate_inputs( + variable_entity=var_string, + value="actual_value", + ) + + assert result == "actual_value" + + # Test that actual number input takes precedence over default + result = base_app_generator._validate_inputs( + variable_entity=var_number_int, + value=999, + ) + + assert result == 999 + + # Test with FILE default value (dict format from frontend) + var_file = VariableEntity( + variable="test_file", + label="test_file", + type=VariableEntityType.FILE, + required=False, + default={"id": "file123", "name": "default.pdf"}, + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_file, + value=None, + ) + + assert result == {"id": "file123", "name": "default.pdf"} + + # Test with FILE_LIST default value (list of dicts) + var_file_list = VariableEntity( + variable="test_file_list", + label="test_file_list", + type=VariableEntityType.FILE_LIST, + required=False, + default=[{"id": "file1", "name": "doc1.pdf"}, {"id": "file2", "name": "doc2.pdf"}], + ) + + result = base_app_generator._validate_inputs( + variable_entity=var_file_list, + value=None, + ) + + assert result == [{"id": "file1", "name": "doc1.pdf"}, {"id": "file2", "name": "doc2.pdf"}] From a39b151d5fcca97e9fa653b985b67b2595ff5dbd Mon Sep 17 00:00:00 2001 From: aka James4u Date: Sun, 23 Nov 2025 18:08:43 -0800 Subject: [PATCH 07/22] =?UTF-8?q?feat:=20add=20comprehensive=20unit=20test?= =?UTF-8?q?s=20for=20dataset=20service=20retrieval/list=E2=80=A6=20(#28539?= =?UTF-8?q?)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../test_dataset_service_retrieval.py | 746 ++++++++++++++++++ 1 file changed, 746 insertions(+) create mode 100644 api/tests/unit_tests/services/test_dataset_service_retrieval.py diff --git a/api/tests/unit_tests/services/test_dataset_service_retrieval.py b/api/tests/unit_tests/services/test_dataset_service_retrieval.py new file mode 100644 index 0000000000..caf02c159f --- /dev/null +++ b/api/tests/unit_tests/services/test_dataset_service_retrieval.py @@ -0,0 +1,746 @@ +""" +Comprehensive unit tests for DatasetService retrieval/list methods. + +This test suite covers: +- get_datasets - pagination, search, filtering, permissions +- get_dataset - single dataset retrieval +- get_datasets_by_ids - bulk retrieval +- get_process_rules - dataset processing rules +- get_dataset_queries - dataset query history +- get_related_apps - apps using the dataset +""" + +from unittest.mock import Mock, create_autospec, patch +from uuid import uuid4 + +import pytest + +from models.account import Account, TenantAccountRole +from models.dataset import ( + AppDatasetJoin, + Dataset, + DatasetPermission, + DatasetPermissionEnum, + DatasetProcessRule, + DatasetQuery, +) +from services.dataset_service import DatasetService, DocumentService + + +class DatasetRetrievalTestDataFactory: + """Factory class for creating test data and mock objects for dataset retrieval tests.""" + + @staticmethod + def create_dataset_mock( + dataset_id: str = "dataset-123", + name: str = "Test Dataset", + tenant_id: str = "tenant-123", + created_by: str = "user-123", + permission: DatasetPermissionEnum = DatasetPermissionEnum.ONLY_ME, + **kwargs, + ) -> Mock: + """Create a mock dataset with specified attributes.""" + dataset = Mock(spec=Dataset) + dataset.id = dataset_id + dataset.name = name + dataset.tenant_id = tenant_id + dataset.created_by = created_by + dataset.permission = permission + for key, value in 
kwargs.items(): + setattr(dataset, key, value) + return dataset + + @staticmethod + def create_account_mock( + account_id: str = "account-123", + tenant_id: str = "tenant-123", + role: TenantAccountRole = TenantAccountRole.NORMAL, + **kwargs, + ) -> Mock: + """Create a mock account.""" + account = create_autospec(Account, instance=True) + account.id = account_id + account.current_tenant_id = tenant_id + account.current_role = role + for key, value in kwargs.items(): + setattr(account, key, value) + return account + + @staticmethod + def create_dataset_permission_mock( + dataset_id: str = "dataset-123", + account_id: str = "account-123", + **kwargs, + ) -> Mock: + """Create a mock dataset permission.""" + permission = Mock(spec=DatasetPermission) + permission.dataset_id = dataset_id + permission.account_id = account_id + for key, value in kwargs.items(): + setattr(permission, key, value) + return permission + + @staticmethod + def create_process_rule_mock( + dataset_id: str = "dataset-123", + mode: str = "automatic", + rules: dict | None = None, + **kwargs, + ) -> Mock: + """Create a mock dataset process rule.""" + process_rule = Mock(spec=DatasetProcessRule) + process_rule.dataset_id = dataset_id + process_rule.mode = mode + process_rule.rules_dict = rules or {} + for key, value in kwargs.items(): + setattr(process_rule, key, value) + return process_rule + + @staticmethod + def create_dataset_query_mock( + dataset_id: str = "dataset-123", + query_id: str = "query-123", + **kwargs, + ) -> Mock: + """Create a mock dataset query.""" + dataset_query = Mock(spec=DatasetQuery) + dataset_query.id = query_id + dataset_query.dataset_id = dataset_id + for key, value in kwargs.items(): + setattr(dataset_query, key, value) + return dataset_query + + @staticmethod + def create_app_dataset_join_mock( + app_id: str = "app-123", + dataset_id: str = "dataset-123", + **kwargs, + ) -> Mock: + """Create a mock app-dataset join.""" + join = Mock(spec=AppDatasetJoin) + join.app_id = 
app_id + join.dataset_id = dataset_id + for key, value in kwargs.items(): + setattr(join, key, value) + return join + + +class TestDatasetServiceGetDatasets: + """ + Comprehensive unit tests for DatasetService.get_datasets method. + + This test suite covers: + - Pagination + - Search functionality + - Tag filtering + - Permission-based filtering (ONLY_ME, ALL_TEAM, PARTIAL_TEAM) + - Role-based filtering (OWNER, DATASET_OPERATOR, NORMAL) + - include_all flag + """ + + @pytest.fixture + def mock_dependencies(self): + """Common mock setup for get_datasets tests.""" + with ( + patch("services.dataset_service.db.session") as mock_db, + patch("services.dataset_service.db.paginate") as mock_paginate, + patch("services.dataset_service.TagService") as mock_tag_service, + ): + yield { + "db_session": mock_db, + "paginate": mock_paginate, + "tag_service": mock_tag_service, + } + + # ==================== Basic Retrieval Tests ==================== + + def test_get_datasets_basic_pagination(self, mock_dependencies): + """Test basic pagination without user or filters.""" + # Arrange + tenant_id = str(uuid4()) + page = 1 + per_page = 20 + + # Mock pagination result + mock_paginate_result = Mock() + mock_paginate_result.items = [ + DatasetRetrievalTestDataFactory.create_dataset_mock( + dataset_id=f"dataset-{i}", name=f"Dataset {i}", tenant_id=tenant_id + ) + for i in range(5) + ] + mock_paginate_result.total = 5 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets(page, per_page, tenant_id=tenant_id) + + # Assert + assert len(datasets) == 5 + assert total == 5 + mock_dependencies["paginate"].assert_called_once() + + def test_get_datasets_with_search(self, mock_dependencies): + """Test get_datasets with search keyword.""" + # Arrange + tenant_id = str(uuid4()) + page = 1 + per_page = 20 + search = "test" + + # Mock pagination result + mock_paginate_result = Mock() + mock_paginate_result.items = [ + 
DatasetRetrievalTestDataFactory.create_dataset_mock( + dataset_id="dataset-1", name="Test Dataset", tenant_id=tenant_id + ) + ] + mock_paginate_result.total = 1 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets(page, per_page, tenant_id=tenant_id, search=search) + + # Assert + assert len(datasets) == 1 + assert total == 1 + mock_dependencies["paginate"].assert_called_once() + + def test_get_datasets_with_tag_filtering(self, mock_dependencies): + """Test get_datasets with tag_ids filtering.""" + # Arrange + tenant_id = str(uuid4()) + page = 1 + per_page = 20 + tag_ids = ["tag-1", "tag-2"] + + # Mock tag service + target_ids = ["dataset-1", "dataset-2"] + mock_dependencies["tag_service"].get_target_ids_by_tag_ids.return_value = target_ids + + # Mock pagination result + mock_paginate_result = Mock() + mock_paginate_result.items = [ + DatasetRetrievalTestDataFactory.create_dataset_mock(dataset_id=dataset_id, tenant_id=tenant_id) + for dataset_id in target_ids + ] + mock_paginate_result.total = 2 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets(page, per_page, tenant_id=tenant_id, tag_ids=tag_ids) + + # Assert + assert len(datasets) == 2 + assert total == 2 + mock_dependencies["tag_service"].get_target_ids_by_tag_ids.assert_called_once_with( + "knowledge", tenant_id, tag_ids + ) + + def test_get_datasets_with_empty_tag_ids(self, mock_dependencies): + """Test get_datasets with empty tag_ids skips tag filtering and returns all matching datasets.""" + # Arrange + tenant_id = str(uuid4()) + page = 1 + per_page = 20 + tag_ids = [] + + # Mock pagination result - when tag_ids is empty, tag filtering is skipped + mock_paginate_result = Mock() + mock_paginate_result.items = [ + DatasetRetrievalTestDataFactory.create_dataset_mock(dataset_id=f"dataset-{i}", tenant_id=tenant_id) + for i in range(3) + ] + mock_paginate_result.total 
= 3 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets(page, per_page, tenant_id=tenant_id, tag_ids=tag_ids) + + # Assert + # When tag_ids is empty, tag filtering is skipped, so normal query results are returned + assert len(datasets) == 3 + assert total == 3 + # Tag service should not be called when tag_ids is empty + mock_dependencies["tag_service"].get_target_ids_by_tag_ids.assert_not_called() + mock_dependencies["paginate"].assert_called_once() + + # ==================== Permission-Based Filtering Tests ==================== + + def test_get_datasets_without_user_shows_only_all_team(self, mock_dependencies): + """Test that without user, only ALL_TEAM datasets are shown.""" + # Arrange + tenant_id = str(uuid4()) + page = 1 + per_page = 20 + + # Mock pagination result + mock_paginate_result = Mock() + mock_paginate_result.items = [ + DatasetRetrievalTestDataFactory.create_dataset_mock( + dataset_id="dataset-1", + tenant_id=tenant_id, + permission=DatasetPermissionEnum.ALL_TEAM, + ) + ] + mock_paginate_result.total = 1 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets(page, per_page, tenant_id=tenant_id, user=None) + + # Assert + assert len(datasets) == 1 + mock_dependencies["paginate"].assert_called_once() + + def test_get_datasets_owner_with_include_all(self, mock_dependencies): + """Test that OWNER with include_all=True sees all datasets.""" + # Arrange + tenant_id = str(uuid4()) + user = DatasetRetrievalTestDataFactory.create_account_mock( + account_id="owner-123", tenant_id=tenant_id, role=TenantAccountRole.OWNER + ) + + # Mock dataset permissions query (empty - owner doesn't need explicit permissions) + mock_query = Mock() + mock_query.filter_by.return_value.all.return_value = [] + mock_dependencies["db_session"].query.return_value = mock_query + + # Mock pagination result + mock_paginate_result = Mock() + 
mock_paginate_result.items = [ + DatasetRetrievalTestDataFactory.create_dataset_mock(dataset_id=f"dataset-{i}", tenant_id=tenant_id) + for i in range(3) + ] + mock_paginate_result.total = 3 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets( + page=1, per_page=20, tenant_id=tenant_id, user=user, include_all=True + ) + + # Assert + assert len(datasets) == 3 + assert total == 3 + + def test_get_datasets_normal_user_only_me_permission(self, mock_dependencies): + """Test that normal user sees ONLY_ME datasets they created.""" + # Arrange + tenant_id = str(uuid4()) + user_id = "user-123" + user = DatasetRetrievalTestDataFactory.create_account_mock( + account_id=user_id, tenant_id=tenant_id, role=TenantAccountRole.NORMAL + ) + + # Mock dataset permissions query (no explicit permissions) + mock_query = Mock() + mock_query.filter_by.return_value.all.return_value = [] + mock_dependencies["db_session"].query.return_value = mock_query + + # Mock pagination result + mock_paginate_result = Mock() + mock_paginate_result.items = [ + DatasetRetrievalTestDataFactory.create_dataset_mock( + dataset_id="dataset-1", + tenant_id=tenant_id, + created_by=user_id, + permission=DatasetPermissionEnum.ONLY_ME, + ) + ] + mock_paginate_result.total = 1 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets(page=1, per_page=20, tenant_id=tenant_id, user=user) + + # Assert + assert len(datasets) == 1 + assert total == 1 + + def test_get_datasets_normal_user_all_team_permission(self, mock_dependencies): + """Test that normal user sees ALL_TEAM datasets.""" + # Arrange + tenant_id = str(uuid4()) + user = DatasetRetrievalTestDataFactory.create_account_mock( + account_id="user-123", tenant_id=tenant_id, role=TenantAccountRole.NORMAL + ) + + # Mock dataset permissions query (no explicit permissions) + mock_query = Mock() + 
mock_query.filter_by.return_value.all.return_value = [] + mock_dependencies["db_session"].query.return_value = mock_query + + # Mock pagination result + mock_paginate_result = Mock() + mock_paginate_result.items = [ + DatasetRetrievalTestDataFactory.create_dataset_mock( + dataset_id="dataset-1", + tenant_id=tenant_id, + permission=DatasetPermissionEnum.ALL_TEAM, + ) + ] + mock_paginate_result.total = 1 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets(page=1, per_page=20, tenant_id=tenant_id, user=user) + + # Assert + assert len(datasets) == 1 + assert total == 1 + + def test_get_datasets_normal_user_partial_team_with_permission(self, mock_dependencies): + """Test that normal user sees PARTIAL_TEAM datasets they have permission for.""" + # Arrange + tenant_id = str(uuid4()) + user_id = "user-123" + dataset_id = "dataset-1" + user = DatasetRetrievalTestDataFactory.create_account_mock( + account_id=user_id, tenant_id=tenant_id, role=TenantAccountRole.NORMAL + ) + + # Mock dataset permissions query - user has permission + permission = DatasetRetrievalTestDataFactory.create_dataset_permission_mock( + dataset_id=dataset_id, account_id=user_id + ) + mock_query = Mock() + mock_query.filter_by.return_value.all.return_value = [permission] + mock_dependencies["db_session"].query.return_value = mock_query + + # Mock pagination result + mock_paginate_result = Mock() + mock_paginate_result.items = [ + DatasetRetrievalTestDataFactory.create_dataset_mock( + dataset_id=dataset_id, + tenant_id=tenant_id, + permission=DatasetPermissionEnum.PARTIAL_TEAM, + ) + ] + mock_paginate_result.total = 1 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets(page=1, per_page=20, tenant_id=tenant_id, user=user) + + # Assert + assert len(datasets) == 1 + assert total == 1 + + def test_get_datasets_dataset_operator_with_permissions(self, 
mock_dependencies): + """Test that DATASET_OPERATOR only sees datasets they have explicit permission for.""" + # Arrange + tenant_id = str(uuid4()) + user_id = "operator-123" + dataset_id = "dataset-1" + user = DatasetRetrievalTestDataFactory.create_account_mock( + account_id=user_id, tenant_id=tenant_id, role=TenantAccountRole.DATASET_OPERATOR + ) + + # Mock dataset permissions query - operator has permission + permission = DatasetRetrievalTestDataFactory.create_dataset_permission_mock( + dataset_id=dataset_id, account_id=user_id + ) + mock_query = Mock() + mock_query.filter_by.return_value.all.return_value = [permission] + mock_dependencies["db_session"].query.return_value = mock_query + + # Mock pagination result + mock_paginate_result = Mock() + mock_paginate_result.items = [ + DatasetRetrievalTestDataFactory.create_dataset_mock(dataset_id=dataset_id, tenant_id=tenant_id) + ] + mock_paginate_result.total = 1 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets(page=1, per_page=20, tenant_id=tenant_id, user=user) + + # Assert + assert len(datasets) == 1 + assert total == 1 + + def test_get_datasets_dataset_operator_without_permissions(self, mock_dependencies): + """Test that DATASET_OPERATOR without permissions returns empty result.""" + # Arrange + tenant_id = str(uuid4()) + user_id = "operator-123" + user = DatasetRetrievalTestDataFactory.create_account_mock( + account_id=user_id, tenant_id=tenant_id, role=TenantAccountRole.DATASET_OPERATOR + ) + + # Mock dataset permissions query - no permissions + mock_query = Mock() + mock_query.filter_by.return_value.all.return_value = [] + mock_dependencies["db_session"].query.return_value = mock_query + + # Act + datasets, total = DatasetService.get_datasets(page=1, per_page=20, tenant_id=tenant_id, user=user) + + # Assert + assert datasets == [] + assert total == 0 + + +class TestDatasetServiceGetDataset: + """Comprehensive unit tests for 
DatasetService.get_dataset method.""" + + @pytest.fixture + def mock_dependencies(self): + """Common mock setup for get_dataset tests.""" + with patch("services.dataset_service.db.session") as mock_db: + yield {"db_session": mock_db} + + def test_get_dataset_success(self, mock_dependencies): + """Test successful retrieval of a single dataset.""" + # Arrange + dataset_id = str(uuid4()) + dataset = DatasetRetrievalTestDataFactory.create_dataset_mock(dataset_id=dataset_id) + + # Mock database query + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = dataset + mock_dependencies["db_session"].query.return_value = mock_query + + # Act + result = DatasetService.get_dataset(dataset_id) + + # Assert + assert result is not None + assert result.id == dataset_id + mock_query.filter_by.assert_called_once_with(id=dataset_id) + + def test_get_dataset_not_found(self, mock_dependencies): + """Test retrieval when dataset doesn't exist.""" + # Arrange + dataset_id = str(uuid4()) + + # Mock database query returning None + mock_query = Mock() + mock_query.filter_by.return_value.first.return_value = None + mock_dependencies["db_session"].query.return_value = mock_query + + # Act + result = DatasetService.get_dataset(dataset_id) + + # Assert + assert result is None + + +class TestDatasetServiceGetDatasetsByIds: + """Comprehensive unit tests for DatasetService.get_datasets_by_ids method.""" + + @pytest.fixture + def mock_dependencies(self): + """Common mock setup for get_datasets_by_ids tests.""" + with patch("services.dataset_service.db.paginate") as mock_paginate: + yield {"paginate": mock_paginate} + + def test_get_datasets_by_ids_success(self, mock_dependencies): + """Test successful bulk retrieval of datasets by IDs.""" + # Arrange + tenant_id = str(uuid4()) + dataset_ids = [str(uuid4()), str(uuid4()), str(uuid4())] + + # Mock pagination result + mock_paginate_result = Mock() + mock_paginate_result.items = [ + 
DatasetRetrievalTestDataFactory.create_dataset_mock(dataset_id=dataset_id, tenant_id=tenant_id) + for dataset_id in dataset_ids + ] + mock_paginate_result.total = len(dataset_ids) + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + datasets, total = DatasetService.get_datasets_by_ids(dataset_ids, tenant_id) + + # Assert + assert len(datasets) == 3 + assert total == 3 + assert all(dataset.id in dataset_ids for dataset in datasets) + mock_dependencies["paginate"].assert_called_once() + + def test_get_datasets_by_ids_empty_list(self, mock_dependencies): + """Test get_datasets_by_ids with empty list returns empty result.""" + # Arrange + tenant_id = str(uuid4()) + dataset_ids = [] + + # Act + datasets, total = DatasetService.get_datasets_by_ids(dataset_ids, tenant_id) + + # Assert + assert datasets == [] + assert total == 0 + mock_dependencies["paginate"].assert_not_called() + + def test_get_datasets_by_ids_none_list(self, mock_dependencies): + """Test get_datasets_by_ids with None returns empty result.""" + # Arrange + tenant_id = str(uuid4()) + + # Act + datasets, total = DatasetService.get_datasets_by_ids(None, tenant_id) + + # Assert + assert datasets == [] + assert total == 0 + mock_dependencies["paginate"].assert_not_called() + + +class TestDatasetServiceGetProcessRules: + """Comprehensive unit tests for DatasetService.get_process_rules method.""" + + @pytest.fixture + def mock_dependencies(self): + """Common mock setup for get_process_rules tests.""" + with patch("services.dataset_service.db.session") as mock_db: + yield {"db_session": mock_db} + + def test_get_process_rules_with_existing_rule(self, mock_dependencies): + """Test retrieval of process rules when rule exists.""" + # Arrange + dataset_id = str(uuid4()) + rules_data = { + "pre_processing_rules": [{"id": "remove_extra_spaces", "enabled": True}], + "segmentation": {"delimiter": "\n", "max_tokens": 500}, + } + process_rule = 
DatasetRetrievalTestDataFactory.create_process_rule_mock( + dataset_id=dataset_id, mode="custom", rules=rules_data + ) + + # Mock database query + mock_query = Mock() + mock_query.where.return_value.order_by.return_value.limit.return_value.one_or_none.return_value = process_rule + mock_dependencies["db_session"].query.return_value = mock_query + + # Act + result = DatasetService.get_process_rules(dataset_id) + + # Assert + assert result["mode"] == "custom" + assert result["rules"] == rules_data + + def test_get_process_rules_without_existing_rule(self, mock_dependencies): + """Test retrieval of process rules when no rule exists (returns defaults).""" + # Arrange + dataset_id = str(uuid4()) + + # Mock database query returning None + mock_query = Mock() + mock_query.where.return_value.order_by.return_value.limit.return_value.one_or_none.return_value = None + mock_dependencies["db_session"].query.return_value = mock_query + + # Act + result = DatasetService.get_process_rules(dataset_id) + + # Assert + assert result["mode"] == DocumentService.DEFAULT_RULES["mode"] + assert "rules" in result + assert result["rules"] == DocumentService.DEFAULT_RULES["rules"] + + +class TestDatasetServiceGetDatasetQueries: + """Comprehensive unit tests for DatasetService.get_dataset_queries method.""" + + @pytest.fixture + def mock_dependencies(self): + """Common mock setup for get_dataset_queries tests.""" + with patch("services.dataset_service.db.paginate") as mock_paginate: + yield {"paginate": mock_paginate} + + def test_get_dataset_queries_success(self, mock_dependencies): + """Test successful retrieval of dataset queries.""" + # Arrange + dataset_id = str(uuid4()) + page = 1 + per_page = 20 + + # Mock pagination result + mock_paginate_result = Mock() + mock_paginate_result.items = [ + DatasetRetrievalTestDataFactory.create_dataset_query_mock(dataset_id=dataset_id, query_id=f"query-{i}") + for i in range(3) + ] + mock_paginate_result.total = 3 + 
mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + queries, total = DatasetService.get_dataset_queries(dataset_id, page, per_page) + + # Assert + assert len(queries) == 3 + assert total == 3 + assert all(query.dataset_id == dataset_id for query in queries) + mock_dependencies["paginate"].assert_called_once() + + def test_get_dataset_queries_empty_result(self, mock_dependencies): + """Test retrieval when no queries exist.""" + # Arrange + dataset_id = str(uuid4()) + page = 1 + per_page = 20 + + # Mock pagination result (empty) + mock_paginate_result = Mock() + mock_paginate_result.items = [] + mock_paginate_result.total = 0 + mock_dependencies["paginate"].return_value = mock_paginate_result + + # Act + queries, total = DatasetService.get_dataset_queries(dataset_id, page, per_page) + + # Assert + assert queries == [] + assert total == 0 + + +class TestDatasetServiceGetRelatedApps: + """Comprehensive unit tests for DatasetService.get_related_apps method.""" + + @pytest.fixture + def mock_dependencies(self): + """Common mock setup for get_related_apps tests.""" + with patch("services.dataset_service.db.session") as mock_db: + yield {"db_session": mock_db} + + def test_get_related_apps_success(self, mock_dependencies): + """Test successful retrieval of related apps.""" + # Arrange + dataset_id = str(uuid4()) + + # Mock app-dataset joins + app_joins = [ + DatasetRetrievalTestDataFactory.create_app_dataset_join_mock(app_id=f"app-{i}", dataset_id=dataset_id) + for i in range(2) + ] + + # Mock database query + mock_query = Mock() + mock_query.where.return_value.order_by.return_value.all.return_value = app_joins + mock_dependencies["db_session"].query.return_value = mock_query + + # Act + result = DatasetService.get_related_apps(dataset_id) + + # Assert + assert len(result) == 2 + assert all(join.dataset_id == dataset_id for join in result) + mock_query.where.assert_called_once() + mock_query.where.return_value.order_by.assert_called_once() + + def 
test_get_related_apps_empty_result(self, mock_dependencies): + """Test retrieval when no related apps exist.""" + # Arrange + dataset_id = str(uuid4()) + + # Mock database query returning empty list + mock_query = Mock() + mock_query.where.return_value.order_by.return_value.all.return_value = [] + mock_dependencies["db_session"].query.return_value = mock_query + + # Act + result = DatasetService.get_related_apps(dataset_id) + + # Assert + assert result == [] From a58986eb06bdd2fa44b90b3e1ed1fb2b7fc33d4b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Nov 2025 10:11:00 +0800 Subject: [PATCH 08/22] chore(deps): bump clickhouse-connect from 0.7.19 to 0.10.0 in /api (#28559) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- api/pyproject.toml | 2 +- api/uv.lock | 42 +++++++++++++++++++----------------------- 2 files changed, 20 insertions(+), 24 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 98813ef42c..da421f5fc8 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -203,7 +203,7 @@ vdb = [ "alibabacloud_gpdb20160503~=3.8.0", "alibabacloud_tea_openapi~=0.3.9", "chromadb==0.5.20", - "clickhouse-connect~=0.7.16", + "clickhouse-connect~=0.10.0", "clickzetta-connector-python>=0.8.102", "couchbase~=4.3.0", "elasticsearch==8.14.0", diff --git a/api/uv.lock b/api/uv.lock index dab6bc5787..0c9f73ccf0 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1003,7 +1003,7 @@ wheels = [ [[package]] name = "clickhouse-connect" -version = "0.7.19" +version = "0.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -1012,28 +1012,24 @@ dependencies = [ { name = "urllib3" }, { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/8e/bf6012f7b45dbb74e19ad5c881a7bbcd1e7dd2b990f12cc434294d917800/clickhouse-connect-0.7.19.tar.gz", hash = 
"sha256:ce8f21f035781c5ef6ff57dc162e8150779c009b59f14030ba61f8c9c10c06d0", size = 84918, upload-time = "2024-08-21T21:37:16.639Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/fd/f8bea1157d40f117248dcaa9abdbf68c729513fcf2098ab5cb4aa58768b8/clickhouse_connect-0.10.0.tar.gz", hash = "sha256:a0256328802c6e5580513e197cef7f9ba49a99fc98e9ba410922873427569564", size = 104753, upload-time = "2025-11-14T20:31:00.947Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/6f/a78cad40dc0f1fee19094c40abd7d23ff04bb491732c3a65b3661d426c89/clickhouse_connect-0.7.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee47af8926a7ec3a970e0ebf29a82cbbe3b1b7eae43336a81b3a0ca18091de5f", size = 253530, upload-time = "2024-08-21T21:35:53.372Z" }, - { url = "https://files.pythonhosted.org/packages/40/82/419d110149900ace5eb0787c668d11e1657ac0eabb65c1404f039746f4ed/clickhouse_connect-0.7.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce429233b2d21a8a149c8cd836a2555393cbcf23d61233520db332942ffb8964", size = 245691, upload-time = "2024-08-21T21:35:55.074Z" }, - { url = "https://files.pythonhosted.org/packages/e3/9c/ad6708ced6cf9418334d2bf19bbba3c223511ed852eb85f79b1e7c20cdbd/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:617c04f5c46eed3344a7861cd96fb05293e70d3b40d21541b1e459e7574efa96", size = 1055273, upload-time = "2024-08-21T21:35:56.478Z" }, - { url = "https://files.pythonhosted.org/packages/ea/99/88c24542d6218100793cfb13af54d7ad4143d6515b0b3d621ba3b5a2d8af/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08e33b8cc2dc1873edc5ee4088d4fc3c0dbb69b00e057547bcdc7e9680b43e5", size = 1067030, upload-time = "2024-08-21T21:35:58.096Z" }, - { url = 
"https://files.pythonhosted.org/packages/c8/84/19eb776b4e760317c21214c811f04f612cba7eee0f2818a7d6806898a994/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:921886b887f762e5cc3eef57ef784d419a3f66df85fd86fa2e7fbbf464c4c54a", size = 1027207, upload-time = "2024-08-21T21:35:59.832Z" }, - { url = "https://files.pythonhosted.org/packages/22/81/c2982a33b088b6c9af5d0bdc46413adc5fedceae063b1f8b56570bb28887/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ad0cf8552a9e985cfa6524b674ae7c8f5ba51df5bd3ecddbd86c82cdbef41a7", size = 1054850, upload-time = "2024-08-21T21:36:01.559Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a4/4a84ed3e92323d12700011cc8c4039f00a8c888079d65e75a4d4758ba288/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:70f838ef0861cdf0e2e198171a1f3fd2ee05cf58e93495eeb9b17dfafb278186", size = 1022784, upload-time = "2024-08-21T21:36:02.805Z" }, - { url = "https://files.pythonhosted.org/packages/5e/67/3f5cc6f78c9adbbd6a3183a3f9f3196a116be19e958d7eaa6e307b391fed/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c5f0d207cb0dcc1adb28ced63f872d080924b7562b263a9d54d4693b670eb066", size = 1071084, upload-time = "2024-08-21T21:36:04.052Z" }, - { url = "https://files.pythonhosted.org/packages/01/8d/a294e1cc752e22bc6ee08aa421ea31ed9559b09d46d35499449140a5c374/clickhouse_connect-0.7.19-cp311-cp311-win32.whl", hash = "sha256:8c96c4c242b98fcf8005e678a26dbd4361748721b6fa158c1fe84ad15c7edbbe", size = 221156, upload-time = "2024-08-21T21:36:05.72Z" }, - { url = "https://files.pythonhosted.org/packages/68/69/09b3a4e53f5d3d770e9fa70f6f04642cdb37cc76d37279c55fd4e868f845/clickhouse_connect-0.7.19-cp311-cp311-win_amd64.whl", hash = "sha256:bda092bab224875ed7c7683707d63f8a2322df654c4716e6611893a18d83e908", size = 238826, upload-time = "2024-08-21T21:36:06.892Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/f8/1d48719728bac33c1a9815e0a7230940e078fd985b09af2371715de78a3c/clickhouse_connect-0.7.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8f170d08166438d29f0dcfc8a91b672c783dc751945559e65eefff55096f9274", size = 256687, upload-time = "2024-08-21T21:36:08.245Z" }, - { url = "https://files.pythonhosted.org/packages/ed/0d/3cbbbd204be045c4727f9007679ad97d3d1d559b43ba844373a79af54d16/clickhouse_connect-0.7.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26b80cb8f66bde9149a9a2180e2cc4895c1b7d34f9dceba81630a9b9a9ae66b2", size = 247631, upload-time = "2024-08-21T21:36:09.679Z" }, - { url = "https://files.pythonhosted.org/packages/b6/44/adb55285226d60e9c46331a9980c88dad8c8de12abb895c4e3149a088092/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba80e3598acf916c4d1b2515671f65d9efee612a783c17c56a5a646f4db59b9", size = 1053767, upload-time = "2024-08-21T21:36:11.361Z" }, - { url = "https://files.pythonhosted.org/packages/6c/f3/a109c26a41153768be57374cb823cac5daf74c9098a5c61081ffabeb4e59/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d38c30bd847af0ce7ff738152478f913854db356af4d5824096394d0eab873d", size = 1072014, upload-time = "2024-08-21T21:36:12.752Z" }, - { url = "https://files.pythonhosted.org/packages/51/80/9c200e5e392a538f2444c9a6a93e1cf0e36588c7e8720882ac001e23b246/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d41d4b159071c0e4f607563932d4fa5c2a8fc27d3ba1200d0929b361e5191864", size = 1027423, upload-time = "2024-08-21T21:36:14.483Z" }, - { url = "https://files.pythonhosted.org/packages/33/a3/219fcd1572f1ce198dcef86da8c6c526b04f56e8b7a82e21119677f89379/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3682c2426f5dbda574611210e3c7c951b9557293a49eb60a7438552435873889", size = 1053683, 
upload-time = "2024-08-21T21:36:15.828Z" }, - { url = "https://files.pythonhosted.org/packages/5d/df/687d90fbc0fd8ce586c46400f3791deac120e4c080aa8b343c0f676dfb08/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6d492064dca278eb61be3a2d70a5f082e2ebc8ceebd4f33752ae234116192020", size = 1021120, upload-time = "2024-08-21T21:36:17.184Z" }, - { url = "https://files.pythonhosted.org/packages/c8/3b/39ba71b103275df8ec90d424dbaca2dba82b28398c3d2aeac5a0141b6aae/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:62612da163b934c1ff35df6155a47cf17ac0e2d2f9f0f8f913641e5c02cdf39f", size = 1073652, upload-time = "2024-08-21T21:36:19.053Z" }, - { url = "https://files.pythonhosted.org/packages/b3/92/06df8790a7d93d5d5f1098604fc7d79682784818030091966a3ce3f766a8/clickhouse_connect-0.7.19-cp312-cp312-win32.whl", hash = "sha256:196e48c977affc045794ec7281b4d711e169def00535ecab5f9fdeb8c177f149", size = 221589, upload-time = "2024-08-21T21:36:20.796Z" }, - { url = "https://files.pythonhosted.org/packages/42/1f/935d0810b73184a1d306f92458cb0a2e9b0de2377f536da874e063b8e422/clickhouse_connect-0.7.19-cp312-cp312-win_amd64.whl", hash = "sha256:b771ca6a473d65103dcae82810d3a62475c5372fc38d8f211513c72b954fb020", size = 239584, upload-time = "2024-08-21T21:36:22.105Z" }, + { url = "https://files.pythonhosted.org/packages/bf/4e/f90caf963d14865c7a3f0e5d80b77e67e0fe0bf39b3de84110707746fa6b/clickhouse_connect-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:195f1824405501b747b572e1365c6265bb1629eeb712ce91eda91da3c5794879", size = 272911, upload-time = "2025-11-14T20:29:57.129Z" }, + { url = "https://files.pythonhosted.org/packages/50/c7/e01bd2dd80ea4fbda8968e5022c60091a872fd9de0a123239e23851da231/clickhouse_connect-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7907624635fe7f28e1b85c7c8b125a72679a63ecdb0b9f4250b704106ef438f8", size = 265938, upload-time = "2025-11-14T20:29:58.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/07/8b567b949abca296e118331d13380bbdefa4225d7d1d32233c59d4b4b2e1/clickhouse_connect-0.10.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60772faa54d56f0fa34650460910752a583f5948f44dddeabfafaecbca21fc54", size = 1113548, upload-time = "2025-11-14T20:29:59.781Z" }, + { url = "https://files.pythonhosted.org/packages/9c/13/11f2d37fc95e74d7e2d80702cde87666ce372486858599a61f5209e35fc5/clickhouse_connect-0.10.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7fe2a6cd98517330c66afe703fb242c0d3aa2c91f2f7dc9fb97c122c5c60c34b", size = 1135061, upload-time = "2025-11-14T20:30:01.244Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d0/517181ea80060f84d84cff4d42d330c80c77bb352b728fb1f9681fbad291/clickhouse_connect-0.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a2427d312bc3526520a0be8c648479af3f6353da7a33a62db2368d6203b08efd", size = 1105105, upload-time = "2025-11-14T20:30:02.679Z" }, + { url = "https://files.pythonhosted.org/packages/7c/b2/4ad93e898562725b58c537cad83ab2694c9b1c1ef37fa6c3f674bdad366a/clickhouse_connect-0.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:63bbb5721bfece698e155c01b8fa95ce4377c584f4d04b43f383824e8a8fa129", size = 1150791, upload-time = "2025-11-14T20:30:03.824Z" }, + { url = "https://files.pythonhosted.org/packages/45/a4/fdfbfacc1fa67b8b1ce980adcf42f9e3202325586822840f04f068aff395/clickhouse_connect-0.10.0-cp311-cp311-win32.whl", hash = "sha256:48554e836c6b56fe0854d9a9f565569010583d4960094d60b68a53f9f83042f0", size = 244014, upload-time = "2025-11-14T20:30:05.157Z" }, + { url = "https://files.pythonhosted.org/packages/08/50/cf53f33f4546a9ce2ab1b9930db4850aa1ae53bff1e4e4fa97c566cdfa19/clickhouse_connect-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9eb8df083e5fda78ac7249938691c2c369e8578b5df34c709467147e8289f1d9", size = 262356, upload-time = 
"2025-11-14T20:30:06.478Z" }, + { url = "https://files.pythonhosted.org/packages/9e/59/fadbbf64f4c6496cd003a0a3c9223772409a86d0eea9d4ff45d2aa88aabf/clickhouse_connect-0.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b090c7d8e602dd084b2795265cd30610461752284763d9ad93a5d619a0e0ff21", size = 276401, upload-time = "2025-11-14T20:30:07.469Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e3/781f9970f2ef202410f0d64681e42b2aecd0010097481a91e4df186a36c7/clickhouse_connect-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b8a708d38b81dcc8c13bb85549c904817e304d2b7f461246fed2945524b7a31b", size = 268193, upload-time = "2025-11-14T20:30:08.503Z" }, + { url = "https://files.pythonhosted.org/packages/f0/e0/64ab66b38fce762b77b5203a4fcecc603595f2a2361ce1605fc7bb79c835/clickhouse_connect-0.10.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3646fc9184a5469b95cf4a0846e6954e6e9e85666f030a5d2acae58fa8afb37e", size = 1123810, upload-time = "2025-11-14T20:30:09.62Z" }, + { url = "https://files.pythonhosted.org/packages/f5/03/19121aecf11a30feaf19049be96988131798c54ac6ba646a38e5faecaa0a/clickhouse_connect-0.10.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fe7e6be0f40a8a77a90482944f5cc2aa39084c1570899e8d2d1191f62460365b", size = 1153409, upload-time = "2025-11-14T20:30:10.855Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ee/63870fd8b666c6030393950ad4ee76b7b69430f5a49a5d3fa32a70b11942/clickhouse_connect-0.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:88b4890f13163e163bf6fa61f3a013bb974c95676853b7a4e63061faf33911ac", size = 1104696, upload-time = "2025-11-14T20:30:12.187Z" }, + { url = "https://files.pythonhosted.org/packages/e9/bc/fcd8da1c4d007ebce088783979c495e3d7360867cfa8c91327ed235778f5/clickhouse_connect-0.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:6286832cc79affc6fddfbf5563075effa65f80e7cd1481cf2b771ce317c67d08", size = 1156389, upload-time = "2025-11-14T20:30:13.385Z" }, + { url = "https://files.pythonhosted.org/packages/4e/33/7cb99cc3fc503c23fd3a365ec862eb79cd81c8dc3037242782d709280fa9/clickhouse_connect-0.10.0-cp312-cp312-win32.whl", hash = "sha256:92b8b6691a92d2613ee35f5759317bd4be7ba66d39bf81c4deed620feb388ca6", size = 243682, upload-time = "2025-11-14T20:30:14.52Z" }, + { url = "https://files.pythonhosted.org/packages/48/5c/12eee6a1f5ecda2dfc421781fde653c6d6ca6f3080f24547c0af40485a5a/clickhouse_connect-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:1159ee2c33e7eca40b53dda917a8b6a2ed889cb4c54f3d83b303b31ddb4f351d", size = 262790, upload-time = "2025-11-14T20:30:15.555Z" }, ] [[package]] @@ -1703,7 +1699,7 @@ vdb = [ { name = "alibabacloud-gpdb20160503", specifier = "~=3.8.0" }, { name = "alibabacloud-tea-openapi", specifier = "~=0.3.9" }, { name = "chromadb", specifier = "==0.5.20" }, - { name = "clickhouse-connect", specifier = "~=0.7.16" }, + { name = "clickhouse-connect", specifier = "~=0.10.0" }, { name = "clickzetta-connector-python", specifier = ">=0.8.102" }, { name = "couchbase", specifier = "~=4.3.0" }, { name = "elasticsearch", specifier = "==8.14.0" }, From 2445d04d1965b321bec3061457315dce1a7d452f Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Mon, 24 Nov 2025 10:11:19 +0800 Subject: [PATCH 09/22] chore: fix de-DE translations (#28552) --- web/i18n/de-DE/app.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/i18n/de-DE/app.ts b/web/i18n/de-DE/app.ts index ce606d5089..ad761e81b3 100644 --- a/web/i18n/de-DE/app.ts +++ b/web/i18n/de-DE/app.ts @@ -149,7 +149,7 @@ const translation = { password: 'Passwort', databricksHost: 'Databricks-Workspace-URL', clientSecret: 'OAuth-Client-Geheimnis', - personalAccessToken: 'Persönlicher Zugriffsschlüssel (Legacy)', + personalAccessToken: 'Persönliches Zugriffstoken (veraltet)', 
experimentId: 'Experiment-ID', username: 'Benutzername', trackingUri: 'Tracking-URI', From b12057b7e5c726a8f7f0f1aa899db654382f9405 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=AF=97=E6=B5=93?= <844670992@qq.com> Date: Mon, 24 Nov 2025 10:49:33 +0800 Subject: [PATCH 10/22] fix: add `COMPOSE_PROFILES` param to middleware.env.example file (#28541) Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- docker/README.md | 4 +++- docker/middleware.env.example | 7 +++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docker/README.md b/docker/README.md index b5c46eb9fc..375570f106 100644 --- a/docker/README.md +++ b/docker/README.md @@ -40,7 +40,9 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T - Ensure the `middleware.env` file is created by running `cp middleware.env.example middleware.env` (refer to the `middleware.env.example` file). 1. **Running Middleware Services**: - Navigate to the `docker` directory. - - Execute `docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d` to start the middleware services. (Change the profile to other vector database if you are not using weaviate) + - Execute `docker compose --env-file middleware.env -f docker-compose.middleware.yaml -p dify up -d` to start PostgreSQL/MySQL (per `DB_TYPE`) plus the bundled Weaviate instance. + +> Compose automatically loads `COMPOSE_PROFILES=${DB_TYPE:-postgresql},weaviate` from `middleware.env`, so no extra `--profile` flags are needed. Adjust variables in `middleware.env` if you want a different combination of services. 
### Migration for Existing Users diff --git a/docker/middleware.env.example b/docker/middleware.env.example index 3374ddd537..dbfb75a8d6 100644 --- a/docker/middleware.env.example +++ b/docker/middleware.env.example @@ -134,6 +134,13 @@ WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED=true WEAVIATE_AUTHORIZATION_ADMINLIST_USERS=hello@dify.ai WEAVIATE_HOST_VOLUME=./volumes/weaviate +# ------------------------------ +# Docker Compose profile configuration +# ------------------------------ +# Loaded automatically when running `docker compose --env-file middleware.env ...`. +# Controls which DB/vector services start, so no extra `--profile` flag is needed. +COMPOSE_PROFILES=${DB_TYPE:-postgresql},weaviate + # ------------------------------ # Docker Compose Service Expose Host Port Configurations # ------------------------------ From 2c9e4355584c19fcb1daf7491c44357d0efe07fa Mon Sep 17 00:00:00 2001 From: Gritty_dev <101377478+codomposer@users.noreply.github.com> Date: Sun, 23 Nov 2025 21:50:09 -0500 Subject: [PATCH 11/22] feat: complete app modesls test script (#28549) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- .../unit_tests/models/test_app_models.py | 1151 +++++++++++++++++ 1 file changed, 1151 insertions(+) create mode 100644 api/tests/unit_tests/models/test_app_models.py diff --git a/api/tests/unit_tests/models/test_app_models.py b/api/tests/unit_tests/models/test_app_models.py new file mode 100644 index 0000000000..268ba1282a --- /dev/null +++ b/api/tests/unit_tests/models/test_app_models.py @@ -0,0 +1,1151 @@ +""" +Comprehensive unit tests for App models. 
+ +This test suite covers: +- App configuration validation +- App-Message relationships +- Conversation model integrity +- Annotation model relationships +""" + +import json +from datetime import UTC, datetime +from decimal import Decimal +from unittest.mock import MagicMock, patch +from uuid import uuid4 + +import pytest + +from models.model import ( + App, + AppAnnotationHitHistory, + AppAnnotationSetting, + AppMode, + AppModelConfig, + Conversation, + IconType, + Message, + MessageAnnotation, + Site, +) + + +class TestAppModelValidation: + """Test suite for App model validation and basic operations.""" + + def test_app_creation_with_required_fields(self): + """Test creating an app with all required fields.""" + # Arrange + tenant_id = str(uuid4()) + created_by = str(uuid4()) + + # Act + app = App( + tenant_id=tenant_id, + name="Test App", + mode=AppMode.CHAT, + enable_site=True, + enable_api=False, + created_by=created_by, + ) + + # Assert + assert app.name == "Test App" + assert app.tenant_id == tenant_id + assert app.mode == AppMode.CHAT + assert app.enable_site is True + assert app.enable_api is False + assert app.created_by == created_by + + def test_app_creation_with_optional_fields(self): + """Test creating an app with optional fields.""" + # Arrange & Act + app = App( + tenant_id=str(uuid4()), + name="Test App", + mode=AppMode.COMPLETION, + enable_site=True, + enable_api=True, + created_by=str(uuid4()), + description="Test description", + icon_type=IconType.EMOJI, + icon="🤖", + icon_background="#FF5733", + is_demo=True, + is_public=False, + api_rpm=100, + api_rph=1000, + ) + + # Assert + assert app.description == "Test description" + assert app.icon_type == IconType.EMOJI + assert app.icon == "🤖" + assert app.icon_background == "#FF5733" + assert app.is_demo is True + assert app.is_public is False + assert app.api_rpm == 100 + assert app.api_rph == 1000 + + def test_app_mode_validation(self): + """Test app mode enum values.""" + # Assert + expected_modes 
= { + "chat", + "completion", + "workflow", + "advanced-chat", + "agent-chat", + "channel", + "rag-pipeline", + } + assert {mode.value for mode in AppMode} == expected_modes + + def test_app_mode_value_of(self): + """Test AppMode.value_of method.""" + # Act & Assert + assert AppMode.value_of("chat") == AppMode.CHAT + assert AppMode.value_of("completion") == AppMode.COMPLETION + assert AppMode.value_of("workflow") == AppMode.WORKFLOW + + with pytest.raises(ValueError, match="invalid mode value"): + AppMode.value_of("invalid_mode") + + def test_icon_type_validation(self): + """Test icon type enum values.""" + # Assert + assert {t.value for t in IconType} == {"image", "emoji"} + + def test_app_desc_or_prompt_with_description(self): + """Test desc_or_prompt property when description exists.""" + # Arrange + app = App( + tenant_id=str(uuid4()), + name="Test App", + mode=AppMode.CHAT, + enable_site=True, + enable_api=False, + created_by=str(uuid4()), + description="App description", + ) + + # Act + result = app.desc_or_prompt + + # Assert + assert result == "App description" + + def test_app_desc_or_prompt_without_description(self): + """Test desc_or_prompt property when description is empty.""" + # Arrange + app = App( + tenant_id=str(uuid4()), + name="Test App", + mode=AppMode.CHAT, + enable_site=True, + enable_api=False, + created_by=str(uuid4()), + description="", + ) + + # Mock app_model_config property + with patch.object(App, "app_model_config", new_callable=lambda: property(lambda self: None)): + # Act + result = app.desc_or_prompt + + # Assert + assert result == "" + + def test_app_is_agent_property_false(self): + """Test is_agent property returns False when not configured as agent.""" + # Arrange + app = App( + tenant_id=str(uuid4()), + name="Test App", + mode=AppMode.CHAT, + enable_site=True, + enable_api=False, + created_by=str(uuid4()), + ) + + # Mock app_model_config to return None + with patch.object(App, "app_model_config", new_callable=lambda: 
property(lambda self: None)): + # Act + result = app.is_agent + + # Assert + assert result is False + + def test_app_mode_compatible_with_agent(self): + """Test mode_compatible_with_agent property.""" + # Arrange + app = App( + tenant_id=str(uuid4()), + name="Test App", + mode=AppMode.CHAT, + enable_site=True, + enable_api=False, + created_by=str(uuid4()), + ) + + # Mock is_agent to return False + with patch.object(App, "is_agent", new_callable=lambda: property(lambda self: False)): + # Act + result = app.mode_compatible_with_agent + + # Assert + assert result == AppMode.CHAT + + +class TestAppModelConfig: + """Test suite for AppModelConfig model.""" + + def test_app_model_config_creation(self): + """Test creating an AppModelConfig.""" + # Arrange + app_id = str(uuid4()) + created_by = str(uuid4()) + + # Act + config = AppModelConfig( + app_id=app_id, + provider="openai", + model_id="gpt-4", + created_by=created_by, + ) + + # Assert + assert config.app_id == app_id + assert config.provider == "openai" + assert config.model_id == "gpt-4" + assert config.created_by == created_by + + def test_app_model_config_with_configs_json(self): + """Test AppModelConfig with JSON configs.""" + # Arrange + configs = {"temperature": 0.7, "max_tokens": 1000} + + # Act + config = AppModelConfig( + app_id=str(uuid4()), + provider="openai", + model_id="gpt-4", + created_by=str(uuid4()), + configs=configs, + ) + + # Assert + assert config.configs == configs + + def test_app_model_config_model_dict_property(self): + """Test model_dict property.""" + # Arrange + model_data = {"provider": "openai", "name": "gpt-4"} + config = AppModelConfig( + app_id=str(uuid4()), + provider="openai", + model_id="gpt-4", + created_by=str(uuid4()), + model=json.dumps(model_data), + ) + + # Act + result = config.model_dict + + # Assert + assert result == model_data + + def test_app_model_config_model_dict_empty(self): + """Test model_dict property when model is None.""" + # Arrange + config = AppModelConfig( 
+ app_id=str(uuid4()), + provider="openai", + model_id="gpt-4", + created_by=str(uuid4()), + model=None, + ) + + # Act + result = config.model_dict + + # Assert + assert result == {} + + def test_app_model_config_suggested_questions_list(self): + """Test suggested_questions_list property.""" + # Arrange + questions = ["What can you do?", "How does this work?"] + config = AppModelConfig( + app_id=str(uuid4()), + provider="openai", + model_id="gpt-4", + created_by=str(uuid4()), + suggested_questions=json.dumps(questions), + ) + + # Act + result = config.suggested_questions_list + + # Assert + assert result == questions + + def test_app_model_config_annotation_reply_dict_disabled(self): + """Test annotation_reply_dict when annotation is disabled.""" + # Arrange + config = AppModelConfig( + app_id=str(uuid4()), + provider="openai", + model_id="gpt-4", + created_by=str(uuid4()), + ) + + # Mock database query to return None + with patch("models.model.db.session.query") as mock_query: + mock_query.return_value.where.return_value.first.return_value = None + + # Act + result = config.annotation_reply_dict + + # Assert + assert result == {"enabled": False} + + +class TestConversationModel: + """Test suite for Conversation model integrity.""" + + def test_conversation_creation_with_required_fields(self): + """Test creating a conversation with required fields.""" + # Arrange + app_id = str(uuid4()) + from_end_user_id = str(uuid4()) + + # Act + conversation = Conversation( + app_id=app_id, + mode=AppMode.CHAT, + name="Test Conversation", + status="normal", + from_source="api", + from_end_user_id=from_end_user_id, + ) + + # Assert + assert conversation.app_id == app_id + assert conversation.mode == AppMode.CHAT + assert conversation.name == "Test Conversation" + assert conversation.status == "normal" + assert conversation.from_source == "api" + assert conversation.from_end_user_id == from_end_user_id + + def test_conversation_with_inputs(self): + """Test conversation inputs 
property.""" + # Arrange + inputs = {"query": "Hello", "context": "test"} + conversation = Conversation( + app_id=str(uuid4()), + mode=AppMode.CHAT, + name="Test Conversation", + status="normal", + from_source="api", + from_end_user_id=str(uuid4()), + ) + conversation._inputs = inputs + + # Act + result = conversation.inputs + + # Assert + assert result == inputs + + def test_conversation_inputs_setter(self): + """Test conversation inputs setter.""" + # Arrange + conversation = Conversation( + app_id=str(uuid4()), + mode=AppMode.CHAT, + name="Test Conversation", + status="normal", + from_source="api", + from_end_user_id=str(uuid4()), + ) + inputs = {"query": "Hello", "context": "test"} + + # Act + conversation.inputs = inputs + + # Assert + assert conversation._inputs == inputs + + def test_conversation_summary_or_query_with_summary(self): + """Test summary_or_query property when summary exists.""" + # Arrange + conversation = Conversation( + app_id=str(uuid4()), + mode=AppMode.CHAT, + name="Test Conversation", + status="normal", + from_source="api", + from_end_user_id=str(uuid4()), + summary="Test summary", + ) + + # Act + result = conversation.summary_or_query + + # Assert + assert result == "Test summary" + + def test_conversation_summary_or_query_without_summary(self): + """Test summary_or_query property when summary is empty.""" + # Arrange + conversation = Conversation( + app_id=str(uuid4()), + mode=AppMode.CHAT, + name="Test Conversation", + status="normal", + from_source="api", + from_end_user_id=str(uuid4()), + summary=None, + ) + + # Mock first_message to return a message with query + mock_message = MagicMock() + mock_message.query = "First message query" + with patch.object(Conversation, "first_message", new_callable=lambda: property(lambda self: mock_message)): + # Act + result = conversation.summary_or_query + + # Assert + assert result == "First message query" + + def test_conversation_in_debug_mode(self): + """Test in_debug_mode property.""" + # 
Arrange + conversation = Conversation( + app_id=str(uuid4()), + mode=AppMode.CHAT, + name="Test Conversation", + status="normal", + from_source="api", + from_end_user_id=str(uuid4()), + override_model_configs='{"model": "gpt-4"}', + ) + + # Act + result = conversation.in_debug_mode + + # Assert + assert result is True + + def test_conversation_to_dict_serialization(self): + """Test conversation to_dict method.""" + # Arrange + app_id = str(uuid4()) + from_end_user_id = str(uuid4()) + conversation = Conversation( + app_id=app_id, + mode=AppMode.CHAT, + name="Test Conversation", + status="normal", + from_source="api", + from_end_user_id=from_end_user_id, + dialogue_count=5, + ) + conversation.id = str(uuid4()) + conversation._inputs = {"query": "test"} + + # Act + result = conversation.to_dict() + + # Assert + assert result["id"] == conversation.id + assert result["app_id"] == app_id + assert result["mode"] == AppMode.CHAT + assert result["name"] == "Test Conversation" + assert result["status"] == "normal" + assert result["from_source"] == "api" + assert result["from_end_user_id"] == from_end_user_id + assert result["dialogue_count"] == 5 + assert result["inputs"] == {"query": "test"} + + +class TestMessageModel: + """Test suite for Message model and App-Message relationships.""" + + def test_message_creation_with_required_fields(self): + """Test creating a message with required fields.""" + # Arrange + app_id = str(uuid4()) + conversation_id = str(uuid4()) + + # Act + message = Message( + app_id=app_id, + conversation_id=conversation_id, + query="What is AI?", + message={"role": "user", "content": "What is AI?"}, + answer="AI stands for Artificial Intelligence.", + message_unit_price=Decimal("0.0001"), + answer_unit_price=Decimal("0.0002"), + currency="USD", + from_source="api", + ) + + # Assert + assert message.app_id == app_id + assert message.conversation_id == conversation_id + assert message.query == "What is AI?" 
+ assert message.answer == "AI stands for Artificial Intelligence." + assert message.currency == "USD" + assert message.from_source == "api" + + def test_message_with_inputs(self): + """Test message inputs property.""" + # Arrange + inputs = {"query": "Hello", "context": "test"} + message = Message( + app_id=str(uuid4()), + conversation_id=str(uuid4()), + query="Test query", + message={"role": "user", "content": "Test"}, + answer="Test answer", + message_unit_price=Decimal("0.0001"), + answer_unit_price=Decimal("0.0002"), + currency="USD", + from_source="api", + ) + message._inputs = inputs + + # Act + result = message.inputs + + # Assert + assert result == inputs + + def test_message_inputs_setter(self): + """Test message inputs setter.""" + # Arrange + message = Message( + app_id=str(uuid4()), + conversation_id=str(uuid4()), + query="Test query", + message={"role": "user", "content": "Test"}, + answer="Test answer", + message_unit_price=Decimal("0.0001"), + answer_unit_price=Decimal("0.0002"), + currency="USD", + from_source="api", + ) + inputs = {"query": "Hello", "context": "test"} + + # Act + message.inputs = inputs + + # Assert + assert message._inputs == inputs + + def test_message_in_debug_mode(self): + """Test message in_debug_mode property.""" + # Arrange + message = Message( + app_id=str(uuid4()), + conversation_id=str(uuid4()), + query="Test query", + message={"role": "user", "content": "Test"}, + answer="Test answer", + message_unit_price=Decimal("0.0001"), + answer_unit_price=Decimal("0.0002"), + currency="USD", + from_source="api", + override_model_configs='{"model": "gpt-4"}', + ) + + # Act + result = message.in_debug_mode + + # Assert + assert result is True + + def test_message_metadata_dict_property(self): + """Test message_metadata_dict property.""" + # Arrange + metadata = {"retriever_resources": ["doc1", "doc2"], "usage": {"tokens": 100}} + message = Message( + app_id=str(uuid4()), + conversation_id=str(uuid4()), + query="Test query", + 
message={"role": "user", "content": "Test"}, + answer="Test answer", + message_unit_price=Decimal("0.0001"), + answer_unit_price=Decimal("0.0002"), + currency="USD", + from_source="api", + message_metadata=json.dumps(metadata), + ) + + # Act + result = message.message_metadata_dict + + # Assert + assert result == metadata + + def test_message_metadata_dict_empty(self): + """Test message_metadata_dict when metadata is None.""" + # Arrange + message = Message( + app_id=str(uuid4()), + conversation_id=str(uuid4()), + query="Test query", + message={"role": "user", "content": "Test"}, + answer="Test answer", + message_unit_price=Decimal("0.0001"), + answer_unit_price=Decimal("0.0002"), + currency="USD", + from_source="api", + message_metadata=None, + ) + + # Act + result = message.message_metadata_dict + + # Assert + assert result == {} + + def test_message_to_dict_serialization(self): + """Test message to_dict method.""" + # Arrange + app_id = str(uuid4()) + conversation_id = str(uuid4()) + now = datetime.now(UTC) + + message = Message( + app_id=app_id, + conversation_id=conversation_id, + query="Test query", + message={"role": "user", "content": "Test"}, + answer="Test answer", + message_unit_price=Decimal("0.0001"), + answer_unit_price=Decimal("0.0002"), + total_price=Decimal("0.0003"), + currency="USD", + from_source="api", + status="normal", + ) + message.id = str(uuid4()) + message._inputs = {"query": "test"} + message.created_at = now + message.updated_at = now + + # Act + result = message.to_dict() + + # Assert + assert result["id"] == message.id + assert result["app_id"] == app_id + assert result["conversation_id"] == conversation_id + assert result["query"] == "Test query" + assert result["answer"] == "Test answer" + assert result["status"] == "normal" + assert result["from_source"] == "api" + assert result["inputs"] == {"query": "test"} + assert "created_at" in result + assert "updated_at" in result + + def test_message_from_dict_deserialization(self): + 
"""Test message from_dict method.""" + # Arrange + message_id = str(uuid4()) + app_id = str(uuid4()) + conversation_id = str(uuid4()) + data = { + "id": message_id, + "app_id": app_id, + "conversation_id": conversation_id, + "model_id": "gpt-4", + "inputs": {"query": "test"}, + "query": "Test query", + "message": {"role": "user", "content": "Test"}, + "answer": "Test answer", + "total_price": Decimal("0.0003"), + "status": "normal", + "error": None, + "message_metadata": {"usage": {"tokens": 100}}, + "from_source": "api", + "from_end_user_id": None, + "from_account_id": None, + "created_at": "2024-01-01T00:00:00", + "updated_at": "2024-01-01T00:00:00", + "agent_based": False, + "workflow_run_id": None, + } + + # Act + message = Message.from_dict(data) + + # Assert + assert message.id == message_id + assert message.app_id == app_id + assert message.conversation_id == conversation_id + assert message.query == "Test query" + assert message.answer == "Test answer" + + +class TestMessageAnnotation: + """Test suite for MessageAnnotation and annotation relationships.""" + + def test_message_annotation_creation(self): + """Test creating a message annotation.""" + # Arrange + app_id = str(uuid4()) + conversation_id = str(uuid4()) + message_id = str(uuid4()) + account_id = str(uuid4()) + + # Act + annotation = MessageAnnotation( + app_id=app_id, + conversation_id=conversation_id, + message_id=message_id, + question="What is AI?", + content="AI stands for Artificial Intelligence.", + account_id=account_id, + ) + + # Assert + assert annotation.app_id == app_id + assert annotation.conversation_id == conversation_id + assert annotation.message_id == message_id + assert annotation.question == "What is AI?" + assert annotation.content == "AI stands for Artificial Intelligence." 
+ assert annotation.account_id == account_id + + def test_message_annotation_without_message_id(self): + """Test creating annotation without message_id.""" + # Arrange + app_id = str(uuid4()) + account_id = str(uuid4()) + + # Act + annotation = MessageAnnotation( + app_id=app_id, + question="What is AI?", + content="AI stands for Artificial Intelligence.", + account_id=account_id, + ) + + # Assert + assert annotation.app_id == app_id + assert annotation.message_id is None + assert annotation.conversation_id is None + assert annotation.question == "What is AI?" + assert annotation.content == "AI stands for Artificial Intelligence." + + def test_message_annotation_hit_count_default(self): + """Test annotation hit_count default value.""" + # Arrange + annotation = MessageAnnotation( + app_id=str(uuid4()), + question="Test question", + content="Test content", + account_id=str(uuid4()), + ) + + # Act & Assert - default value is set by database + # Model instantiation doesn't set server defaults + assert hasattr(annotation, "hit_count") + + +class TestAppAnnotationSetting: + """Test suite for AppAnnotationSetting model.""" + + def test_app_annotation_setting_creation(self): + """Test creating an app annotation setting.""" + # Arrange + app_id = str(uuid4()) + collection_binding_id = str(uuid4()) + created_user_id = str(uuid4()) + updated_user_id = str(uuid4()) + + # Act + setting = AppAnnotationSetting( + app_id=app_id, + score_threshold=0.8, + collection_binding_id=collection_binding_id, + created_user_id=created_user_id, + updated_user_id=updated_user_id, + ) + + # Assert + assert setting.app_id == app_id + assert setting.score_threshold == 0.8 + assert setting.collection_binding_id == collection_binding_id + assert setting.created_user_id == created_user_id + assert setting.updated_user_id == updated_user_id + + def test_app_annotation_setting_score_threshold_validation(self): + """Test score threshold values.""" + # Arrange & Act + setting_high = 
AppAnnotationSetting( + app_id=str(uuid4()), + score_threshold=0.95, + collection_binding_id=str(uuid4()), + created_user_id=str(uuid4()), + updated_user_id=str(uuid4()), + ) + setting_low = AppAnnotationSetting( + app_id=str(uuid4()), + score_threshold=0.5, + collection_binding_id=str(uuid4()), + created_user_id=str(uuid4()), + updated_user_id=str(uuid4()), + ) + + # Assert + assert setting_high.score_threshold == 0.95 + assert setting_low.score_threshold == 0.5 + + +class TestAppAnnotationHitHistory: + """Test suite for AppAnnotationHitHistory model.""" + + def test_app_annotation_hit_history_creation(self): + """Test creating an annotation hit history.""" + # Arrange + app_id = str(uuid4()) + annotation_id = str(uuid4()) + message_id = str(uuid4()) + account_id = str(uuid4()) + + # Act + history = AppAnnotationHitHistory( + app_id=app_id, + annotation_id=annotation_id, + source="api", + question="What is AI?", + account_id=account_id, + score=0.95, + message_id=message_id, + annotation_question="What is AI?", + annotation_content="AI stands for Artificial Intelligence.", + ) + + # Assert + assert history.app_id == app_id + assert history.annotation_id == annotation_id + assert history.source == "api" + assert history.question == "What is AI?" + assert history.account_id == account_id + assert history.score == 0.95 + assert history.message_id == message_id + assert history.annotation_question == "What is AI?" + assert history.annotation_content == "AI stands for Artificial Intelligence." 
+ + def test_app_annotation_hit_history_score_values(self): + """Test annotation hit history with different score values.""" + # Arrange & Act + history_high = AppAnnotationHitHistory( + app_id=str(uuid4()), + annotation_id=str(uuid4()), + source="api", + question="Test", + account_id=str(uuid4()), + score=0.99, + message_id=str(uuid4()), + annotation_question="Test", + annotation_content="Content", + ) + history_low = AppAnnotationHitHistory( + app_id=str(uuid4()), + annotation_id=str(uuid4()), + source="api", + question="Test", + account_id=str(uuid4()), + score=0.6, + message_id=str(uuid4()), + annotation_question="Test", + annotation_content="Content", + ) + + # Assert + assert history_high.score == 0.99 + assert history_low.score == 0.6 + + +class TestSiteModel: + """Test suite for Site model.""" + + def test_site_creation_with_required_fields(self): + """Test creating a site with required fields.""" + # Arrange + app_id = str(uuid4()) + + # Act + site = Site( + app_id=app_id, + title="Test Site", + default_language="en-US", + customize_token_strategy="uuid", + ) + + # Assert + assert site.app_id == app_id + assert site.title == "Test Site" + assert site.default_language == "en-US" + assert site.customize_token_strategy == "uuid" + + def test_site_creation_with_optional_fields(self): + """Test creating a site with optional fields.""" + # Arrange & Act + site = Site( + app_id=str(uuid4()), + title="Test Site", + default_language="en-US", + customize_token_strategy="uuid", + icon_type=IconType.EMOJI, + icon="🌐", + icon_background="#0066CC", + description="Test site description", + copyright="© 2024 Test", + privacy_policy="https://example.com/privacy", + ) + + # Assert + assert site.icon_type == IconType.EMOJI + assert site.icon == "🌐" + assert site.icon_background == "#0066CC" + assert site.description == "Test site description" + assert site.copyright == "© 2024 Test" + assert site.privacy_policy == "https://example.com/privacy" + + def 
test_site_custom_disclaimer_setter(self): + """Test site custom_disclaimer setter.""" + # Arrange + site = Site( + app_id=str(uuid4()), + title="Test Site", + default_language="en-US", + customize_token_strategy="uuid", + ) + + # Act + site.custom_disclaimer = "This is a test disclaimer" + + # Assert + assert site.custom_disclaimer == "This is a test disclaimer" + + def test_site_custom_disclaimer_exceeds_limit(self): + """Test site custom_disclaimer with excessive length.""" + # Arrange + site = Site( + app_id=str(uuid4()), + title="Test Site", + default_language="en-US", + customize_token_strategy="uuid", + ) + long_disclaimer = "x" * 513 # Exceeds 512 character limit + + # Act & Assert + with pytest.raises(ValueError, match="Custom disclaimer cannot exceed 512 characters"): + site.custom_disclaimer = long_disclaimer + + def test_site_generate_code(self): + """Test Site.generate_code static method.""" + # Mock database query to return 0 (no existing codes) + with patch("models.model.db.session.query") as mock_query: + mock_query.return_value.where.return_value.count.return_value = 0 + + # Act + code = Site.generate_code(8) + + # Assert + assert isinstance(code, str) + assert len(code) == 8 + + +class TestModelIntegration: + """Test suite for model integration scenarios.""" + + def test_complete_app_conversation_message_hierarchy(self): + """Test complete hierarchy from app to message.""" + # Arrange + tenant_id = str(uuid4()) + app_id = str(uuid4()) + conversation_id = str(uuid4()) + message_id = str(uuid4()) + created_by = str(uuid4()) + + # Create app + app = App( + tenant_id=tenant_id, + name="Test App", + mode=AppMode.CHAT, + enable_site=True, + enable_api=True, + created_by=created_by, + ) + app.id = app_id + + # Create conversation + conversation = Conversation( + app_id=app_id, + mode=AppMode.CHAT, + name="Test Conversation", + status="normal", + from_source="api", + from_end_user_id=str(uuid4()), + ) + conversation.id = conversation_id + + # Create 
message + message = Message( + app_id=app_id, + conversation_id=conversation_id, + query="Test query", + message={"role": "user", "content": "Test"}, + answer="Test answer", + message_unit_price=Decimal("0.0001"), + answer_unit_price=Decimal("0.0002"), + currency="USD", + from_source="api", + ) + message.id = message_id + + # Assert + assert app.id == app_id + assert conversation.app_id == app_id + assert message.app_id == app_id + assert message.conversation_id == conversation_id + assert app.mode == AppMode.CHAT + assert conversation.mode == AppMode.CHAT + + def test_app_with_annotation_setting(self): + """Test app with annotation setting.""" + # Arrange + app_id = str(uuid4()) + collection_binding_id = str(uuid4()) + created_user_id = str(uuid4()) + + # Create app + app = App( + tenant_id=str(uuid4()), + name="Test App", + mode=AppMode.CHAT, + enable_site=True, + enable_api=True, + created_by=created_user_id, + ) + app.id = app_id + + # Create annotation setting + setting = AppAnnotationSetting( + app_id=app_id, + score_threshold=0.85, + collection_binding_id=collection_binding_id, + created_user_id=created_user_id, + updated_user_id=created_user_id, + ) + + # Assert + assert setting.app_id == app.id + assert setting.score_threshold == 0.85 + + def test_message_with_annotation(self): + """Test message with annotation.""" + # Arrange + app_id = str(uuid4()) + conversation_id = str(uuid4()) + message_id = str(uuid4()) + account_id = str(uuid4()) + + # Create message + message = Message( + app_id=app_id, + conversation_id=conversation_id, + query="What is AI?", + message={"role": "user", "content": "What is AI?"}, + answer="AI stands for Artificial Intelligence.", + message_unit_price=Decimal("0.0001"), + answer_unit_price=Decimal("0.0002"), + currency="USD", + from_source="api", + ) + message.id = message_id + + # Create annotation + annotation = MessageAnnotation( + app_id=app_id, + conversation_id=conversation_id, + message_id=message_id, + question="What is 
AI?", + content="AI stands for Artificial Intelligence.", + account_id=account_id, + ) + + # Assert + assert annotation.app_id == message.app_id + assert annotation.conversation_id == message.conversation_id + assert annotation.message_id == message.id + + def test_annotation_hit_history_tracking(self): + """Test annotation hit history tracking.""" + # Arrange + app_id = str(uuid4()) + annotation_id = str(uuid4()) + message_id = str(uuid4()) + account_id = str(uuid4()) + + # Create annotation + annotation = MessageAnnotation( + app_id=app_id, + question="What is AI?", + content="AI stands for Artificial Intelligence.", + account_id=account_id, + ) + annotation.id = annotation_id + + # Create hit history + history = AppAnnotationHitHistory( + app_id=app_id, + annotation_id=annotation_id, + source="api", + question="What is AI?", + account_id=account_id, + score=0.92, + message_id=message_id, + annotation_question="What is AI?", + annotation_content="AI stands for Artificial Intelligence.", + ) + + # Assert + assert history.app_id == annotation.app_id + assert history.annotation_id == annotation.id + assert history.score == 0.92 + + def test_app_with_site(self): + """Test app with site.""" + # Arrange + app_id = str(uuid4()) + + # Create app + app = App( + tenant_id=str(uuid4()), + name="Test App", + mode=AppMode.CHAT, + enable_site=True, + enable_api=True, + created_by=str(uuid4()), + ) + app.id = app_id + + # Create site + site = Site( + app_id=app_id, + title="Test Site", + default_language="en-US", + customize_token_strategy="uuid", + ) + + # Assert + assert site.app_id == app.id + assert app.enable_site is True From 6241b87f905c2380111a95ae619ac83fe7e4cc77 Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Mon, 24 Nov 2025 11:50:20 +0900 Subject: [PATCH 12/22] more typed orm (#28519) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- 
api/models/dataset.py | 28 ++-- api/models/model.py | 88 +++++----- api/models/provider.py | 42 +++-- api/models/trigger.py | 11 +- .../services/test_agent_service.py | 1 + .../services/test_annotation_service.py | 157 ++++++++++-------- .../tasks/test_add_document_to_index_task.py | 2 +- .../unit_tests/core/test_provider_manager.py | 11 +- 8 files changed, 179 insertions(+), 161 deletions(-) diff --git a/api/models/dataset.py b/api/models/dataset.py index 4bc802bb1c..3f2d16d3bd 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -1026,19 +1026,21 @@ class Embedding(Base): return cast(list[float], pickle.loads(self.embedding)) # noqa: S301 -class DatasetCollectionBinding(Base): +class DatasetCollectionBinding(TypeBase): __tablename__ = "dataset_collection_bindings" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="dataset_collection_bindings_pkey"), sa.Index("provider_model_name_idx", "provider_name", "model_name"), ) - id = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4())) + id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4()), init=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) model_name: Mapped[str] = mapped_column(String(255), nullable=False) - type = mapped_column(String(40), server_default=sa.text("'dataset'"), nullable=False) - collection_name = mapped_column(String(64), nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + type: Mapped[str] = mapped_column(String(40), server_default=sa.text("'dataset'"), nullable=False) + collection_name: Mapped[str] = mapped_column(String(64), nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) class TidbAuthBinding(Base): @@ -1176,7 +1178,7 @@ class ExternalKnowledgeBindings(TypeBase): ) -class DatasetAutoDisableLog(Base): +class 
DatasetAutoDisableLog(TypeBase): __tablename__ = "dataset_auto_disable_logs" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="dataset_auto_disable_log_pkey"), @@ -1185,12 +1187,14 @@ class DatasetAutoDisableLog(Base): sa.Index("dataset_auto_disable_log_created_atx", "created_at"), ) - id = mapped_column(StringUUID, default=lambda: str(uuid4())) - tenant_id = mapped_column(StringUUID, nullable=False) - dataset_id = mapped_column(StringUUID, nullable=False) - document_id = mapped_column(StringUUID, nullable=False) - notified: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp()) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + document_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + notified: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"), default=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=sa.func.current_timestamp(), init=False + ) class RateLimitLog(TypeBase): diff --git a/api/models/model.py b/api/models/model.py index b0bf46e7d7..e2b9da46f1 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -16,7 +16,7 @@ from sqlalchemy.orm import Mapped, Session, mapped_column from configs import dify_config from constants import DEFAULT_FILE_NUMBER_LIMITS -from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType +from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod from core.file import helpers as file_helpers from core.tools.signature import sign_tool_file from core.workflow.enums import WorkflowExecutionStatus @@ -594,7 +594,7 @@ class InstalledApp(TypeBase): return tenant -class 
OAuthProviderApp(Base): +class OAuthProviderApp(TypeBase): """ Globally shared OAuth provider app information. Only for Dify Cloud. @@ -606,18 +606,21 @@ class OAuthProviderApp(Base): sa.Index("oauth_provider_app_client_id_idx", "client_id"), ) - id = mapped_column(StringUUID, default=lambda: str(uuidv7())) - app_icon = mapped_column(String(255), nullable=False) - app_label = mapped_column(sa.JSON, nullable=False, default="{}") - client_id = mapped_column(String(255), nullable=False) - client_secret = mapped_column(String(255), nullable=False) - redirect_uris = mapped_column(sa.JSON, nullable=False, default="[]") - scope = mapped_column( + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False) + app_icon: Mapped[str] = mapped_column(String(255), nullable=False) + client_id: Mapped[str] = mapped_column(String(255), nullable=False) + client_secret: Mapped[str] = mapped_column(String(255), nullable=False) + app_label: Mapped[dict] = mapped_column(sa.JSON, nullable=False, default_factory=dict) + redirect_uris: Mapped[list] = mapped_column(sa.JSON, nullable=False, default_factory=list) + scope: Mapped[str] = mapped_column( String(255), nullable=False, server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), + default="read:name read:email read:avatar read:interface_language read:timezone", + ) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False ) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) class Conversation(Base): @@ -1335,7 +1338,7 @@ class MessageFeedback(Base): } -class MessageFile(Base): +class MessageFile(TypeBase): __tablename__ = "message_files" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="message_file_pkey"), @@ -1343,37 +1346,18 @@ class MessageFile(Base): sa.Index("message_file_created_by_idx", "created_by"), ) - def __init__( - self, - *, - 
message_id: str, - type: FileType, - transfer_method: FileTransferMethod, - url: str | None = None, - belongs_to: Literal["user", "assistant"] | None = None, - upload_file_id: str | None = None, - created_by_role: CreatorUserRole, - created_by: str, - ): - self.message_id = message_id - self.type = type - self.transfer_method = transfer_method - self.url = url - self.belongs_to = belongs_to - self.upload_file_id = upload_file_id - self.created_by_role = created_by_role.value - self.created_by = created_by - - id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) type: Mapped[str] = mapped_column(String(255), nullable=False) - transfer_method: Mapped[str] = mapped_column(String(255), nullable=False) - url: Mapped[str | None] = mapped_column(LongText, nullable=True) - belongs_to: Mapped[str | None] = mapped_column(String(255), nullable=True) - upload_file_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) - created_by_role: Mapped[str] = mapped_column(String(255), nullable=False) + transfer_method: Mapped[FileTransferMethod] = mapped_column(String(255), nullable=False) + created_by_role: Mapped[CreatorUserRole] = mapped_column(String(255), nullable=False) created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) - created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + belongs_to: Mapped[Literal["user", "assistant"] | None] = mapped_column(String(255), nullable=True, default=None) + url: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) + upload_file_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) class 
MessageAnnotation(Base): @@ -1447,22 +1431,28 @@ class AppAnnotationHitHistory(Base): return account -class AppAnnotationSetting(Base): +class AppAnnotationSetting(TypeBase): __tablename__ = "app_annotation_settings" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="app_annotation_settings_pkey"), sa.Index("app_annotation_settings_app_idx", "app_id"), ) - id = mapped_column(StringUUID, default=lambda: str(uuid4())) - app_id = mapped_column(StringUUID, nullable=False) - score_threshold = mapped_column(Float, nullable=False, server_default=sa.text("0")) - collection_binding_id = mapped_column(StringUUID, nullable=False) - created_user_id = mapped_column(StringUUID, nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_user_id = mapped_column(StringUUID, nullable=False) - updated_at = mapped_column( - sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + score_threshold: Mapped[float] = mapped_column(Float, nullable=False, server_default=sa.text("0")) + collection_binding_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, ) @property diff --git a/api/models/provider.py b/api/models/provider.py index a840a483ab..577e098a2e 100644 --- a/api/models/provider.py +++ b/api/models/provider.py @@ -9,7 +9,7 @@ from sqlalchemy.orm import 
Mapped, mapped_column from libs.uuid_utils import uuidv7 -from .base import Base, TypeBase +from .base import TypeBase from .engine import db from .types import LongText, StringUUID @@ -262,7 +262,7 @@ class ProviderModelSetting(TypeBase): ) -class LoadBalancingModelConfig(Base): +class LoadBalancingModelConfig(TypeBase): """ Configurations for load balancing models. """ @@ -273,23 +273,25 @@ class LoadBalancingModelConfig(Base): sa.Index("load_balancing_model_config_tenant_provider_model_idx", "tenant_id", "provider_name", "model_type"), ) - id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) model_name: Mapped[str] = mapped_column(String(255), nullable=False) model_type: Mapped[str] = mapped_column(String(40), nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False) - encrypted_config: Mapped[str | None] = mapped_column(LongText, nullable=True) - credential_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) - credential_source_type: Mapped[str | None] = mapped_column(String(40), nullable=True) - enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("true")) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + encrypted_config: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) + credential_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) + credential_source_type: Mapped[str | None] = mapped_column(String(40), nullable=True, default=None) + enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("true"), default=True) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, 
server_default=func.current_timestamp(), init=False + ) updated_at: Mapped[datetime] = mapped_column( - DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() + DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False ) -class ProviderCredential(Base): +class ProviderCredential(TypeBase): """ Provider credential - stores multiple named credentials for each provider """ @@ -300,18 +302,20 @@ class ProviderCredential(Base): sa.Index("provider_credential_tenant_provider_idx", "tenant_id", "provider_name"), ) - id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7())) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) credential_name: Mapped[str] = mapped_column(String(255), nullable=False) encrypted_config: Mapped[str] = mapped_column(LongText, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) updated_at: Mapped[datetime] = mapped_column( - DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() + DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False ) -class ProviderModelCredential(Base): +class ProviderModelCredential(TypeBase): """ Provider model credential - stores multiple named credentials for each provider model """ @@ -328,14 +332,16 @@ class ProviderModelCredential(Base): ), ) - id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7())) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False) tenant_id: 
Mapped[str] = mapped_column(StringUUID, nullable=False) provider_name: Mapped[str] = mapped_column(String(255), nullable=False) model_name: Mapped[str] = mapped_column(String(255), nullable=False) model_type: Mapped[str] = mapped_column(String(40), nullable=False) credential_name: Mapped[str] = mapped_column(String(255), nullable=False) encrypted_config: Mapped[str] = mapped_column(LongText, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column( - DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp(), init=False ) diff --git a/api/models/trigger.py b/api/models/trigger.py index 753fdb227b..e89309551a 100644 --- a/api/models/trigger.py +++ b/api/models/trigger.py @@ -129,27 +129,30 @@ class TriggerOAuthSystemClient(TypeBase): # tenant level trigger oauth client params (client_id, client_secret, etc.) 
-class TriggerOAuthTenantClient(Base): +class TriggerOAuthTenantClient(TypeBase): __tablename__ = "trigger_oauth_tenant_clients" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="trigger_oauth_tenant_client_pkey"), sa.UniqueConstraint("tenant_id", "plugin_id", "provider", name="unique_trigger_oauth_tenant_client"), ) - id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) # tenant id tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) plugin_id: Mapped[str] = mapped_column(String(255), nullable=False) provider: Mapped[str] = mapped_column(String(255), nullable=False) enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("true"), default=True) # oauth params of the trigger provider - encrypted_oauth_params: Mapped[str] = mapped_column(LongText, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + encrypted_oauth_params: Mapped[str] = mapped_column(LongText, nullable=False, default="{}") + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) updated_at: Mapped[datetime] = mapped_column( DateTime, nullable=False, server_default=func.current_timestamp(), server_onupdate=func.current_timestamp(), + init=False, ) @property diff --git a/api/tests/test_containers_integration_tests/services/test_agent_service.py b/api/tests/test_containers_integration_tests/services/test_agent_service.py index ca513319b2..3be2798085 100644 --- a/api/tests/test_containers_integration_tests/services/test_agent_service.py +++ b/api/tests/test_containers_integration_tests/services/test_agent_service.py @@ -852,6 +852,7 @@ class TestAgentService: # Add files to message from models.model import MessageFile + assert message.from_account_id is not None message_file1 = MessageFile( 
message_id=message.id, type=FileType.IMAGE, diff --git a/api/tests/test_containers_integration_tests/services/test_annotation_service.py b/api/tests/test_containers_integration_tests/services/test_annotation_service.py index 2b03ec1c26..da73122cd7 100644 --- a/api/tests/test_containers_integration_tests/services/test_annotation_service.py +++ b/api/tests/test_containers_integration_tests/services/test_annotation_service.py @@ -860,22 +860,24 @@ class TestAnnotationService: from models.model import AppAnnotationSetting # Create a collection binding first - collection_binding = DatasetCollectionBinding() - collection_binding.id = fake.uuid4() - collection_binding.provider_name = "openai" - collection_binding.model_name = "text-embedding-ada-002" - collection_binding.type = "annotation" - collection_binding.collection_name = f"annotation_collection_{fake.uuid4()}" + collection_binding = DatasetCollectionBinding( + provider_name="openai", + model_name="text-embedding-ada-002", + type="annotation", + collection_name=f"annotation_collection_{fake.uuid4()}", + ) + collection_binding.id = str(fake.uuid4()) db.session.add(collection_binding) db.session.flush() # Create annotation setting - annotation_setting = AppAnnotationSetting() - annotation_setting.app_id = app.id - annotation_setting.score_threshold = 0.8 - annotation_setting.collection_binding_id = collection_binding.id - annotation_setting.created_user_id = account.id - annotation_setting.updated_user_id = account.id + annotation_setting = AppAnnotationSetting( + app_id=app.id, + score_threshold=0.8, + collection_binding_id=collection_binding.id, + created_user_id=account.id, + updated_user_id=account.id, + ) db.session.add(annotation_setting) db.session.commit() @@ -919,22 +921,24 @@ class TestAnnotationService: from models.model import AppAnnotationSetting # Create a collection binding first - collection_binding = DatasetCollectionBinding() - collection_binding.id = fake.uuid4() - collection_binding.provider_name 
= "openai" - collection_binding.model_name = "text-embedding-ada-002" - collection_binding.type = "annotation" - collection_binding.collection_name = f"annotation_collection_{fake.uuid4()}" + collection_binding = DatasetCollectionBinding( + provider_name="openai", + model_name="text-embedding-ada-002", + type="annotation", + collection_name=f"annotation_collection_{fake.uuid4()}", + ) + collection_binding.id = str(fake.uuid4()) db.session.add(collection_binding) db.session.flush() # Create annotation setting - annotation_setting = AppAnnotationSetting() - annotation_setting.app_id = app.id - annotation_setting.score_threshold = 0.8 - annotation_setting.collection_binding_id = collection_binding.id - annotation_setting.created_user_id = account.id - annotation_setting.updated_user_id = account.id + annotation_setting = AppAnnotationSetting( + app_id=app.id, + score_threshold=0.8, + collection_binding_id=collection_binding.id, + created_user_id=account.id, + updated_user_id=account.id, + ) db.session.add(annotation_setting) db.session.commit() @@ -1020,22 +1024,24 @@ class TestAnnotationService: from models.model import AppAnnotationSetting # Create a collection binding first - collection_binding = DatasetCollectionBinding() - collection_binding.id = fake.uuid4() - collection_binding.provider_name = "openai" - collection_binding.model_name = "text-embedding-ada-002" - collection_binding.type = "annotation" - collection_binding.collection_name = f"annotation_collection_{fake.uuid4()}" + collection_binding = DatasetCollectionBinding( + provider_name="openai", + model_name="text-embedding-ada-002", + type="annotation", + collection_name=f"annotation_collection_{fake.uuid4()}", + ) + collection_binding.id = str(fake.uuid4()) db.session.add(collection_binding) db.session.flush() # Create annotation setting - annotation_setting = AppAnnotationSetting() - annotation_setting.app_id = app.id - annotation_setting.score_threshold = 0.8 - annotation_setting.collection_binding_id 
= collection_binding.id - annotation_setting.created_user_id = account.id - annotation_setting.updated_user_id = account.id + annotation_setting = AppAnnotationSetting( + app_id=app.id, + score_threshold=0.8, + collection_binding_id=collection_binding.id, + created_user_id=account.id, + updated_user_id=account.id, + ) db.session.add(annotation_setting) db.session.commit() @@ -1080,22 +1086,24 @@ class TestAnnotationService: from models.model import AppAnnotationSetting # Create a collection binding first - collection_binding = DatasetCollectionBinding() - collection_binding.id = fake.uuid4() - collection_binding.provider_name = "openai" - collection_binding.model_name = "text-embedding-ada-002" - collection_binding.type = "annotation" - collection_binding.collection_name = f"annotation_collection_{fake.uuid4()}" + collection_binding = DatasetCollectionBinding( + provider_name="openai", + model_name="text-embedding-ada-002", + type="annotation", + collection_name=f"annotation_collection_{fake.uuid4()}", + ) + collection_binding.id = str(fake.uuid4()) db.session.add(collection_binding) db.session.flush() # Create annotation setting - annotation_setting = AppAnnotationSetting() - annotation_setting.app_id = app.id - annotation_setting.score_threshold = 0.8 - annotation_setting.collection_binding_id = collection_binding.id - annotation_setting.created_user_id = account.id - annotation_setting.updated_user_id = account.id + annotation_setting = AppAnnotationSetting( + app_id=app.id, + score_threshold=0.8, + collection_binding_id=collection_binding.id, + created_user_id=account.id, + updated_user_id=account.id, + ) db.session.add(annotation_setting) db.session.commit() @@ -1151,22 +1159,25 @@ class TestAnnotationService: from models.model import AppAnnotationSetting # Create a collection binding first - collection_binding = DatasetCollectionBinding() - collection_binding.id = fake.uuid4() - collection_binding.provider_name = "openai" - collection_binding.model_name = 
"text-embedding-ada-002" - collection_binding.type = "annotation" - collection_binding.collection_name = f"annotation_collection_{fake.uuid4()}" + collection_binding = DatasetCollectionBinding( + provider_name="openai", + model_name="text-embedding-ada-002", + type="annotation", + collection_name=f"annotation_collection_{fake.uuid4()}", + ) + collection_binding.id = str(fake.uuid4()) db.session.add(collection_binding) db.session.flush() # Create annotation setting - annotation_setting = AppAnnotationSetting() - annotation_setting.app_id = app.id - annotation_setting.score_threshold = 0.8 - annotation_setting.collection_binding_id = collection_binding.id - annotation_setting.created_user_id = account.id - annotation_setting.updated_user_id = account.id + annotation_setting = AppAnnotationSetting( + app_id=app.id, + score_threshold=0.8, + collection_binding_id=collection_binding.id, + created_user_id=account.id, + updated_user_id=account.id, + ) + db.session.add(annotation_setting) db.session.commit() @@ -1211,22 +1222,24 @@ class TestAnnotationService: from models.model import AppAnnotationSetting # Create a collection binding first - collection_binding = DatasetCollectionBinding() - collection_binding.id = fake.uuid4() - collection_binding.provider_name = "openai" - collection_binding.model_name = "text-embedding-ada-002" - collection_binding.type = "annotation" - collection_binding.collection_name = f"annotation_collection_{fake.uuid4()}" + collection_binding = DatasetCollectionBinding( + provider_name="openai", + model_name="text-embedding-ada-002", + type="annotation", + collection_name=f"annotation_collection_{fake.uuid4()}", + ) + collection_binding.id = str(fake.uuid4()) db.session.add(collection_binding) db.session.flush() # Create annotation setting - annotation_setting = AppAnnotationSetting() - annotation_setting.app_id = app.id - annotation_setting.score_threshold = 0.8 - annotation_setting.collection_binding_id = collection_binding.id - 
annotation_setting.created_user_id = account.id - annotation_setting.updated_user_id = account.id + annotation_setting = AppAnnotationSetting( + app_id=app.id, + score_threshold=0.8, + collection_binding_id=collection_binding.id, + created_user_id=account.id, + updated_user_id=account.id, + ) db.session.add(annotation_setting) db.session.commit() diff --git a/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py index f1530bcac6..9478bb9ddb 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py @@ -502,11 +502,11 @@ class TestAddDocumentToIndexTask: auto_disable_logs = [] for _ in range(2): log_entry = DatasetAutoDisableLog( - id=fake.uuid4(), tenant_id=document.tenant_id, dataset_id=dataset.id, document_id=document.id, ) + log_entry.id = str(fake.uuid4()) db.session.add(log_entry) auto_disable_logs.append(log_entry) diff --git a/api/tests/unit_tests/core/test_provider_manager.py b/api/tests/unit_tests/core/test_provider_manager.py index dbbda5f74c..3163d53b87 100644 --- a/api/tests/unit_tests/core/test_provider_manager.py +++ b/api/tests/unit_tests/core/test_provider_manager.py @@ -39,9 +39,9 @@ def test__to_model_settings(mocker: MockerFixture, mock_provider_entity): ps.id = "id" provider_model_settings = [ps] + load_balancing_model_configs = [ LoadBalancingModelConfig( - id="id1", tenant_id="tenant_id", provider_name="openai", model_name="gpt-4", @@ -51,7 +51,6 @@ def test__to_model_settings(mocker: MockerFixture, mock_provider_entity): enabled=True, ), LoadBalancingModelConfig( - id="id2", tenant_id="tenant_id", provider_name="openai", model_name="gpt-4", @@ -61,6 +60,8 @@ def test__to_model_settings(mocker: MockerFixture, mock_provider_entity): enabled=True, ), ] + load_balancing_model_configs[0].id = "id1" + 
load_balancing_model_configs[1].id = "id2" mocker.patch( "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} @@ -101,7 +102,6 @@ def test__to_model_settings_only_one_lb(mocker: MockerFixture, mock_provider_ent provider_model_settings = [ps] load_balancing_model_configs = [ LoadBalancingModelConfig( - id="id1", tenant_id="tenant_id", provider_name="openai", model_name="gpt-4", @@ -111,6 +111,7 @@ def test__to_model_settings_only_one_lb(mocker: MockerFixture, mock_provider_ent enabled=True, ) ] + load_balancing_model_configs[0].id = "id1" mocker.patch( "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} @@ -148,7 +149,6 @@ def test__to_model_settings_lb_disabled(mocker: MockerFixture, mock_provider_ent provider_model_settings = [ps] load_balancing_model_configs = [ LoadBalancingModelConfig( - id="id1", tenant_id="tenant_id", provider_name="openai", model_name="gpt-4", @@ -158,7 +158,6 @@ def test__to_model_settings_lb_disabled(mocker: MockerFixture, mock_provider_ent enabled=True, ), LoadBalancingModelConfig( - id="id2", tenant_id="tenant_id", provider_name="openai", model_name="gpt-4", @@ -168,6 +167,8 @@ def test__to_model_settings_lb_disabled(mocker: MockerFixture, mock_provider_ent enabled=True, ), ] + load_balancing_model_configs[0].id = "id1" + load_balancing_model_configs[1].id = "id2" mocker.patch( "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} From 8a995d0c21f44f0fef67ed3fa7f0227a3d7fcfbd Mon Sep 17 00:00:00 2001 From: wangxiaolei Date: Mon, 24 Nov 2025 11:06:06 +0800 Subject: [PATCH 13/22] chore: not using db.session.get (#28555) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- api/models/workflow.py | 12 +- .../models/test_workflow_trigger_log.py | 188 ++++++++++++++++++ 2 files changed, 196 insertions(+), 4 deletions(-) create mode 100644 
api/tests/unit_tests/models/test_workflow_trigger_log.py diff --git a/api/models/workflow.py b/api/models/workflow.py index 3ebc36bee3..0280353d45 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -869,16 +869,20 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo @property def created_by_account(self): created_by_role = CreatorUserRole(self.created_by_role) - # TODO(-LAN-): Avoid using db.session.get() here. - return db.session.get(Account, self.created_by) if created_by_role == CreatorUserRole.ACCOUNT else None + if created_by_role == CreatorUserRole.ACCOUNT: + stmt = select(Account).where(Account.id == self.created_by) + return db.session.scalar(stmt) + return None @property def created_by_end_user(self): from .model import EndUser created_by_role = CreatorUserRole(self.created_by_role) - # TODO(-LAN-): Avoid using db.session.get() here. - return db.session.get(EndUser, self.created_by) if created_by_role == CreatorUserRole.END_USER else None + if created_by_role == CreatorUserRole.END_USER: + stmt = select(EndUser).where(EndUser.id == self.created_by) + return db.session.scalar(stmt) + return None @property def inputs_dict(self): diff --git a/api/tests/unit_tests/models/test_workflow_trigger_log.py b/api/tests/unit_tests/models/test_workflow_trigger_log.py new file mode 100644 index 0000000000..7fdad92fb6 --- /dev/null +++ b/api/tests/unit_tests/models/test_workflow_trigger_log.py @@ -0,0 +1,188 @@ +import types + +import pytest + +from models.engine import db +from models.enums import CreatorUserRole +from models.workflow import WorkflowNodeExecutionModel + + +@pytest.fixture +def fake_db_scalar(monkeypatch): + """Provide a controllable fake for db.session.scalar (SQLAlchemy 2.0 style).""" + calls = [] + + def _install(side_effect): + def _fake_scalar(statement): + calls.append(statement) + return side_effect(statement) + + # Patch the modern API used by the model implementation + monkeypatch.setattr(db.session, 
"scalar", _fake_scalar) + + # Backward-compatibility: if the implementation still uses db.session.get, + # make it delegate to the same side_effect so tests remain valid on older code. + if hasattr(db.session, "get"): + + def _fake_get(*_args, **_kwargs): + return side_effect(None) + + monkeypatch.setattr(db.session, "get", _fake_get) + + return calls + + return _install + + +def make_account(id_: str = "acc-1"): + # Use a simple object to avoid constructing a full SQLAlchemy model instance + # Python 3.12 forbids reassigning __class__ for SimpleNamespace; not needed here. + obj = types.SimpleNamespace() + obj.id = id_ + return obj + + +def make_end_user(id_: str = "user-1"): + # Lightweight stand-in object; no need to spoof class identity. + obj = types.SimpleNamespace() + obj.id = id_ + return obj + + +def test_created_by_account_returns_account_when_role_account(fake_db_scalar): + account = make_account("acc-1") + + # The implementation uses db.session.scalar(select(Account)...). We only need to + # return the expected object when called; the exact SQL is irrelevant for this unit test. 
+ def side_effect(_statement): + return account + + fake_db_scalar(side_effect) + + log = WorkflowNodeExecutionModel( + tenant_id="t1", + app_id="a1", + workflow_id="w1", + triggered_from="workflow-run", + workflow_run_id=None, + index=1, + predecessor_node_id=None, + node_execution_id=None, + node_id="n1", + node_type="start", + title="Start", + inputs=None, + process_data=None, + outputs=None, + status="succeeded", + error=None, + elapsed_time=0.0, + execution_metadata=None, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by="acc-1", + ) + + assert log.created_by_account is account + + +def test_created_by_account_returns_none_when_role_not_account(fake_db_scalar): + # Even if an Account with matching id exists, property should return None when role is END_USER + account = make_account("acc-1") + + def side_effect(_statement): + return account + + fake_db_scalar(side_effect) + + log = WorkflowNodeExecutionModel( + tenant_id="t1", + app_id="a1", + workflow_id="w1", + triggered_from="workflow-run", + workflow_run_id=None, + index=1, + predecessor_node_id=None, + node_execution_id=None, + node_id="n1", + node_type="start", + title="Start", + inputs=None, + process_data=None, + outputs=None, + status="succeeded", + error=None, + elapsed_time=0.0, + execution_metadata=None, + created_by_role=CreatorUserRole.END_USER.value, + created_by="acc-1", + ) + + assert log.created_by_account is None + + +def test_created_by_end_user_returns_end_user_when_role_end_user(fake_db_scalar): + end_user = make_end_user("user-1") + + def side_effect(_statement): + return end_user + + fake_db_scalar(side_effect) + + log = WorkflowNodeExecutionModel( + tenant_id="t1", + app_id="a1", + workflow_id="w1", + triggered_from="workflow-run", + workflow_run_id=None, + index=1, + predecessor_node_id=None, + node_execution_id=None, + node_id="n1", + node_type="start", + title="Start", + inputs=None, + process_data=None, + outputs=None, + status="succeeded", + error=None, + 
elapsed_time=0.0, + execution_metadata=None, + created_by_role=CreatorUserRole.END_USER.value, + created_by="user-1", + ) + + assert log.created_by_end_user is end_user + + +def test_created_by_end_user_returns_none_when_role_not_end_user(fake_db_scalar): + end_user = make_end_user("user-1") + + def side_effect(_statement): + return end_user + + fake_db_scalar(side_effect) + + log = WorkflowNodeExecutionModel( + tenant_id="t1", + app_id="a1", + workflow_id="w1", + triggered_from="workflow-run", + workflow_run_id=None, + index=1, + predecessor_node_id=None, + node_execution_id=None, + node_id="n1", + node_type="start", + title="Start", + inputs=None, + process_data=None, + outputs=None, + status="succeeded", + error=None, + elapsed_time=0.0, + execution_metadata=None, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by="user-1", + ) + + assert log.created_by_end_user is None From e1d11681c03ea47bbe01d5fccab808196ee8a50e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=9D=9E=E6=B3=95=E6=93=8D=E4=BD=9C?= Date: Mon, 24 Nov 2025 11:08:40 +0800 Subject: [PATCH 14/22] fix: plugin auto update display issues (#28564) --- .../auto-update-setting/no-plugin-selected.tsx | 2 +- .../auto-update-setting/plugins-picker.tsx | 2 +- .../auto-update-setting/tool-picker.tsx | 11 ++++++++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/web/app/components/plugins/reference-setting-modal/auto-update-setting/no-plugin-selected.tsx b/web/app/components/plugins/reference-setting-modal/auto-update-setting/no-plugin-selected.tsx index e255be0525..2338014232 100644 --- a/web/app/components/plugins/reference-setting-modal/auto-update-setting/no-plugin-selected.tsx +++ b/web/app/components/plugins/reference-setting-modal/auto-update-setting/no-plugin-selected.tsx @@ -14,7 +14,7 @@ const NoPluginSelected: FC = ({ const { t } = useTranslation() const text = `${t(`plugin.autoUpdate.upgradeModePlaceholder.${updateMode === AUTO_UPDATE_MODE.partial ? 
'partial' : 'exclude'}`)}` return ( -
+
{text}
) diff --git a/web/app/components/plugins/reference-setting-modal/auto-update-setting/plugins-picker.tsx b/web/app/components/plugins/reference-setting-modal/auto-update-setting/plugins-picker.tsx index 77ffd66670..097592c1c0 100644 --- a/web/app/components/plugins/reference-setting-modal/auto-update-setting/plugins-picker.tsx +++ b/web/app/components/plugins/reference-setting-modal/auto-update-setting/plugins-picker.tsx @@ -53,7 +53,7 @@ const PluginsPicker: FC = ({ + diff --git a/web/app/components/plugins/reference-setting-modal/auto-update-setting/tool-picker.tsx b/web/app/components/plugins/reference-setting-modal/auto-update-setting/tool-picker.tsx index 0e48a07f46..ed8ae6411e 100644 --- a/web/app/components/plugins/reference-setting-modal/auto-update-setting/tool-picker.tsx +++ b/web/app/components/plugins/reference-setting-modal/auto-update-setting/tool-picker.tsx @@ -58,6 +58,14 @@ const ToolPicker: FC = ({ key: PLUGIN_TYPE_SEARCH_MAP.extension, name: t('plugin.category.extensions'), }, + { + key: PLUGIN_TYPE_SEARCH_MAP.datasource, + name: t('plugin.category.datasources'), + }, + { + key: PLUGIN_TYPE_SEARCH_MAP.trigger, + name: t('plugin.category.triggers'), + }, { key: PLUGIN_TYPE_SEARCH_MAP.bundle, name: t('plugin.category.bundles'), @@ -119,12 +127,13 @@ const ToolPicker: FC = ({ onOpenChange={onShowChange} > {trigger} -
+
Date: Mon, 24 Nov 2025 12:42:04 +0800 Subject: [PATCH 15/22] feat(seo): add meaningful

headings across all public pages (#28569) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- web/app/components/header/index.tsx | 36 ++++++++++++++--------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/web/app/components/header/index.tsx b/web/app/components/header/index.tsx index ef24d471e0..e43a8bfa25 100644 --- a/web/app/components/header/index.tsx +++ b/web/app/components/header/index.tsx @@ -35,6 +35,7 @@ const Header = () => { const { setShowPricingModal, setShowAccountSettingModal } = useModalContext() const systemFeatures = useGlobalPublicStore(s => s.systemFeatures) const isFreePlan = plan.type === Plan.sandbox + const isBrandingEnabled = systemFeatures.branding.enabled const handlePlanClick = useCallback(() => { if (isFreePlan) setShowPricingModal() @@ -42,20 +43,27 @@ const Header = () => { setShowAccountSettingModal({ payload: ACCOUNT_SETTING_TAB.BILLING }) }, [isFreePlan, setShowAccountSettingModal, setShowPricingModal]) + const renderLogo = () => ( +

+ + {systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo + ? logo + : } + {isBrandingEnabled && systemFeatures.branding.application_title ? systemFeatures.branding.application_title : 'dify'} + +

+ ) + if (isMobile) { return (
- - {systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo - ? logo - : } - + {renderLogo()}
/
@@ -82,15 +90,7 @@ const Header = () => { return (
- - {systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo - ? logo - : } - + {renderLogo()}
/
From bcbd3de33647f308385ce998a92980dfb0d00e6b Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Mon, 24 Nov 2025 12:45:06 +0800 Subject: [PATCH 16/22] fix: i18n: stop running translation (#28571) Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> --- web/i18n/de-DE/share.ts | 2 +- web/i18n/es-ES/share.ts | 2 +- web/i18n/fa-IR/share.ts | 2 +- web/i18n/fr-FR/share.ts | 2 +- web/i18n/hi-IN/share.ts | 2 +- web/i18n/id-ID/share.ts | 2 +- web/i18n/it-IT/share.ts | 2 +- web/i18n/ja-JP/share.ts | 2 +- web/i18n/ko-KR/share.ts | 2 +- web/i18n/pl-PL/share.ts | 2 +- web/i18n/pt-BR/share.ts | 2 +- web/i18n/ro-RO/share.ts | 2 +- web/i18n/ru-RU/share.ts | 2 +- web/i18n/sl-SI/share.ts | 2 +- web/i18n/th-TH/share.ts | 2 +- web/i18n/tr-TR/share.ts | 2 +- web/i18n/uk-UA/share.ts | 2 +- web/i18n/vi-VN/share.ts | 2 +- web/i18n/zh-Hans/share.ts | 2 +- web/i18n/zh-Hant/share.ts | 2 +- web/i18n/zh-Hant/workflow.ts | 2 +- 21 files changed, 21 insertions(+), 21 deletions(-) diff --git a/web/i18n/de-DE/share.ts b/web/i18n/de-DE/share.ts index 8d828229d4..466a3041c7 100644 --- a/web/i18n/de-DE/share.ts +++ b/web/i18n/de-DE/share.ts @@ -76,7 +76,7 @@ const translation = { }, executions: '{{num}} HINRICHTUNGEN', execution: 'AUSFÜHRUNG', - stopRun: 'Stopp Lauf', + stopRun: 'Ausführung stoppen', }, login: { backToHome: 'Zurück zur Startseite', diff --git a/web/i18n/es-ES/share.ts b/web/i18n/es-ES/share.ts index 45c6bb1c71..fe76f6f7c1 100644 --- a/web/i18n/es-ES/share.ts +++ b/web/i18n/es-ES/share.ts @@ -76,7 +76,7 @@ const translation = { }, execution: 'EJECUCIÓN', executions: '{{num}} EJECUCIONES', - stopRun: 'Detener carrera', + stopRun: 'Detener ejecución', }, login: { backToHome: 'Volver a Inicio', diff --git a/web/i18n/fa-IR/share.ts b/web/i18n/fa-IR/share.ts index 17ae970838..9df503252c 100644 --- a/web/i18n/fa-IR/share.ts +++ b/web/i18n/fa-IR/share.ts @@ -72,7 +72,7 @@ const translation = { }, executions: 
'{{num}} اعدام', execution: 'اجرا', - stopRun: 'توقف کن، بدو', + stopRun: 'توقف اجرا', }, login: { backToHome: 'بازگشت به خانه', diff --git a/web/i18n/fr-FR/share.ts b/web/i18n/fr-FR/share.ts index c2c0d262db..84286e752d 100644 --- a/web/i18n/fr-FR/share.ts +++ b/web/i18n/fr-FR/share.ts @@ -76,7 +76,7 @@ const translation = { }, executions: '{{num}} EXÉCUTIONS', execution: 'EXÉCUTION', - stopRun: 'Arrêtez de courir', + stopRun: 'Arrêter l\'exécution', }, login: { backToHome: 'Retour à l\'accueil', diff --git a/web/i18n/hi-IN/share.ts b/web/i18n/hi-IN/share.ts index 760fc36d1d..cb5a6e0933 100644 --- a/web/i18n/hi-IN/share.ts +++ b/web/i18n/hi-IN/share.ts @@ -76,7 +76,7 @@ const translation = { }, execution: 'अनु执行', executions: '{{num}} फाँसी', - stopRun: 'रोकें या दौड़ना बंद करें', + stopRun: 'निष्पादन रोकें', }, login: { backToHome: 'होम पर वापस', diff --git a/web/i18n/id-ID/share.ts b/web/i18n/id-ID/share.ts index eb2bd67df5..85a3f4a8b4 100644 --- a/web/i18n/id-ID/share.ts +++ b/web/i18n/id-ID/share.ts @@ -67,7 +67,7 @@ const translation = { queryPlaceholder: 'Tulis konten kueri Anda...', resultTitle: 'Penyelesaian AI', browse: 'ramban', - stopRun: 'Berhenti Lari', + stopRun: 'Hentikan eksekusi', }, login: { backToHome: 'Kembali ke Beranda', diff --git a/web/i18n/it-IT/share.ts b/web/i18n/it-IT/share.ts index 8226355ba7..034cbea7f5 100644 --- a/web/i18n/it-IT/share.ts +++ b/web/i18n/it-IT/share.ts @@ -78,7 +78,7 @@ const translation = { }, execution: 'ESECUZIONE', executions: '{{num}} ESECUZIONI', - stopRun: 'Ferma la corsa', + stopRun: 'Ferma l\'esecuzione', }, login: { backToHome: 'Torna alla home', diff --git a/web/i18n/ja-JP/share.ts b/web/i18n/ja-JP/share.ts index 8ae9bc1728..1c219c83a9 100644 --- a/web/i18n/ja-JP/share.ts +++ b/web/i18n/ja-JP/share.ts @@ -72,7 +72,7 @@ const translation = { moreThanMaxLengthLine: '{{rowIndex}}行目:{{varName}}が制限長({{maxLength}})を超過', atLeastOne: '1 行以上のデータが必要です', }, - stopRun: '走るのをやめろ', + stopRun: '実行を停止', }, login: { 
backToHome: 'ホームに戻る', diff --git a/web/i18n/ko-KR/share.ts b/web/i18n/ko-KR/share.ts index 41821c19c1..43d3b967f6 100644 --- a/web/i18n/ko-KR/share.ts +++ b/web/i18n/ko-KR/share.ts @@ -72,7 +72,7 @@ const translation = { }, execution: '실행', executions: '{{num}} 처형', - stopRun: '멈춰 달려', + stopRun: '실행 중지', }, login: { backToHome: '홈으로 돌아가기', diff --git a/web/i18n/pl-PL/share.ts b/web/i18n/pl-PL/share.ts index dbc5612963..03306137a2 100644 --- a/web/i18n/pl-PL/share.ts +++ b/web/i18n/pl-PL/share.ts @@ -77,7 +77,7 @@ const translation = { }, executions: '{{num}} EGZEKUCJI', execution: 'WYKONANIE', - stopRun: 'Zatrzymaj bieg', + stopRun: 'Zatrzymaj wykonanie', }, login: { backToHome: 'Powrót do strony głównej', diff --git a/web/i18n/pt-BR/share.ts b/web/i18n/pt-BR/share.ts index 4b33f6df1c..df41ff7dd2 100644 --- a/web/i18n/pt-BR/share.ts +++ b/web/i18n/pt-BR/share.ts @@ -76,7 +76,7 @@ const translation = { }, executions: '{{num}} EXECUÇÕES', execution: 'EXECUÇÃO', - stopRun: 'Pare de correr', + stopRun: 'Parar execução', }, login: { backToHome: 'Voltar para a página inicial', diff --git a/web/i18n/ro-RO/share.ts b/web/i18n/ro-RO/share.ts index 63d383c8f7..f7797ccfdf 100644 --- a/web/i18n/ro-RO/share.ts +++ b/web/i18n/ro-RO/share.ts @@ -76,7 +76,7 @@ const translation = { }, execution: 'EXECUȚIE', executions: '{{num}} EXECUȚII', - stopRun: 'Oprește alergarea', + stopRun: 'Oprește execuția', }, login: { backToHome: 'Înapoi la Acasă', diff --git a/web/i18n/ru-RU/share.ts b/web/i18n/ru-RU/share.ts index 3b9f109ebf..190e7c0b6f 100644 --- a/web/i18n/ru-RU/share.ts +++ b/web/i18n/ru-RU/share.ts @@ -76,7 +76,7 @@ const translation = { }, execution: 'ИСПОЛНЕНИЕ', executions: '{{num}} ВЫПОЛНЕНИЯ', - stopRun: 'Остановись, убегая', + stopRun: 'Остановить выполнение', }, login: { backToHome: 'Назад на главную', diff --git a/web/i18n/sl-SI/share.ts b/web/i18n/sl-SI/share.ts index 21da9bf090..3793582ec0 100644 --- a/web/i18n/sl-SI/share.ts +++ b/web/i18n/sl-SI/share.ts @@ -73,7 +73,7 
@@ const translation = { }, execution: 'IZVEDBA', executions: '{{num}} IZVRŠITEV', - stopRun: 'Ustavi teči', + stopRun: 'Ustavi izvajanje', }, login: { backToHome: 'Nazaj na začetno stran', diff --git a/web/i18n/th-TH/share.ts b/web/i18n/th-TH/share.ts index cfcffcda52..04371405ee 100644 --- a/web/i18n/th-TH/share.ts +++ b/web/i18n/th-TH/share.ts @@ -72,7 +72,7 @@ const translation = { }, execution: 'การดำเนินการ', executions: '{{num}} การประหารชีวิต', - stopRun: 'หยุดวิ่ง', + stopRun: 'หยุดการทำงาน', }, login: { backToHome: 'กลับไปที่หน้าแรก', diff --git a/web/i18n/tr-TR/share.ts b/web/i18n/tr-TR/share.ts index ae2804e22c..a12973df0b 100644 --- a/web/i18n/tr-TR/share.ts +++ b/web/i18n/tr-TR/share.ts @@ -72,7 +72,7 @@ const translation = { }, execution: 'İFRAZAT', executions: '{{num}} İDAM', - stopRun: 'Dur Koş', + stopRun: 'Çalışmayı durdur', }, login: { backToHome: 'Ana Sayfaya Dön', diff --git a/web/i18n/uk-UA/share.ts b/web/i18n/uk-UA/share.ts index 9ac8f95ba5..5e1142caa5 100644 --- a/web/i18n/uk-UA/share.ts +++ b/web/i18n/uk-UA/share.ts @@ -72,7 +72,7 @@ const translation = { }, execution: 'ВИКОНАННЯ', executions: '{{num}} ВИКОНАНЬ', - stopRun: 'Зупинись, не біжи', + stopRun: 'Зупинити виконання', }, login: { backToHome: 'Повернутися на головну', diff --git a/web/i18n/vi-VN/share.ts b/web/i18n/vi-VN/share.ts index 9138a78caa..faa5049dc3 100644 --- a/web/i18n/vi-VN/share.ts +++ b/web/i18n/vi-VN/share.ts @@ -72,7 +72,7 @@ const translation = { }, executions: '{{num}} ÁN TỬ HÌNH', execution: 'THI HÀNH', - stopRun: 'Dừng lại', + stopRun: 'Dừng thực thi', }, login: { backToHome: 'Trở về Trang Chủ', diff --git a/web/i18n/zh-Hans/share.ts b/web/i18n/zh-Hans/share.ts index 4db47e9241..db67295b02 100644 --- a/web/i18n/zh-Hans/share.ts +++ b/web/i18n/zh-Hans/share.ts @@ -72,7 +72,7 @@ const translation = { moreThanMaxLengthLine: '第 {{rowIndex}} 行:{{varName}}值超过最大长度 {{maxLength}}', atLeastOne: '上传文件的内容不能少于一条', }, - stopRun: '停止跑', + stopRun: '停止运行', }, login: { 
backToHome: '返回首页', diff --git a/web/i18n/zh-Hant/share.ts b/web/i18n/zh-Hant/share.ts index eaecd4f000..af87666941 100644 --- a/web/i18n/zh-Hant/share.ts +++ b/web/i18n/zh-Hant/share.ts @@ -72,7 +72,7 @@ const translation = { }, execution: '執行', executions: '{{num}} 執行', - stopRun: '停止奔跑', + stopRun: '停止運行', }, login: { backToHome: '返回首頁', diff --git a/web/i18n/zh-Hant/workflow.ts b/web/i18n/zh-Hant/workflow.ts index 5917eb95ed..a12f348f93 100644 --- a/web/i18n/zh-Hant/workflow.ts +++ b/web/i18n/zh-Hant/workflow.ts @@ -1042,7 +1042,7 @@ const translation = { }, trigger: { cached: '查看快取的變數', - stop: '停止跑步', + stop: '停止運行', clear: '清晰', running: '快取運行狀態', normal: '變數檢查', From 15ea27868e0512b7dbca633d93b1808fbdb7d575 Mon Sep 17 00:00:00 2001 From: Joel Date: Mon, 24 Nov 2025 17:02:18 +0800 Subject: [PATCH 17/22] pref: workflow (#28591) --- .../nodes/_base/components/node-control.tsx | 13 ++----------- .../nodes/_base/components/workflow-panel/index.tsx | 9 ++------- 2 files changed, 4 insertions(+), 18 deletions(-) diff --git a/web/app/components/workflow/nodes/_base/components/node-control.tsx b/web/app/components/workflow/nodes/_base/components/node-control.tsx index 544e595ecf..2a52737bbd 100644 --- a/web/app/components/workflow/nodes/_base/components/node-control.tsx +++ b/web/app/components/workflow/nodes/_base/components/node-control.tsx @@ -19,8 +19,6 @@ import { } from '@/app/components/base/icons/src/vender/line/mediaAndDevices' import Tooltip from '@/app/components/base/tooltip' import { useWorkflowStore } from '@/app/components/workflow/store' -import { useWorkflowRunValidation } from '@/app/components/workflow/hooks/use-checklist' -import Toast from '@/app/components/base/toast' type NodeControlProps = Pick const NodeControl: FC = ({ @@ -32,8 +30,6 @@ const NodeControl: FC = ({ const { handleNodeSelect } = useNodesInteractions() const workflowStore = useWorkflowStore() const isSingleRunning = data._singleRunningStatus === NodeRunningStatus.Running - const 
{ warningNodes } = useWorkflowRunValidation() - const warningForNode = warningNodes.find(item => item.id === id) const handleOpenChange = useCallback((newOpen: boolean) => { setOpen(newOpen) }, []) @@ -55,14 +51,9 @@ const NodeControl: FC = ({ { canRunBySingle(data.type, isChildNode) && (
{ const action = isSingleRunning ? 'stop' : 'run' - if (!isSingleRunning && warningForNode) { - const message = warningForNode.errorMessage || t('workflow.panel.checklistTip') - Toast.notify({ type: 'error', message }) - return - } const store = workflowStore.getState() store.setInitShowLastRunTab(true) @@ -78,7 +69,7 @@ const NodeControl: FC = ({ ? : ( diff --git a/web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx b/web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx index bc33a05f58..0d3aebd06d 100644 --- a/web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx +++ b/web/app/components/workflow/nodes/_base/components/workflow-panel/index.tsx @@ -110,13 +110,8 @@ const BasePanel: FC = ({ const nodePanelWidth = useStore(s => s.nodePanelWidth) const otherPanelWidth = useStore(s => s.otherPanelWidth) const setNodePanelWidth = useStore(s => s.setNodePanelWidth) - const { - pendingSingleRun, - setPendingSingleRun, - } = useStore(s => ({ - pendingSingleRun: s.pendingSingleRun, - setPendingSingleRun: s.setPendingSingleRun, - })) + const pendingSingleRun = useStore(s => s.pendingSingleRun) + const setPendingSingleRun = useStore(s => s.setPendingSingleRun) const reservedCanvasWidth = 400 // Reserve the minimum visible width for the canvas From aab95d06263e01e4605ef1eb88e6892b57221840 Mon Sep 17 00:00:00 2001 From: changkeke <33918095+changkeke@users.noreply.github.com> Date: Mon, 24 Nov 2025 20:44:09 +0800 Subject: [PATCH 18/22] fix: Failed to load API definition (#28509) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Asuka Minato --- api/controllers/console/apikey.py | 18 +- api/controllers/console/app/annotation.py | 22 +- api/controllers/console/app/app.py | 147 +++++++-- api/controllers/console/app/app_import.py | 27 +- api/controllers/console/app/conversation.py | 284 +++++++++++++++++- .../console/app/conversation_variables.py | 24 +- 
api/controllers/console/app/mcp_server.py | 19 +- api/controllers/console/app/message.py | 135 ++++++++- api/controllers/console/app/site.py | 11 +- api/controllers/console/app/workflow.py | 75 ++++- .../console/app/workflow_app_log.py | 9 +- .../console/app/workflow_draft_variable.py | 59 +++- api/controllers/console/app/workflow_run.py | 96 +++++- api/controllers/console/datasets/datasets.py | 93 +++++- .../console/datasets/datasets_document.py | 41 ++- api/controllers/console/datasets/external.py | 61 +++- api/controllers/console/extension.py | 20 +- 17 files changed, 996 insertions(+), 145 deletions(-) diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py index d93858d3fc..9b0d4b1a78 100644 --- a/api/controllers/console/apikey.py +++ b/api/controllers/console/apikey.py @@ -24,6 +24,12 @@ api_key_fields = { api_key_list = {"data": fields.List(fields.Nested(api_key_fields), attribute="items")} +api_key_item_model = console_ns.model("ApiKeyItem", api_key_fields) + +api_key_list_model = console_ns.model( + "ApiKeyList", {"data": fields.List(fields.Nested(api_key_item_model), attribute="items")} +) + def _get_resource(resource_id, tenant_id, resource_model): if resource_model == App: @@ -52,7 +58,7 @@ class BaseApiKeyListResource(Resource): token_prefix: str | None = None max_keys = 10 - @marshal_with(api_key_list) + @marshal_with(api_key_list_model) def get(self, resource_id): assert self.resource_id_field is not None, "resource_id_field must be set" resource_id = str(resource_id) @@ -66,7 +72,7 @@ class BaseApiKeyListResource(Resource): ).all() return {"items": keys} - @marshal_with(api_key_fields) + @marshal_with(api_key_item_model) @edit_permission_required def post(self, resource_id): assert self.resource_id_field is not None, "resource_id_field must be set" @@ -136,7 +142,7 @@ class AppApiKeyListResource(BaseApiKeyListResource): @console_ns.doc("get_app_api_keys") @console_ns.doc(description="Get all API keys for an app") 
@console_ns.doc(params={"resource_id": "App ID"}) - @console_ns.response(200, "Success", api_key_list) + @console_ns.response(200, "Success", api_key_list_model) def get(self, resource_id): # type: ignore """Get all API keys for an app""" return super().get(resource_id) @@ -144,7 +150,7 @@ class AppApiKeyListResource(BaseApiKeyListResource): @console_ns.doc("create_app_api_key") @console_ns.doc(description="Create a new API key for an app") @console_ns.doc(params={"resource_id": "App ID"}) - @console_ns.response(201, "API key created successfully", api_key_fields) + @console_ns.response(201, "API key created successfully", api_key_item_model) @console_ns.response(400, "Maximum keys exceeded") def post(self, resource_id): # type: ignore """Create a new API key for an app""" @@ -176,7 +182,7 @@ class DatasetApiKeyListResource(BaseApiKeyListResource): @console_ns.doc("get_dataset_api_keys") @console_ns.doc(description="Get all API keys for a dataset") @console_ns.doc(params={"resource_id": "Dataset ID"}) - @console_ns.response(200, "Success", api_key_list) + @console_ns.response(200, "Success", api_key_list_model) def get(self, resource_id): # type: ignore """Get all API keys for a dataset""" return super().get(resource_id) @@ -184,7 +190,7 @@ class DatasetApiKeyListResource(BaseApiKeyListResource): @console_ns.doc("create_dataset_api_key") @console_ns.doc(description="Create a new API key for a dataset") @console_ns.doc(params={"resource_id": "Dataset ID"}) - @console_ns.response(201, "API key created successfully", api_key_fields) + @console_ns.response(201, "API key created successfully", api_key_item_model) @console_ns.response(400, "Maximum keys exceeded") def post(self, resource_id): # type: ignore """Create a new API key for a dataset""" diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py index 0be39c9178..edf0cc2cec 100644 --- a/api/controllers/console/app/annotation.py +++ 
b/api/controllers/console/app/annotation.py @@ -15,6 +15,7 @@ from extensions.ext_redis import redis_client from fields.annotation_fields import ( annotation_fields, annotation_hit_history_fields, + build_annotation_model, ) from libs.helper import uuid_value from libs.login import login_required @@ -184,7 +185,7 @@ class AnnotationApi(Resource): }, ) ) - @console_ns.response(201, "Annotation created successfully", annotation_fields) + @console_ns.response(201, "Annotation created successfully", build_annotation_model(console_ns)) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @@ -238,7 +239,11 @@ class AnnotationExportApi(Resource): @console_ns.doc("export_annotations") @console_ns.doc(description="Export all annotations for an app") @console_ns.doc(params={"app_id": "Application ID"}) - @console_ns.response(200, "Annotations exported successfully", fields.List(fields.Nested(annotation_fields))) + @console_ns.response( + 200, + "Annotations exported successfully", + console_ns.model("AnnotationList", {"data": fields.List(fields.Nested(build_annotation_model(console_ns)))}), + ) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @@ -263,7 +268,7 @@ class AnnotationUpdateDeleteApi(Resource): @console_ns.doc("update_delete_annotation") @console_ns.doc(description="Update or delete an annotation") @console_ns.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"}) - @console_ns.response(200, "Annotation updated successfully", annotation_fields) + @console_ns.response(200, "Annotation updated successfully", build_annotation_model(console_ns)) @console_ns.response(204, "Annotation deleted successfully") @console_ns.response(403, "Insufficient permissions") @console_ns.expect(parser) @@ -359,7 +364,16 @@ class AnnotationHitHistoryListApi(Resource): .add_argument("limit", type=int, location="args", default=20, help="Page size") ) @console_ns.response( - 200, "Hit histories 
retrieved successfully", fields.List(fields.Nested(annotation_hit_history_fields)) + 200, + "Hit histories retrieved successfully", + console_ns.model( + "AnnotationHitHistoryList", + { + "data": fields.List( + fields.Nested(console_ns.model("AnnotationHitHistoryItem", annotation_hit_history_fields)) + ) + }, + ), ) @console_ns.response(403, "Insufficient permissions") @setup_required diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index 85a46aa9c3..e6687de03e 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -18,7 +18,15 @@ from controllers.console.wraps import ( from core.ops.ops_trace_manager import OpsTraceManager from core.workflow.enums import NodeType from extensions.ext_database import db -from fields.app_fields import app_detail_fields, app_detail_fields_with_site, app_pagination_fields +from fields.app_fields import ( + deleted_tool_fields, + model_config_fields, + model_config_partial_fields, + site_fields, + tag_fields, +) +from fields.workflow_fields import workflow_partial_fields as _workflow_partial_fields_dict +from libs.helper import AppIconUrlField, TimestampField from libs.login import current_account_with_tenant, login_required from libs.validators import validate_description_length from models import App, Workflow @@ -29,6 +37,111 @@ from services.feature_service import FeatureService ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"] +# Register models for flask_restx to avoid dict type issues in Swagger +# Register base models first +tag_model = console_ns.model("Tag", tag_fields) + +workflow_partial_model = console_ns.model("WorkflowPartial", _workflow_partial_fields_dict) + +model_config_model = console_ns.model("ModelConfig", model_config_fields) + +model_config_partial_model = console_ns.model("ModelConfigPartial", model_config_partial_fields) + +deleted_tool_model = console_ns.model("DeletedTool", deleted_tool_fields) + 
+site_model = console_ns.model("Site", site_fields) + +app_partial_model = console_ns.model( + "AppPartial", + { + "id": fields.String, + "name": fields.String, + "max_active_requests": fields.Raw(), + "description": fields.String(attribute="desc_or_prompt"), + "mode": fields.String(attribute="mode_compatible_with_agent"), + "icon_type": fields.String, + "icon": fields.String, + "icon_background": fields.String, + "icon_url": AppIconUrlField, + "model_config": fields.Nested(model_config_partial_model, attribute="app_model_config", allow_null=True), + "workflow": fields.Nested(workflow_partial_model, allow_null=True), + "use_icon_as_answer_icon": fields.Boolean, + "created_by": fields.String, + "created_at": TimestampField, + "updated_by": fields.String, + "updated_at": TimestampField, + "tags": fields.List(fields.Nested(tag_model)), + "access_mode": fields.String, + "create_user_name": fields.String, + "author_name": fields.String, + "has_draft_trigger": fields.Boolean, + }, +) + +app_detail_model = console_ns.model( + "AppDetail", + { + "id": fields.String, + "name": fields.String, + "description": fields.String, + "mode": fields.String(attribute="mode_compatible_with_agent"), + "icon": fields.String, + "icon_background": fields.String, + "enable_site": fields.Boolean, + "enable_api": fields.Boolean, + "model_config": fields.Nested(model_config_model, attribute="app_model_config", allow_null=True), + "workflow": fields.Nested(workflow_partial_model, allow_null=True), + "tracing": fields.Raw, + "use_icon_as_answer_icon": fields.Boolean, + "created_by": fields.String, + "created_at": TimestampField, + "updated_by": fields.String, + "updated_at": TimestampField, + "access_mode": fields.String, + "tags": fields.List(fields.Nested(tag_model)), + }, +) + +app_detail_with_site_model = console_ns.model( + "AppDetailWithSite", + { + "id": fields.String, + "name": fields.String, + "description": fields.String, + "mode": fields.String(attribute="mode_compatible_with_agent"), 
+ "icon_type": fields.String, + "icon": fields.String, + "icon_background": fields.String, + "icon_url": AppIconUrlField, + "enable_site": fields.Boolean, + "enable_api": fields.Boolean, + "model_config": fields.Nested(model_config_model, attribute="app_model_config", allow_null=True), + "workflow": fields.Nested(workflow_partial_model, allow_null=True), + "api_base_url": fields.String, + "use_icon_as_answer_icon": fields.Boolean, + "max_active_requests": fields.Integer, + "created_by": fields.String, + "created_at": TimestampField, + "updated_by": fields.String, + "updated_at": TimestampField, + "deleted_tools": fields.List(fields.Nested(deleted_tool_model)), + "access_mode": fields.String, + "tags": fields.List(fields.Nested(tag_model)), + "site": fields.Nested(site_model), + }, +) + +app_pagination_model = console_ns.model( + "AppPagination", + { + "page": fields.Integer, + "limit": fields.Integer(attribute="per_page"), + "total": fields.Integer, + "has_more": fields.Boolean(attribute="has_next"), + "data": fields.List(fields.Nested(app_partial_model), attribute="items"), + }, +) + @console_ns.route("/apps") class AppListApi(Resource): @@ -50,7 +163,7 @@ class AppListApi(Resource): .add_argument("tag_ids", type=str, location="args", help="Comma-separated tag IDs") .add_argument("is_created_by_me", type=bool, location="args", help="Filter by creator") ) - @console_ns.response(200, "Success", app_pagination_fields) + @console_ns.response(200, "Success", app_pagination_model) @setup_required @login_required @account_initialization_required @@ -137,7 +250,7 @@ class AppListApi(Resource): for app in app_pagination.items: app.has_draft_trigger = str(app.id) in draft_trigger_app_ids - return marshal(app_pagination, app_pagination_fields), 200 + return marshal(app_pagination, app_pagination_model), 200 @console_ns.doc("create_app") @console_ns.doc(description="Create a new application") @@ -154,13 +267,13 @@ class AppListApi(Resource): }, ) ) - @console_ns.response(201, 
"App created successfully", app_detail_fields) + @console_ns.response(201, "App created successfully", app_detail_model) @console_ns.response(403, "Insufficient permissions") @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required - @marshal_with(app_detail_fields) + @marshal_with(app_detail_model) @cloud_edition_billing_resource_check("apps") @edit_permission_required def post(self): @@ -191,13 +304,13 @@ class AppApi(Resource): @console_ns.doc("get_app_detail") @console_ns.doc(description="Get application details") @console_ns.doc(params={"app_id": "Application ID"}) - @console_ns.response(200, "Success", app_detail_fields_with_site) + @console_ns.response(200, "Success", app_detail_with_site_model) @setup_required @login_required @account_initialization_required @enterprise_license_required @get_app_model - @marshal_with(app_detail_fields_with_site) + @marshal_with(app_detail_with_site_model) def get(self, app_model): """Get app detail""" app_service = AppService() @@ -227,7 +340,7 @@ class AppApi(Resource): }, ) ) - @console_ns.response(200, "App updated successfully", app_detail_fields_with_site) + @console_ns.response(200, "App updated successfully", app_detail_with_site_model) @console_ns.response(403, "Insufficient permissions") @console_ns.response(400, "Invalid request parameters") @setup_required @@ -235,7 +348,7 @@ class AppApi(Resource): @account_initialization_required @get_app_model @edit_permission_required - @marshal_with(app_detail_fields_with_site) + @marshal_with(app_detail_with_site_model) def put(self, app_model): """Update app""" parser = ( @@ -300,14 +413,14 @@ class AppCopyApi(Resource): }, ) ) - @console_ns.response(201, "App copied successfully", app_detail_fields_with_site) + @console_ns.response(201, "App copied successfully", app_detail_with_site_model) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required 
@get_app_model @edit_permission_required - @marshal_with(app_detail_fields_with_site) + @marshal_with(app_detail_with_site_model) def post(self, app_model): """Copy app""" # The role of the current user in the ta table must be admin, owner, or editor @@ -396,7 +509,7 @@ class AppNameApi(Resource): @login_required @account_initialization_required @get_app_model - @marshal_with(app_detail_fields) + @marshal_with(app_detail_model) @edit_permission_required def post(self, app_model): args = parser.parse_args() @@ -428,7 +541,7 @@ class AppIconApi(Resource): @login_required @account_initialization_required @get_app_model - @marshal_with(app_detail_fields) + @marshal_with(app_detail_model) @edit_permission_required def post(self, app_model): parser = ( @@ -454,13 +567,13 @@ class AppSiteStatus(Resource): "AppSiteStatusRequest", {"enable_site": fields.Boolean(required=True, description="Enable or disable site")} ) ) - @console_ns.response(200, "Site status updated successfully", app_detail_fields) + @console_ns.response(200, "Site status updated successfully", app_detail_model) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @get_app_model - @marshal_with(app_detail_fields) + @marshal_with(app_detail_model) @edit_permission_required def post(self, app_model): parser = reqparse.RequestParser().add_argument("enable_site", type=bool, required=True, location="json") @@ -482,14 +595,14 @@ class AppApiStatus(Resource): "AppApiStatusRequest", {"enable_api": fields.Boolean(required=True, description="Enable or disable API")} ) ) - @console_ns.response(200, "API status updated successfully", app_detail_fields) + @console_ns.response(200, "API status updated successfully", app_detail_model) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @is_admin_or_owner_required @account_initialization_required @get_app_model - @marshal_with(app_detail_fields) + 
@marshal_with(app_detail_model) def post(self, app_model): parser = reqparse.RequestParser().add_argument("enable_api", type=bool, required=True, location="json") args = parser.parse_args() diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py index 35a3393742..1b02edd489 100644 --- a/api/controllers/console/app/app_import.py +++ b/api/controllers/console/app/app_import.py @@ -1,4 +1,4 @@ -from flask_restx import Resource, marshal_with, reqparse +from flask_restx import Resource, fields, marshal_with, reqparse from sqlalchemy.orm import Session from controllers.console.app.wraps import get_app_model @@ -9,7 +9,11 @@ from controllers.console.wraps import ( setup_required, ) from extensions.ext_database import db -from fields.app_fields import app_import_check_dependencies_fields, app_import_fields +from fields.app_fields import ( + app_import_check_dependencies_fields, + app_import_fields, + leaked_dependency_fields, +) from libs.login import current_account_with_tenant, login_required from models.model import App from services.app_dsl_service import AppDslService, ImportStatus @@ -18,6 +22,19 @@ from services.feature_service import FeatureService from .. 
import console_ns +# Register models for flask_restx to avoid dict type issues in Swagger +# Register base model first +leaked_dependency_model = console_ns.model("LeakedDependency", leaked_dependency_fields) + +app_import_model = console_ns.model("AppImport", app_import_fields) + +# For nested models, need to replace nested dict with registered model +app_import_check_dependencies_fields_copy = app_import_check_dependencies_fields.copy() +app_import_check_dependencies_fields_copy["leaked_dependencies"] = fields.List(fields.Nested(leaked_dependency_model)) +app_import_check_dependencies_model = console_ns.model( + "AppImportCheckDependencies", app_import_check_dependencies_fields_copy +) + parser = ( reqparse.RequestParser() .add_argument("mode", type=str, required=True, location="json") @@ -38,7 +55,7 @@ class AppImportApi(Resource): @setup_required @login_required @account_initialization_required - @marshal_with(app_import_fields) + @marshal_with(app_import_model) @cloud_edition_billing_resource_check("apps") @edit_permission_required def post(self): @@ -81,7 +98,7 @@ class AppImportConfirmApi(Resource): @setup_required @login_required @account_initialization_required - @marshal_with(app_import_fields) + @marshal_with(app_import_model) @edit_permission_required def post(self, import_id): # Check user role first @@ -107,7 +124,7 @@ class AppImportCheckDependenciesApi(Resource): @login_required @get_app_model @account_initialization_required - @marshal_with(app_import_check_dependencies_fields) + @marshal_with(app_import_check_dependencies_model) @edit_permission_required def get(self, app_model: App): with Session(db.engine) as session: diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index e102300438..3d92c46756 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -1,6 +1,6 @@ import sqlalchemy as sa from flask import abort -from flask_restx import 
Resource, marshal_with, reqparse +from flask_restx import Resource, fields, marshal_with, reqparse from flask_restx.inputs import int_range from sqlalchemy import func, or_ from sqlalchemy.orm import joinedload @@ -11,20 +11,272 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db -from fields.conversation_fields import ( - conversation_detail_fields, - conversation_message_detail_fields, - conversation_pagination_fields, - conversation_with_summary_pagination_fields, -) +from fields.conversation_fields import MessageTextField +from fields.raws import FilesContainedField from libs.datetime_utils import naive_utc_now, parse_time_range -from libs.helper import DatetimeString +from libs.helper import DatetimeString, TimestampField from libs.login import current_account_with_tenant, login_required from models import Conversation, EndUser, Message, MessageAnnotation from models.model import AppMode from services.conversation_service import ConversationService from services.errors.conversation import ConversationNotExistsError +# Register models for flask_restx to avoid dict type issues in Swagger +# Register in dependency order: base models first, then dependent models + +# Base models +simple_account_model = console_ns.model( + "SimpleAccount", + { + "id": fields.String, + "name": fields.String, + "email": fields.String, + }, +) + +feedback_stat_model = console_ns.model( + "FeedbackStat", + { + "like": fields.Integer, + "dislike": fields.Integer, + }, +) + +status_count_model = console_ns.model( + "StatusCount", + { + "success": fields.Integer, + "failed": fields.Integer, + "partial_success": fields.Integer, + }, +) + +message_file_model = console_ns.model( + "MessageFile", + { + "id": fields.String, + "filename": fields.String, + "type": fields.String, + 
"url": fields.String, + "mime_type": fields.String, + "size": fields.Integer, + "transfer_method": fields.String, + "belongs_to": fields.String(default="user"), + "upload_file_id": fields.String(default=None), + }, +) + +agent_thought_model = console_ns.model( + "AgentThought", + { + "id": fields.String, + "chain_id": fields.String, + "message_id": fields.String, + "position": fields.Integer, + "thought": fields.String, + "tool": fields.String, + "tool_labels": fields.Raw, + "tool_input": fields.String, + "created_at": TimestampField, + "observation": fields.String, + "files": fields.List(fields.String), + }, +) + +simple_model_config_model = console_ns.model( + "SimpleModelConfig", + { + "model": fields.Raw(attribute="model_dict"), + "pre_prompt": fields.String, + }, +) + +model_config_model = console_ns.model( + "ModelConfig", + { + "opening_statement": fields.String, + "suggested_questions": fields.Raw, + "model": fields.Raw, + "user_input_form": fields.Raw, + "pre_prompt": fields.String, + "agent_mode": fields.Raw, + }, +) + +# Models that depend on simple_account_model +feedback_model = console_ns.model( + "Feedback", + { + "rating": fields.String, + "content": fields.String, + "from_source": fields.String, + "from_end_user_id": fields.String, + "from_account": fields.Nested(simple_account_model, allow_null=True), + }, +) + +annotation_model = console_ns.model( + "Annotation", + { + "id": fields.String, + "question": fields.String, + "content": fields.String, + "account": fields.Nested(simple_account_model, allow_null=True), + "created_at": TimestampField, + }, +) + +annotation_hit_history_model = console_ns.model( + "AnnotationHitHistory", + { + "annotation_id": fields.String(attribute="id"), + "annotation_create_account": fields.Nested(simple_account_model, allow_null=True), + "created_at": TimestampField, + }, +) + +# Simple message detail model +simple_message_detail_model = console_ns.model( + "SimpleMessageDetail", + { + "inputs": FilesContainedField, + 
"query": fields.String, + "message": MessageTextField, + "answer": fields.String, + }, +) + +# Message detail model that depends on multiple models +message_detail_model = console_ns.model( + "MessageDetail", + { + "id": fields.String, + "conversation_id": fields.String, + "inputs": FilesContainedField, + "query": fields.String, + "message": fields.Raw, + "message_tokens": fields.Integer, + "answer": fields.String(attribute="re_sign_file_url_answer"), + "answer_tokens": fields.Integer, + "provider_response_latency": fields.Float, + "from_source": fields.String, + "from_end_user_id": fields.String, + "from_account_id": fields.String, + "feedbacks": fields.List(fields.Nested(feedback_model)), + "workflow_run_id": fields.String, + "annotation": fields.Nested(annotation_model, allow_null=True), + "annotation_hit_history": fields.Nested(annotation_hit_history_model, allow_null=True), + "created_at": TimestampField, + "agent_thoughts": fields.List(fields.Nested(agent_thought_model)), + "message_files": fields.List(fields.Nested(message_file_model)), + "metadata": fields.Raw(attribute="message_metadata_dict"), + "status": fields.String, + "error": fields.String, + "parent_message_id": fields.String, + }, +) + +# Conversation models +conversation_fields_model = console_ns.model( + "Conversation", + { + "id": fields.String, + "status": fields.String, + "from_source": fields.String, + "from_end_user_id": fields.String, + "from_end_user_session_id": fields.String(), + "from_account_id": fields.String, + "from_account_name": fields.String, + "read_at": TimestampField, + "created_at": TimestampField, + "updated_at": TimestampField, + "annotation": fields.Nested(annotation_model, allow_null=True), + "model_config": fields.Nested(simple_model_config_model), + "user_feedback_stats": fields.Nested(feedback_stat_model), + "admin_feedback_stats": fields.Nested(feedback_stat_model), + "message": fields.Nested(simple_message_detail_model, attribute="first_message"), + }, +) + 
+conversation_pagination_model = console_ns.model( + "ConversationPagination", + { + "page": fields.Integer, + "limit": fields.Integer(attribute="per_page"), + "total": fields.Integer, + "has_more": fields.Boolean(attribute="has_next"), + "data": fields.List(fields.Nested(conversation_fields_model), attribute="items"), + }, +) + +conversation_message_detail_model = console_ns.model( + "ConversationMessageDetail", + { + "id": fields.String, + "status": fields.String, + "from_source": fields.String, + "from_end_user_id": fields.String, + "from_account_id": fields.String, + "created_at": TimestampField, + "model_config": fields.Nested(model_config_model), + "message": fields.Nested(message_detail_model, attribute="first_message"), + }, +) + +conversation_with_summary_model = console_ns.model( + "ConversationWithSummary", + { + "id": fields.String, + "status": fields.String, + "from_source": fields.String, + "from_end_user_id": fields.String, + "from_end_user_session_id": fields.String, + "from_account_id": fields.String, + "from_account_name": fields.String, + "name": fields.String, + "summary": fields.String(attribute="summary_or_query"), + "read_at": TimestampField, + "created_at": TimestampField, + "updated_at": TimestampField, + "annotated": fields.Boolean, + "model_config": fields.Nested(simple_model_config_model), + "message_count": fields.Integer, + "user_feedback_stats": fields.Nested(feedback_stat_model), + "admin_feedback_stats": fields.Nested(feedback_stat_model), + "status_count": fields.Nested(status_count_model), + }, +) + +conversation_with_summary_pagination_model = console_ns.model( + "ConversationWithSummaryPagination", + { + "page": fields.Integer, + "limit": fields.Integer(attribute="per_page"), + "total": fields.Integer, + "has_more": fields.Boolean(attribute="has_next"), + "data": fields.List(fields.Nested(conversation_with_summary_model), attribute="items"), + }, +) + +conversation_detail_model = console_ns.model( + "ConversationDetail", + { + 
"id": fields.String, + "status": fields.String, + "from_source": fields.String, + "from_end_user_id": fields.String, + "from_account_id": fields.String, + "created_at": TimestampField, + "updated_at": TimestampField, + "annotated": fields.Boolean, + "introduction": fields.String, + "model_config": fields.Nested(model_config_model), + "message_count": fields.Integer, + "user_feedback_stats": fields.Nested(feedback_stat_model), + "admin_feedback_stats": fields.Nested(feedback_stat_model), + }, +) + @console_ns.route("/apps//completion-conversations") class CompletionConversationApi(Resource): @@ -47,13 +299,13 @@ class CompletionConversationApi(Resource): .add_argument("page", type=int, location="args", default=1, help="Page number") .add_argument("limit", type=int, location="args", default=20, help="Page size (1-100)") ) - @console_ns.response(200, "Success", conversation_pagination_fields) + @console_ns.response(200, "Success", conversation_pagination_model) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) - @marshal_with(conversation_pagination_fields) + @marshal_with(conversation_pagination_model) @edit_permission_required def get(self, app_model): current_user, _ = current_account_with_tenant() @@ -125,14 +377,14 @@ class CompletionConversationDetailApi(Resource): @console_ns.doc("get_completion_conversation") @console_ns.doc(description="Get completion conversation details with messages") @console_ns.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"}) - @console_ns.response(200, "Success", conversation_message_detail_fields) + @console_ns.response(200, "Success", conversation_message_detail_model) @console_ns.response(403, "Insufficient permissions") @console_ns.response(404, "Conversation not found") @setup_required @login_required @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) - 
@marshal_with(conversation_message_detail_fields) + @marshal_with(conversation_message_detail_model) @edit_permission_required def get(self, app_model, conversation_id): conversation_id = str(conversation_id) @@ -192,13 +444,13 @@ class ChatConversationApi(Resource): help="Sort field and direction", ) ) - @console_ns.response(200, "Success", conversation_with_summary_pagination_fields) + @console_ns.response(200, "Success", conversation_with_summary_pagination_model) @console_ns.response(403, "Insufficient permissions") @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) - @marshal_with(conversation_with_summary_pagination_fields) + @marshal_with(conversation_with_summary_pagination_model) @edit_permission_required def get(self, app_model): current_user, _ = current_account_with_tenant() @@ -325,14 +577,14 @@ class ChatConversationDetailApi(Resource): @console_ns.doc("get_chat_conversation") @console_ns.doc(description="Get chat conversation details") @console_ns.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"}) - @console_ns.response(200, "Success", conversation_detail_fields) + @console_ns.response(200, "Success", conversation_detail_model) @console_ns.response(403, "Insufficient permissions") @console_ns.response(404, "Conversation not found") @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) - @marshal_with(conversation_detail_fields) + @marshal_with(conversation_detail_model) @edit_permission_required def get(self, app_model, conversation_id): conversation_id = str(conversation_id) diff --git a/api/controllers/console/app/conversation_variables.py b/api/controllers/console/app/conversation_variables.py index 15ea004143..c612041fab 100644 --- a/api/controllers/console/app/conversation_variables.py +++ 
b/api/controllers/console/app/conversation_variables.py @@ -1,4 +1,4 @@ -from flask_restx import Resource, marshal_with, reqparse +from flask_restx import Resource, fields, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session @@ -6,11 +6,27 @@ from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db -from fields.conversation_variable_fields import paginated_conversation_variable_fields +from fields.conversation_variable_fields import ( + conversation_variable_fields, + paginated_conversation_variable_fields, +) from libs.login import login_required from models import ConversationVariable from models.model import AppMode +# Register models for flask_restx to avoid dict type issues in Swagger +# Register base model first +conversation_variable_model = console_ns.model("ConversationVariable", conversation_variable_fields) + +# For nested models, need to replace nested dict with registered model +paginated_conversation_variable_fields_copy = paginated_conversation_variable_fields.copy() +paginated_conversation_variable_fields_copy["data"] = fields.List( + fields.Nested(conversation_variable_model), attribute="data" +) +paginated_conversation_variable_model = console_ns.model( + "PaginatedConversationVariable", paginated_conversation_variable_fields_copy +) + @console_ns.route("/apps//conversation-variables") class ConversationVariablesApi(Resource): @@ -22,12 +38,12 @@ class ConversationVariablesApi(Resource): "conversation_id", type=str, location="args", help="Conversation ID to filter variables" ) ) - @console_ns.response(200, "Conversation variables retrieved successfully", paginated_conversation_variable_fields) + @console_ns.response(200, "Conversation variables retrieved successfully", paginated_conversation_variable_model) @setup_required @login_required 
@account_initialization_required @get_app_model(mode=AppMode.ADVANCED_CHAT) - @marshal_with(paginated_conversation_variable_fields) + @marshal_with(paginated_conversation_variable_model) def get(self, app_model): parser = reqparse.RequestParser().add_argument("conversation_id", type=str, location="args") args = parser.parse_args() diff --git a/api/controllers/console/app/mcp_server.py b/api/controllers/console/app/mcp_server.py index 7454d87068..58d1fb4a2d 100644 --- a/api/controllers/console/app/mcp_server.py +++ b/api/controllers/console/app/mcp_server.py @@ -12,6 +12,9 @@ from fields.app_fields import app_server_fields from libs.login import current_account_with_tenant, login_required from models.model import AppMCPServer +# Register model for flask_restx to avoid dict type issues in Swagger +app_server_model = console_ns.model("AppServer", app_server_fields) + class AppMCPServerStatus(StrEnum): ACTIVE = "active" @@ -23,12 +26,12 @@ class AppMCPServerController(Resource): @console_ns.doc("get_app_mcp_server") @console_ns.doc(description="Get MCP server configuration for an application") @console_ns.doc(params={"app_id": "Application ID"}) - @console_ns.response(200, "MCP server configuration retrieved successfully", app_server_fields) + @console_ns.response(200, "MCP server configuration retrieved successfully", app_server_model) @login_required @account_initialization_required @setup_required @get_app_model - @marshal_with(app_server_fields) + @marshal_with(app_server_model) def get(self, app_model): server = db.session.query(AppMCPServer).where(AppMCPServer.app_id == app_model.id).first() return server @@ -45,13 +48,13 @@ class AppMCPServerController(Resource): }, ) ) - @console_ns.response(201, "MCP server configuration created successfully", app_server_fields) + @console_ns.response(201, "MCP server configuration created successfully", app_server_model) @console_ns.response(403, "Insufficient permissions") @account_initialization_required @get_app_model 
@login_required @setup_required - @marshal_with(app_server_fields) + @marshal_with(app_server_model) @edit_permission_required def post(self, app_model): _, current_tenant_id = current_account_with_tenant() @@ -93,14 +96,14 @@ class AppMCPServerController(Resource): }, ) ) - @console_ns.response(200, "MCP server configuration updated successfully", app_server_fields) + @console_ns.response(200, "MCP server configuration updated successfully", app_server_model) @console_ns.response(403, "Insufficient permissions") @console_ns.response(404, "Server not found") @get_app_model @login_required @setup_required @account_initialization_required - @marshal_with(app_server_fields) + @marshal_with(app_server_model) @edit_permission_required def put(self, app_model): parser = ( @@ -137,13 +140,13 @@ class AppMCPServerRefreshController(Resource): @console_ns.doc("refresh_app_mcp_server") @console_ns.doc(description="Refresh MCP server configuration and regenerate server code") @console_ns.doc(params={"server_id": "Server ID"}) - @console_ns.response(200, "MCP server refreshed successfully", app_server_fields) + @console_ns.response(200, "MCP server refreshed successfully", app_server_model) @console_ns.response(403, "Insufficient permissions") @console_ns.response(404, "Server not found") @setup_required @login_required @account_initialization_required - @marshal_with(app_server_fields) + @marshal_with(app_server_model) @edit_permission_required def get(self, server_id): _, current_tenant_id = current_account_with_tenant() diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index b6672c88e0..7fdf49c3fa 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -23,8 +23,8 @@ from core.app.entities.app_invoke_entities import InvokeFrom from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError from core.model_runtime.errors.invoke import InvokeError 
from extensions.ext_database import db -from fields.conversation_fields import message_detail_fields -from libs.helper import uuid_value +from fields.raws import FilesContainedField +from libs.helper import TimestampField, uuid_value from libs.infinite_scroll_pagination import InfiniteScrollPagination from libs.login import current_account_with_tenant, login_required from models.model import AppMode, Conversation, Message, MessageAnnotation, MessageFeedback @@ -34,15 +34,126 @@ from services.message_service import MessageService logger = logging.getLogger(__name__) +# Register models for flask_restx to avoid dict type issues in Swagger +# Register in dependency order: base models first, then dependent models + +# Base models +simple_account_model = console_ns.model( + "SimpleAccount", + { + "id": fields.String, + "name": fields.String, + "email": fields.String, + }, +) + +message_file_model = console_ns.model( + "MessageFile", + { + "id": fields.String, + "filename": fields.String, + "type": fields.String, + "url": fields.String, + "mime_type": fields.String, + "size": fields.Integer, + "transfer_method": fields.String, + "belongs_to": fields.String(default="user"), + "upload_file_id": fields.String(default=None), + }, +) + +agent_thought_model = console_ns.model( + "AgentThought", + { + "id": fields.String, + "chain_id": fields.String, + "message_id": fields.String, + "position": fields.Integer, + "thought": fields.String, + "tool": fields.String, + "tool_labels": fields.Raw, + "tool_input": fields.String, + "created_at": TimestampField, + "observation": fields.String, + "files": fields.List(fields.String), + }, +) + +# Models that depend on simple_account_model +feedback_model = console_ns.model( + "Feedback", + { + "rating": fields.String, + "content": fields.String, + "from_source": fields.String, + "from_end_user_id": fields.String, + "from_account": fields.Nested(simple_account_model, allow_null=True), + }, +) + +annotation_model = console_ns.model( + 
"Annotation", + { + "id": fields.String, + "question": fields.String, + "content": fields.String, + "account": fields.Nested(simple_account_model, allow_null=True), + "created_at": TimestampField, + }, +) + +annotation_hit_history_model = console_ns.model( + "AnnotationHitHistory", + { + "annotation_id": fields.String(attribute="id"), + "annotation_create_account": fields.Nested(simple_account_model, allow_null=True), + "created_at": TimestampField, + }, +) + +# Message detail model that depends on multiple models +message_detail_model = console_ns.model( + "MessageDetail", + { + "id": fields.String, + "conversation_id": fields.String, + "inputs": FilesContainedField, + "query": fields.String, + "message": fields.Raw, + "message_tokens": fields.Integer, + "answer": fields.String(attribute="re_sign_file_url_answer"), + "answer_tokens": fields.Integer, + "provider_response_latency": fields.Float, + "from_source": fields.String, + "from_end_user_id": fields.String, + "from_account_id": fields.String, + "feedbacks": fields.List(fields.Nested(feedback_model)), + "workflow_run_id": fields.String, + "annotation": fields.Nested(annotation_model, allow_null=True), + "annotation_hit_history": fields.Nested(annotation_hit_history_model, allow_null=True), + "created_at": TimestampField, + "agent_thoughts": fields.List(fields.Nested(agent_thought_model)), + "message_files": fields.List(fields.Nested(message_file_model)), + "metadata": fields.Raw(attribute="message_metadata_dict"), + "status": fields.String, + "error": fields.String, + "parent_message_id": fields.String, + }, +) + +# Message infinite scroll pagination model +message_infinite_scroll_pagination_model = console_ns.model( + "MessageInfiniteScrollPagination", + { + "limit": fields.Integer, + "has_more": fields.Boolean, + "data": fields.List(fields.Nested(message_detail_model)), + }, +) + @console_ns.route("/apps//chat-messages") class ChatMessageListApi(Resource): - message_infinite_scroll_pagination_fields = { - 
"limit": fields.Integer, - "has_more": fields.Boolean, - "data": fields.List(fields.Nested(message_detail_fields)), - } - @console_ns.doc("list_chat_messages") @console_ns.doc(description="Get chat messages for a conversation with pagination") @console_ns.doc(params={"app_id": "Application ID"}) @@ -52,13 +163,13 @@ class ChatMessageListApi(Resource): .add_argument("first_id", type=str, location="args", help="First message ID for pagination") .add_argument("limit", type=int, location="args", default=20, help="Number of messages to return (1-100)") ) - @console_ns.response(200, "Success", message_infinite_scroll_pagination_fields) + @console_ns.response(200, "Success", message_infinite_scroll_pagination_model) @console_ns.response(404, "Conversation not found") @login_required @account_initialization_required @setup_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) - @marshal_with(message_infinite_scroll_pagination_fields) + @marshal_with(message_infinite_scroll_pagination_model) @edit_permission_required def get(self, app_model): parser = ( @@ -263,13 +374,13 @@ class MessageApi(Resource): @console_ns.doc("get_message") @console_ns.doc(description="Get message details by ID") @console_ns.doc(params={"app_id": "Application ID", "message_id": "Message ID"}) - @console_ns.response(200, "Message retrieved successfully", message_detail_fields) + @console_ns.response(200, "Message retrieved successfully", message_detail_model) @console_ns.response(404, "Message not found") @get_app_model @setup_required @login_required @account_initialization_required - @marshal_with(message_detail_fields) + @marshal_with(message_detail_model) def get(self, app_model, message_id: str): message_id = str(message_id) diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py index b2f1997620..d46b8c5c9d 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -16,6 +16,9 @@ from 
libs.datetime_utils import naive_utc_now from libs.login import current_account_with_tenant, login_required from models import Site +# Register model for flask_restx to avoid dict type issues in Swagger +app_site_model = console_ns.model("AppSite", app_site_fields) + def parse_app_site_args(): parser = ( @@ -76,7 +79,7 @@ class AppSite(Resource): }, ) ) - @console_ns.response(200, "Site configuration updated successfully", app_site_fields) + @console_ns.response(200, "Site configuration updated successfully", app_site_model) @console_ns.response(403, "Insufficient permissions") @console_ns.response(404, "App not found") @setup_required @@ -84,7 +87,7 @@ class AppSite(Resource): @edit_permission_required @account_initialization_required @get_app_model - @marshal_with(app_site_fields) + @marshal_with(app_site_model) def post(self, app_model): args = parse_app_site_args() current_user, _ = current_account_with_tenant() @@ -126,7 +129,7 @@ class AppSiteAccessTokenReset(Resource): @console_ns.doc("reset_app_site_access_token") @console_ns.doc(description="Reset access token for application site") @console_ns.doc(params={"app_id": "Application ID"}) - @console_ns.response(200, "Access token reset successfully", app_site_fields) + @console_ns.response(200, "Access token reset successfully", app_site_model) @console_ns.response(403, "Insufficient permissions (admin/owner required)") @console_ns.response(404, "App or site not found") @setup_required @@ -134,7 +137,7 @@ class AppSiteAccessTokenReset(Resource): @is_admin_or_owner_required @account_initialization_required @get_app_model - @marshal_with(app_site_fields) + @marshal_with(app_site_model) def post(self, app_model): current_user, _ = current_account_with_tenant() site = db.session.query(Site).where(Site.app_id == app_model.id).first() diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index 9b5a4e895c..7b7a8defa5 100644 --- a/api/controllers/console/app/workflow.py +++ 
b/api/controllers/console/app/workflow.py @@ -32,6 +32,7 @@ from core.workflow.enums import NodeType from core.workflow.graph_engine.manager import GraphEngineManager from extensions.ext_database import db from factories import file_factory, variable_factory +from fields.member_fields import simple_account_fields from fields.workflow_fields import workflow_fields, workflow_pagination_fields from fields.workflow_run_fields import workflow_run_node_execution_fields from libs import helper @@ -49,6 +50,56 @@ from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseE logger = logging.getLogger(__name__) LISTENING_RETRY_IN = 2000 +# Register models for flask_restx to avoid dict type issues in Swagger +# Register in dependency order: base models first, then dependent models + +# Base models +simple_account_model = console_ns.model("SimpleAccount", simple_account_fields) + +from fields.workflow_fields import pipeline_variable_fields, serialize_value_type + +conversation_variable_model = console_ns.model( + "ConversationVariable", + { + "id": fields.String, + "name": fields.String, + "value_type": fields.String(attribute=serialize_value_type), + "value": fields.Raw, + "description": fields.String, + }, +) + +pipeline_variable_model = console_ns.model("PipelineVariable", pipeline_variable_fields) + +# Workflow model with nested dependencies +workflow_fields_copy = workflow_fields.copy() +workflow_fields_copy["created_by"] = fields.Nested(simple_account_model, attribute="created_by_account") +workflow_fields_copy["updated_by"] = fields.Nested( + simple_account_model, attribute="updated_by_account", allow_null=True +) +workflow_fields_copy["conversation_variables"] = fields.List(fields.Nested(conversation_variable_model)) +workflow_fields_copy["rag_pipeline_variables"] = fields.List(fields.Nested(pipeline_variable_model)) +workflow_model = console_ns.model("Workflow", workflow_fields_copy) + +# Workflow pagination model +workflow_pagination_fields_copy = 
workflow_pagination_fields.copy() +workflow_pagination_fields_copy["items"] = fields.List(fields.Nested(workflow_model), attribute="items") +workflow_pagination_model = console_ns.model("WorkflowPagination", workflow_pagination_fields_copy) + +# Reuse workflow_run_node_execution_model from workflow_run.py if already registered +# Otherwise register it here +from fields.end_user_fields import simple_end_user_fields + +try: + simple_end_user_model = console_ns.models.get("SimpleEndUser") +except (KeyError, AttributeError): + simple_end_user_model = console_ns.model("SimpleEndUser", simple_end_user_fields) + +try: + workflow_run_node_execution_model = console_ns.models.get("WorkflowRunNodeExecution") +except (KeyError, AttributeError): + workflow_run_node_execution_model = console_ns.model("WorkflowRunNodeExecution", workflow_run_node_execution_fields) + # TODO(QuantumGhost): Refactor existing node run API to handle file parameter parsing # at the controller level rather than in the workflow logic. 
This would improve separation @@ -73,13 +124,13 @@ class DraftWorkflowApi(Resource): @console_ns.doc("get_draft_workflow") @console_ns.doc(description="Get draft workflow for an application") @console_ns.doc(params={"app_id": "Application ID"}) - @console_ns.response(200, "Draft workflow retrieved successfully", workflow_fields) + @console_ns.response(200, "Draft workflow retrieved successfully", workflow_model) @console_ns.response(404, "Draft workflow not found") @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - @marshal_with(workflow_fields) + @marshal_with(workflow_model) @edit_permission_required def get(self, app_model: App): """ @@ -539,14 +590,14 @@ class DraftWorkflowNodeRunApi(Resource): }, ) ) - @console_ns.response(200, "Node run started successfully", workflow_run_node_execution_fields) + @console_ns.response(200, "Node run started successfully", workflow_run_node_execution_model) @console_ns.response(403, "Permission denied") @console_ns.response(404, "Node not found") @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - @marshal_with(workflow_run_node_execution_fields) + @marshal_with(workflow_run_node_execution_model) @edit_permission_required def post(self, app_model: App, node_id: str): """ @@ -598,13 +649,13 @@ class PublishedWorkflowApi(Resource): @console_ns.doc("get_published_workflow") @console_ns.doc(description="Get published workflow for an application") @console_ns.doc(params={"app_id": "Application ID"}) - @console_ns.response(200, "Published workflow retrieved successfully", workflow_fields) + @console_ns.response(200, "Published workflow retrieved successfully", workflow_model) @console_ns.response(404, "Published workflow not found") @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - 
@marshal_with(workflow_fields) + @marshal_with(workflow_model) @edit_permission_required def get(self, app_model: App): """ @@ -781,12 +832,12 @@ class PublishedAllWorkflowApi(Resource): @console_ns.doc("get_all_published_workflows") @console_ns.doc(description="Get all published workflows for an application") @console_ns.doc(params={"app_id": "Application ID"}) - @console_ns.response(200, "Published workflows retrieved successfully", workflow_pagination_fields) + @console_ns.response(200, "Published workflows retrieved successfully", workflow_pagination_model) @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - @marshal_with(workflow_pagination_fields) + @marshal_with(workflow_pagination_model) @edit_permission_required def get(self, app_model: App): """ @@ -838,14 +889,14 @@ class WorkflowByIdApi(Resource): }, ) ) - @console_ns.response(200, "Workflow updated successfully", workflow_fields) + @console_ns.response(200, "Workflow updated successfully", workflow_model) @console_ns.response(404, "Workflow not found") @console_ns.response(403, "Permission denied") @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - @marshal_with(workflow_fields) + @marshal_with(workflow_model) @edit_permission_required def patch(self, app_model: App, workflow_id: str): """ @@ -929,14 +980,14 @@ class DraftWorkflowNodeLastRunApi(Resource): @console_ns.doc("get_draft_workflow_node_last_run") @console_ns.doc(description="Get last run result for draft workflow node") @console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) - @console_ns.response(200, "Node last run retrieved successfully", workflow_run_node_execution_fields) + @console_ns.response(200, "Node last run retrieved successfully", workflow_run_node_execution_model) @console_ns.response(404, "Node last run not found") @console_ns.response(403, "Permission 
denied") @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - @marshal_with(workflow_run_node_execution_fields) + @marshal_with(workflow_run_node_execution_model) def get(self, app_model: App, node_id: str): srv = WorkflowService() workflow = srv.get_draft_workflow(app_model) diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py index fc1fa9cb13..677678cb8f 100644 --- a/api/controllers/console/app/workflow_app_log.py +++ b/api/controllers/console/app/workflow_app_log.py @@ -8,12 +8,15 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from core.workflow.enums import WorkflowExecutionStatus from extensions.ext_database import db -from fields.workflow_app_log_fields import workflow_app_log_pagination_fields +from fields.workflow_app_log_fields import build_workflow_app_log_pagination_model from libs.login import login_required from models import App from models.model import AppMode from services.workflow_app_service import WorkflowAppService +# Register model for flask_restx to avoid dict type issues in Swagger +workflow_app_log_pagination_model = build_workflow_app_log_pagination_model(console_ns) + @console_ns.route("/apps//workflow-app-logs") class WorkflowAppLogApi(Resource): @@ -33,12 +36,12 @@ class WorkflowAppLogApi(Resource): "limit": "Number of items per page (1-100)", } ) - @console_ns.response(200, "Workflow app logs retrieved successfully", workflow_app_log_pagination_fields) + @console_ns.response(200, "Workflow app logs retrieved successfully", workflow_app_log_pagination_model) @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) - @marshal_with(workflow_app_log_pagination_fields) + @marshal_with(workflow_app_log_pagination_model) def get(self, app_model: App): """ Get workflow 
app logs diff --git a/api/controllers/console/app/workflow_draft_variable.py b/api/controllers/console/app/workflow_draft_variable.py index 007061ae7a..41ae8727de 100644 --- a/api/controllers/console/app/workflow_draft_variable.py +++ b/api/controllers/console/app/workflow_draft_variable.py @@ -141,6 +141,37 @@ _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS = { "items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_FIELDS), attribute=_get_items), } +# Register models for flask_restx to avoid dict type issues in Swagger +workflow_draft_variable_without_value_model = console_ns.model( + "WorkflowDraftVariableWithoutValue", _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS +) + +workflow_draft_variable_model = console_ns.model("WorkflowDraftVariable", _WORKFLOW_DRAFT_VARIABLE_FIELDS) + +workflow_draft_env_variable_model = console_ns.model("WorkflowDraftEnvVariable", _WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS) + +workflow_draft_env_variable_list_fields_copy = _WORKFLOW_DRAFT_ENV_VARIABLE_LIST_FIELDS.copy() +workflow_draft_env_variable_list_fields_copy["items"] = fields.List(fields.Nested(workflow_draft_env_variable_model)) +workflow_draft_env_variable_list_model = console_ns.model( + "WorkflowDraftEnvVariableList", workflow_draft_env_variable_list_fields_copy +) + +workflow_draft_variable_list_without_value_fields_copy = _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS.copy() +workflow_draft_variable_list_without_value_fields_copy["items"] = fields.List( + fields.Nested(workflow_draft_variable_without_value_model), attribute=_get_items +) +workflow_draft_variable_list_without_value_model = console_ns.model( + "WorkflowDraftVariableListWithoutValue", workflow_draft_variable_list_without_value_fields_copy +) + +workflow_draft_variable_list_fields_copy = _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS.copy() +workflow_draft_variable_list_fields_copy["items"] = fields.List( + fields.Nested(workflow_draft_variable_model), attribute=_get_items +) +workflow_draft_variable_list_model = 
console_ns.model( + "WorkflowDraftVariableList", workflow_draft_variable_list_fields_copy +) + P = ParamSpec("P") R = TypeVar("R") @@ -176,10 +207,10 @@ class WorkflowVariableCollectionApi(Resource): @console_ns.doc(params={"app_id": "Application ID"}) @console_ns.doc(params={"page": "Page number (1-100000)", "limit": "Number of items per page (1-100)"}) @console_ns.response( - 200, "Workflow variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS + 200, "Workflow variables retrieved successfully", workflow_draft_variable_list_without_value_model ) @_api_prerequisite - @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS) + @marshal_with(workflow_draft_variable_list_without_value_model) def get(self, app_model: App): """ Get draft workflow @@ -242,9 +273,9 @@ class NodeVariableCollectionApi(Resource): @console_ns.doc("get_node_variables") @console_ns.doc(description="Get variables for a specific node") @console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"}) - @console_ns.response(200, "Node variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @console_ns.response(200, "Node variables retrieved successfully", workflow_draft_variable_list_model) @_api_prerequisite - @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @marshal_with(workflow_draft_variable_list_model) def get(self, app_model: App, node_id: str): validate_node_id(node_id) with Session(bind=db.engine, expire_on_commit=False) as session: @@ -275,10 +306,10 @@ class VariableApi(Resource): @console_ns.doc("get_variable") @console_ns.doc(description="Get a specific workflow variable") @console_ns.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"}) - @console_ns.response(200, "Variable retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS) + @console_ns.response(200, "Variable retrieved successfully", workflow_draft_variable_model) @console_ns.response(404, "Variable not found") @_api_prerequisite - 
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS) + @marshal_with(workflow_draft_variable_model) def get(self, app_model: App, variable_id: str): draft_var_srv = WorkflowDraftVariableService( session=db.session(), @@ -301,10 +332,10 @@ class VariableApi(Resource): }, ) ) - @console_ns.response(200, "Variable updated successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS) + @console_ns.response(200, "Variable updated successfully", workflow_draft_variable_model) @console_ns.response(404, "Variable not found") @_api_prerequisite - @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS) + @marshal_with(workflow_draft_variable_model) def patch(self, app_model: App, variable_id: str): # Request payload for file types: # @@ -390,7 +421,7 @@ class VariableResetApi(Resource): @console_ns.doc("reset_variable") @console_ns.doc(description="Reset a workflow variable to its default value") @console_ns.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"}) - @console_ns.response(200, "Variable reset successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS) + @console_ns.response(200, "Variable reset successfully", workflow_draft_variable_model) @console_ns.response(204, "Variable reset (no content)") @console_ns.response(404, "Variable not found") @_api_prerequisite @@ -416,7 +447,7 @@ class VariableResetApi(Resource): if resetted is None: return Response("", 204) else: - return marshal(resetted, _WORKFLOW_DRAFT_VARIABLE_FIELDS) + return marshal(resetted, workflow_draft_variable_model) def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList: @@ -438,10 +469,10 @@ class ConversationVariableCollectionApi(Resource): @console_ns.doc("get_conversation_variables") @console_ns.doc(description="Get conversation variables for workflow") @console_ns.doc(params={"app_id": "Application ID"}) - @console_ns.response(200, "Conversation variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @console_ns.response(200, "Conversation variables retrieved successfully", 
workflow_draft_variable_list_model) @console_ns.response(404, "Draft workflow not found") @_api_prerequisite - @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @marshal_with(workflow_draft_variable_list_model) def get(self, app_model: App): # NOTE(QuantumGhost): Prefill conversation variables into the draft variables table # so their IDs can be returned to the caller. @@ -460,9 +491,9 @@ class SystemVariableCollectionApi(Resource): @console_ns.doc("get_system_variables") @console_ns.doc(description="Get system variables for workflow") @console_ns.doc(params={"app_id": "Application ID"}) - @console_ns.response(200, "System variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @console_ns.response(200, "System variables retrieved successfully", workflow_draft_variable_list_model) @_api_prerequisite - @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @marshal_with(workflow_draft_variable_list_model) def get(self, app_model: App): return _get_variable_list(app_model, SYSTEM_VARIABLE_NODE_ID) diff --git a/api/controllers/console/app/workflow_run.py b/api/controllers/console/app/workflow_run.py index 51f7445ce0..c016104ce0 100644 --- a/api/controllers/console/app/workflow_run.py +++ b/api/controllers/console/app/workflow_run.py @@ -1,15 +1,20 @@ from typing import cast -from flask_restx import Resource, marshal_with, reqparse +from flask_restx import Resource, fields, marshal_with, reqparse from flask_restx.inputs import int_range from controllers.console import console_ns from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required +from fields.end_user_fields import simple_end_user_fields +from fields.member_fields import simple_account_fields from fields.workflow_run_fields import ( + advanced_chat_workflow_run_for_list_fields, advanced_chat_workflow_run_pagination_fields, workflow_run_count_fields, workflow_run_detail_fields, + workflow_run_for_list_fields, 
+ workflow_run_node_execution_fields, workflow_run_node_execution_list_fields, workflow_run_pagination_fields, ) @@ -22,6 +27,71 @@ from services.workflow_run_service import WorkflowRunService # Workflow run status choices for filtering WORKFLOW_RUN_STATUS_CHOICES = ["running", "succeeded", "failed", "stopped", "partial-succeeded"] +# Register models for flask_restx to avoid dict type issues in Swagger +# Register in dependency order: base models first, then dependent models + +# Base models +simple_account_model = console_ns.model("SimpleAccount", simple_account_fields) + +simple_end_user_model = console_ns.model("SimpleEndUser", simple_end_user_fields) + +# Models that depend on simple_account_fields +workflow_run_for_list_fields_copy = workflow_run_for_list_fields.copy() +workflow_run_for_list_fields_copy["created_by_account"] = fields.Nested( + simple_account_model, attribute="created_by_account", allow_null=True +) +workflow_run_for_list_model = console_ns.model("WorkflowRunForList", workflow_run_for_list_fields_copy) + +advanced_chat_workflow_run_for_list_fields_copy = advanced_chat_workflow_run_for_list_fields.copy() +advanced_chat_workflow_run_for_list_fields_copy["created_by_account"] = fields.Nested( + simple_account_model, attribute="created_by_account", allow_null=True +) +advanced_chat_workflow_run_for_list_model = console_ns.model( + "AdvancedChatWorkflowRunForList", advanced_chat_workflow_run_for_list_fields_copy +) + +workflow_run_detail_fields_copy = workflow_run_detail_fields.copy() +workflow_run_detail_fields_copy["created_by_account"] = fields.Nested( + simple_account_model, attribute="created_by_account", allow_null=True +) +workflow_run_detail_fields_copy["created_by_end_user"] = fields.Nested( + simple_end_user_model, attribute="created_by_end_user", allow_null=True +) +workflow_run_detail_model = console_ns.model("WorkflowRunDetail", workflow_run_detail_fields_copy) + +workflow_run_node_execution_fields_copy = 
workflow_run_node_execution_fields.copy() +workflow_run_node_execution_fields_copy["created_by_account"] = fields.Nested( + simple_account_model, attribute="created_by_account", allow_null=True +) +workflow_run_node_execution_fields_copy["created_by_end_user"] = fields.Nested( + simple_end_user_model, attribute="created_by_end_user", allow_null=True +) +workflow_run_node_execution_model = console_ns.model( + "WorkflowRunNodeExecution", workflow_run_node_execution_fields_copy +) + +# Simple models without nested dependencies +workflow_run_count_model = console_ns.model("WorkflowRunCount", workflow_run_count_fields) + +# Pagination models that depend on list models +advanced_chat_workflow_run_pagination_fields_copy = advanced_chat_workflow_run_pagination_fields.copy() +advanced_chat_workflow_run_pagination_fields_copy["data"] = fields.List( + fields.Nested(advanced_chat_workflow_run_for_list_model), attribute="data" +) +advanced_chat_workflow_run_pagination_model = console_ns.model( + "AdvancedChatWorkflowRunPagination", advanced_chat_workflow_run_pagination_fields_copy +) + +workflow_run_pagination_fields_copy = workflow_run_pagination_fields.copy() +workflow_run_pagination_fields_copy["data"] = fields.List(fields.Nested(workflow_run_for_list_model), attribute="data") +workflow_run_pagination_model = console_ns.model("WorkflowRunPagination", workflow_run_pagination_fields_copy) + +workflow_run_node_execution_list_fields_copy = workflow_run_node_execution_list_fields.copy() +workflow_run_node_execution_list_fields_copy["data"] = fields.List(fields.Nested(workflow_run_node_execution_model)) +workflow_run_node_execution_list_model = console_ns.model( + "WorkflowRunNodeExecutionList", workflow_run_node_execution_list_fields_copy +) + def _parse_workflow_run_list_args(): """ @@ -100,12 +170,12 @@ class AdvancedChatAppWorkflowRunListApi(Resource): @console_ns.doc( params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. 
Default: debugging"} ) - @console_ns.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields) + @console_ns.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_model) @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) - @marshal_with(advanced_chat_workflow_run_pagination_fields) + @marshal_with(advanced_chat_workflow_run_pagination_model) def get(self, app_model: App): """ Get advanced chat app workflow run list @@ -146,12 +216,12 @@ class AdvancedChatAppWorkflowRunCountApi(Resource): @console_ns.doc( params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"} ) - @console_ns.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields) + @console_ns.response(200, "Workflow runs count retrieved successfully", workflow_run_count_model) @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) - @marshal_with(workflow_run_count_fields) + @marshal_with(workflow_run_count_model) def get(self, app_model: App): """ Get advanced chat workflow runs count statistics @@ -188,12 +258,12 @@ class WorkflowRunListApi(Resource): @console_ns.doc( params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. 
Default: debugging"} ) - @console_ns.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields) + @console_ns.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_model) @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - @marshal_with(workflow_run_pagination_fields) + @marshal_with(workflow_run_pagination_model) def get(self, app_model: App): """ Get workflow run list @@ -234,12 +304,12 @@ class WorkflowRunCountApi(Resource): @console_ns.doc( params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"} ) - @console_ns.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields) + @console_ns.response(200, "Workflow runs count retrieved successfully", workflow_run_count_model) @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - @marshal_with(workflow_run_count_fields) + @marshal_with(workflow_run_count_model) def get(self, app_model: App): """ Get workflow runs count statistics @@ -269,13 +339,13 @@ class WorkflowRunDetailApi(Resource): @console_ns.doc("get_workflow_run_detail") @console_ns.doc(description="Get workflow run detail") @console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"}) - @console_ns.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_fields) + @console_ns.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_model) @console_ns.response(404, "Workflow run not found") @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - @marshal_with(workflow_run_detail_fields) + @marshal_with(workflow_run_detail_model) def get(self, app_model: App, run_id): """ Get workflow run detail @@ -293,13 +363,13 @@ class 
WorkflowRunNodeExecutionListApi(Resource): @console_ns.doc("get_workflow_run_node_executions") @console_ns.doc(description="Get workflow run node execution list") @console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"}) - @console_ns.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_fields) + @console_ns.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_model) @console_ns.response(404, "Workflow run not found") @setup_required @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) - @marshal_with(workflow_run_node_execution_list_fields) + @marshal_with(workflow_run_node_execution_list_model) def get(self, app_model: App, run_id): """ Get workflow run node execution list diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 54761413f4..45bc1fa694 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -8,7 +8,10 @@ from werkzeug.exceptions import Forbidden, NotFound import services from configs import dify_config from controllers.console import console_ns -from controllers.console.apikey import api_key_fields, api_key_list +from controllers.console.apikey import ( + api_key_item_model, + api_key_list_model, +) from controllers.console.app.error import ProviderNotInitializeError from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError from controllers.console.wraps import ( @@ -27,8 +30,22 @@ from core.rag.extractor.entity.datasource_type import DatasourceType from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db -from fields.app_fields import related_app_list -from fields.dataset_fields import 
dataset_detail_fields, dataset_query_detail_fields +from fields.app_fields import app_detail_kernel_fields, related_app_list +from fields.dataset_fields import ( + dataset_detail_fields, + dataset_fields, + dataset_query_detail_fields, + dataset_retrieval_model_fields, + doc_metadata_fields, + external_knowledge_info_fields, + external_retrieval_model_fields, + icon_info_fields, + keyword_setting_fields, + reranking_model_fields, + tag_fields, + vector_setting_fields, + weighted_score_fields, +) from fields.document_fields import document_status_fields from libs.login import current_account_with_tenant, login_required from libs.validators import validate_description_length @@ -38,6 +55,58 @@ from models.provider_ids import ModelProviderID from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService +def _get_or_create_model(model_name: str, field_def): + existing = console_ns.models.get(model_name) + if existing is None: + existing = console_ns.model(model_name, field_def) + return existing + + +# Register models for flask_restx to avoid dict type issues in Swagger +dataset_base_model = _get_or_create_model("DatasetBase", dataset_fields) + +tag_model = _get_or_create_model("Tag", tag_fields) + +keyword_setting_model = _get_or_create_model("DatasetKeywordSetting", keyword_setting_fields) +vector_setting_model = _get_or_create_model("DatasetVectorSetting", vector_setting_fields) + +weighted_score_fields_copy = weighted_score_fields.copy() +weighted_score_fields_copy["keyword_setting"] = fields.Nested(keyword_setting_model) +weighted_score_fields_copy["vector_setting"] = fields.Nested(vector_setting_model) +weighted_score_model = _get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy) + +reranking_model = _get_or_create_model("DatasetRerankingModel", reranking_model_fields) + +dataset_retrieval_model_fields_copy = dataset_retrieval_model_fields.copy() +dataset_retrieval_model_fields_copy["reranking_model"] = 
fields.Nested(reranking_model) +dataset_retrieval_model_fields_copy["weights"] = fields.Nested(weighted_score_model, allow_null=True) +dataset_retrieval_model = _get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy) + +external_knowledge_info_model = _get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields) + +external_retrieval_model = _get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields) + +doc_metadata_model = _get_or_create_model("DatasetDocMetadata", doc_metadata_fields) + +icon_info_model = _get_or_create_model("DatasetIconInfo", icon_info_fields) + +dataset_detail_fields_copy = dataset_detail_fields.copy() +dataset_detail_fields_copy["retrieval_model_dict"] = fields.Nested(dataset_retrieval_model) +dataset_detail_fields_copy["tags"] = fields.List(fields.Nested(tag_model)) +dataset_detail_fields_copy["external_knowledge_info"] = fields.Nested(external_knowledge_info_model) +dataset_detail_fields_copy["external_retrieval_model"] = fields.Nested(external_retrieval_model, allow_null=True) +dataset_detail_fields_copy["doc_metadata"] = fields.List(fields.Nested(doc_metadata_model)) +dataset_detail_fields_copy["icon_info"] = fields.Nested(icon_info_model) +dataset_detail_model = _get_or_create_model("DatasetDetail", dataset_detail_fields_copy) + +dataset_query_detail_model = _get_or_create_model("DatasetQueryDetail", dataset_query_detail_fields) + +app_detail_kernel_model = _get_or_create_model("AppDetailKernel", app_detail_kernel_fields) +related_app_list_copy = related_app_list.copy() +related_app_list_copy["data"] = fields.List(fields.Nested(app_detail_kernel_model)) +related_app_list_model = _get_or_create_model("RelatedAppList", related_app_list_copy) + + def _validate_name(name: str) -> str: if not name or len(name) < 1 or len(name) > 40: raise ValueError("Name must be between 1 to 40 characters.") @@ -282,7 +351,7 @@ class DatasetApi(Resource): @console_ns.doc("get_dataset") 
@console_ns.doc(description="Get dataset details") @console_ns.doc(params={"dataset_id": "Dataset ID"}) - @console_ns.response(200, "Dataset retrieved successfully", dataset_detail_fields) + @console_ns.response(200, "Dataset retrieved successfully", dataset_detail_model) @console_ns.response(404, "Dataset not found") @console_ns.response(403, "Permission denied") @setup_required @@ -342,7 +411,7 @@ class DatasetApi(Resource): }, ) ) - @console_ns.response(200, "Dataset updated successfully", dataset_detail_fields) + @console_ns.response(200, "Dataset updated successfully", dataset_detail_model) @console_ns.response(404, "Dataset not found") @console_ns.response(403, "Permission denied") @setup_required @@ -507,7 +576,7 @@ class DatasetQueryApi(Resource): @console_ns.doc("get_dataset_queries") @console_ns.doc(description="Get dataset query history") @console_ns.doc(params={"dataset_id": "Dataset ID"}) - @console_ns.response(200, "Query history retrieved successfully", dataset_query_detail_fields) + @console_ns.response(200, "Query history retrieved successfully", dataset_query_detail_model) @setup_required @login_required @account_initialization_required @@ -529,7 +598,7 @@ class DatasetQueryApi(Resource): dataset_queries, total = DatasetService.get_dataset_queries(dataset_id=dataset.id, page=page, per_page=limit) response = { - "data": marshal(dataset_queries, dataset_query_detail_fields), + "data": marshal(dataset_queries, dataset_query_detail_model), "has_more": len(dataset_queries) == limit, "limit": limit, "total": total, @@ -653,11 +722,11 @@ class DatasetRelatedAppListApi(Resource): @console_ns.doc("get_dataset_related_apps") @console_ns.doc(description="Get applications related to dataset") @console_ns.doc(params={"dataset_id": "Dataset ID"}) - @console_ns.response(200, "Related apps retrieved successfully", related_app_list) + @console_ns.response(200, "Related apps retrieved successfully", related_app_list_model) @setup_required @login_required 
@account_initialization_required - @marshal_with(related_app_list) + @marshal_with(related_app_list_model) def get(self, dataset_id): current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) @@ -740,11 +809,11 @@ class DatasetApiKeyApi(Resource): @console_ns.doc("get_dataset_api_keys") @console_ns.doc(description="Get dataset API keys") - @console_ns.response(200, "API keys retrieved successfully", api_key_list) + @console_ns.response(200, "API keys retrieved successfully", api_key_list_model) @setup_required @login_required @account_initialization_required - @marshal_with(api_key_list) + @marshal_with(api_key_list_model) def get(self): _, current_tenant_id = current_account_with_tenant() keys = db.session.scalars( @@ -756,7 +825,7 @@ class DatasetApiKeyApi(Resource): @login_required @is_admin_or_owner_required @account_initialization_required - @marshal_with(api_key_fields) + @marshal_with(api_key_item_model) def post(self): _, current_tenant_id = current_account_with_tenant() diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index b5761c9ada..2663c939bc 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -45,9 +45,11 @@ from core.plugin.impl.exc import PluginDaemonClientSideError from core.rag.extractor.entity.datasource_type import DatasourceType from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo from extensions.ext_database import db +from fields.dataset_fields import dataset_fields from fields.document_fields import ( dataset_and_document_fields, document_fields, + document_metadata_fields, document_status_fields, document_with_segments_fields, ) @@ -61,6 +63,36 @@ from services.entities.knowledge_entities.knowledge_entities import KnowledgeCon logger = logging.getLogger(__name__) +def _get_or_create_model(model_name: str, field_def): + existing 
= console_ns.models.get(model_name) + if existing is None: + existing = console_ns.model(model_name, field_def) + return existing + + +# Register models for flask_restx to avoid dict type issues in Swagger +dataset_model = _get_or_create_model("Dataset", dataset_fields) + +document_metadata_model = _get_or_create_model("DocumentMetadata", document_metadata_fields) + +document_fields_copy = document_fields.copy() +document_fields_copy["doc_metadata"] = fields.List( + fields.Nested(document_metadata_model), attribute="doc_metadata_details" +) +document_model = _get_or_create_model("Document", document_fields_copy) + +document_with_segments_fields_copy = document_with_segments_fields.copy() +document_with_segments_fields_copy["doc_metadata"] = fields.List( + fields.Nested(document_metadata_model), attribute="doc_metadata_details" +) +document_with_segments_model = _get_or_create_model("DocumentWithSegments", document_with_segments_fields_copy) + +dataset_and_document_fields_copy = dataset_and_document_fields.copy() +dataset_and_document_fields_copy["dataset"] = fields.Nested(dataset_model) +dataset_and_document_fields_copy["documents"] = fields.List(fields.Nested(document_model)) +dataset_and_document_model = _get_or_create_model("DatasetAndDocument", dataset_and_document_fields_copy) + + class DocumentResource(Resource): def get_document(self, dataset_id: str, document_id: str) -> Document: current_user, current_tenant_id = current_account_with_tenant() @@ -169,9 +201,8 @@ class DatasetDocumentListApi(Resource): @setup_required @login_required @account_initialization_required - def get(self, dataset_id): + def get(self, dataset_id: str): current_user, current_tenant_id = current_account_with_tenant() - dataset_id = str(dataset_id) page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) search = request.args.get("keyword", default=None, type=str) @@ -276,7 +307,7 @@ class DatasetDocumentListApi(Resource): 
@setup_required @login_required @account_initialization_required - @marshal_with(dataset_and_document_fields) + @marshal_with(dataset_and_document_model) @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id): @@ -370,12 +401,12 @@ class DatasetInitApi(Resource): }, ) ) - @console_ns.response(201, "Dataset initialized successfully", dataset_and_document_fields) + @console_ns.response(201, "Dataset initialized successfully", dataset_and_document_model) @console_ns.response(400, "Invalid request parameters") @setup_required @login_required @account_initialization_required - @marshal_with(dataset_and_document_fields) + @marshal_with(dataset_and_document_model) @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def post(self): diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py index f48f384e94..950884e496 100644 --- a/api/controllers/console/datasets/external.py +++ b/api/controllers/console/datasets/external.py @@ -6,7 +6,19 @@ import services from controllers.console import console_ns from controllers.console.datasets.error import DatasetNameDuplicateError from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required -from fields.dataset_fields import dataset_detail_fields +from fields.dataset_fields import ( + dataset_detail_fields, + dataset_retrieval_model_fields, + doc_metadata_fields, + external_knowledge_info_fields, + external_retrieval_model_fields, + icon_info_fields, + keyword_setting_fields, + reranking_model_fields, + tag_fields, + vector_setting_fields, + weighted_score_fields, +) from libs.login import current_account_with_tenant, login_required from services.dataset_service import DatasetService from services.external_knowledge_service import ExternalDatasetService @@ -14,6 +26,51 @@ from services.hit_testing_service 
import HitTestingService from services.knowledge_service import ExternalDatasetTestService +def _get_or_create_model(model_name: str, field_def): + existing = console_ns.models.get(model_name) + if existing is None: + existing = console_ns.model(model_name, field_def) + return existing + + +def _build_dataset_detail_model(): + keyword_setting_model = _get_or_create_model("DatasetKeywordSetting", keyword_setting_fields) + vector_setting_model = _get_or_create_model("DatasetVectorSetting", vector_setting_fields) + + weighted_score_fields_copy = weighted_score_fields.copy() + weighted_score_fields_copy["keyword_setting"] = fields.Nested(keyword_setting_model) + weighted_score_fields_copy["vector_setting"] = fields.Nested(vector_setting_model) + weighted_score_model = _get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy) + + reranking_model = _get_or_create_model("DatasetRerankingModel", reranking_model_fields) + + dataset_retrieval_model_fields_copy = dataset_retrieval_model_fields.copy() + dataset_retrieval_model_fields_copy["reranking_model"] = fields.Nested(reranking_model) + dataset_retrieval_model_fields_copy["weights"] = fields.Nested(weighted_score_model, allow_null=True) + dataset_retrieval_model = _get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy) + + tag_model = _get_or_create_model("Tag", tag_fields) + doc_metadata_model = _get_or_create_model("DatasetDocMetadata", doc_metadata_fields) + external_knowledge_info_model = _get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields) + external_retrieval_model = _get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields) + icon_info_model = _get_or_create_model("DatasetIconInfo", icon_info_fields) + + dataset_detail_fields_copy = dataset_detail_fields.copy() + dataset_detail_fields_copy["retrieval_model_dict"] = fields.Nested(dataset_retrieval_model) + dataset_detail_fields_copy["tags"] = fields.List(fields.Nested(tag_model)) 
+ dataset_detail_fields_copy["external_knowledge_info"] = fields.Nested(external_knowledge_info_model) + dataset_detail_fields_copy["external_retrieval_model"] = fields.Nested(external_retrieval_model, allow_null=True) + dataset_detail_fields_copy["doc_metadata"] = fields.List(fields.Nested(doc_metadata_model)) + dataset_detail_fields_copy["icon_info"] = fields.Nested(icon_info_model) + return _get_or_create_model("DatasetDetail", dataset_detail_fields_copy) + + +try: + dataset_detail_model = console_ns.models["DatasetDetail"] +except KeyError: + dataset_detail_model = _build_dataset_detail_model() + + def _validate_name(name: str) -> str: if not name or len(name) < 1 or len(name) > 100: raise ValueError("Name must be between 1 to 100 characters.") @@ -194,7 +251,7 @@ class ExternalDatasetCreateApi(Resource): }, ) ) - @console_ns.response(201, "External dataset created successfully", dataset_detail_fields) + @console_ns.response(201, "External dataset created successfully", dataset_detail_model) @console_ns.response(400, "Invalid parameters") @console_ns.response(403, "Permission denied") @setup_required diff --git a/api/controllers/console/extension.py b/api/controllers/console/extension.py index 6f92b9744f..08f29b4655 100644 --- a/api/controllers/console/extension.py +++ b/api/controllers/console/extension.py @@ -9,6 +9,10 @@ from models.api_based_extension import APIBasedExtension from services.api_based_extension_service import APIBasedExtensionService from services.code_based_extension_service import CodeBasedExtensionService +api_based_extension_model = console_ns.model("ApiBasedExtensionModel", api_based_extension_fields) + +api_based_extension_list_model = fields.List(fields.Nested(api_based_extension_model)) + @console_ns.route("/code-based-extension") class CodeBasedExtensionAPI(Resource): @@ -41,11 +45,11 @@ class CodeBasedExtensionAPI(Resource): class APIBasedExtensionAPI(Resource): @console_ns.doc("get_api_based_extensions") 
@console_ns.doc(description="Get all API-based extensions for current tenant") - @console_ns.response(200, "Success", fields.List(fields.Nested(api_based_extension_fields))) + @console_ns.response(200, "Success", api_based_extension_list_model) @setup_required @login_required @account_initialization_required - @marshal_with(api_based_extension_fields) + @marshal_with(api_based_extension_model) def get(self): _, tenant_id = current_account_with_tenant() return APIBasedExtensionService.get_all_by_tenant_id(tenant_id) @@ -62,11 +66,11 @@ class APIBasedExtensionAPI(Resource): }, ) ) - @console_ns.response(201, "Extension created successfully", api_based_extension_fields) + @console_ns.response(201, "Extension created successfully", api_based_extension_model) @setup_required @login_required @account_initialization_required - @marshal_with(api_based_extension_fields) + @marshal_with(api_based_extension_model) def post(self): args = console_ns.payload _, current_tenant_id = current_account_with_tenant() @@ -86,11 +90,11 @@ class APIBasedExtensionDetailAPI(Resource): @console_ns.doc("get_api_based_extension") @console_ns.doc(description="Get API-based extension by ID") @console_ns.doc(params={"id": "Extension ID"}) - @console_ns.response(200, "Success", api_based_extension_fields) + @console_ns.response(200, "Success", api_based_extension_model) @setup_required @login_required @account_initialization_required - @marshal_with(api_based_extension_fields) + @marshal_with(api_based_extension_model) def get(self, id): api_based_extension_id = str(id) _, tenant_id = current_account_with_tenant() @@ -110,11 +114,11 @@ class APIBasedExtensionDetailAPI(Resource): }, ) ) - @console_ns.response(200, "Extension updated successfully", api_based_extension_fields) + @console_ns.response(200, "Extension updated successfully", api_based_extension_model) @setup_required @login_required @account_initialization_required - @marshal_with(api_based_extension_fields) + 
@marshal_with(api_based_extension_model) def post(self, id): api_based_extension_id = str(id) _, current_tenant_id = current_account_with_tenant() From 034e3e85e98edabfdbae1dd7532a178dc6b35d01 Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Mon, 24 Nov 2025 21:00:40 +0800 Subject: [PATCH 19/22] Fix Node.js SDK routes and multipart handling (#28573) --- sdks/nodejs-client/babel.config.cjs | 12 ++++ sdks/nodejs-client/index.js | 30 ++++------ sdks/nodejs-client/index.test.js | 88 +++++++++++++++++++++++++++-- sdks/nodejs-client/jest.config.cjs | 6 ++ sdks/nodejs-client/package.json | 5 -- 5 files changed, 111 insertions(+), 30 deletions(-) create mode 100644 sdks/nodejs-client/babel.config.cjs create mode 100644 sdks/nodejs-client/jest.config.cjs diff --git a/sdks/nodejs-client/babel.config.cjs b/sdks/nodejs-client/babel.config.cjs new file mode 100644 index 0000000000..392abb66d8 --- /dev/null +++ b/sdks/nodejs-client/babel.config.cjs @@ -0,0 +1,12 @@ +module.exports = { + presets: [ + [ + "@babel/preset-env", + { + targets: { + node: "current", + }, + }, + ], + ], +}; diff --git a/sdks/nodejs-client/index.js b/sdks/nodejs-client/index.js index 3025cc2ab6..9743ae358c 100644 --- a/sdks/nodejs-client/index.js +++ b/sdks/nodejs-client/index.js @@ -71,7 +71,7 @@ export const routes = { }, stopWorkflow: { method: "POST", - url: (task_id) => `/workflows/${task_id}/stop`, + url: (task_id) => `/workflows/tasks/${task_id}/stop`, } }; @@ -94,11 +94,13 @@ export class DifyClient { stream = false, headerParams = {} ) { + const isFormData = + (typeof FormData !== "undefined" && data instanceof FormData) || + (data && data.constructor && data.constructor.name === "FormData"); const headers = { - - Authorization: `Bearer ${this.apiKey}`, - "Content-Type": "application/json", - ...headerParams + Authorization: `Bearer ${this.apiKey}`, + ...(isFormData ? 
{} : { "Content-Type": "application/json" }), + ...headerParams, }; const url = `${this.baseUrl}${endpoint}`; @@ -152,12 +154,7 @@ export class DifyClient { return this.sendRequest( routes.fileUpload.method, routes.fileUpload.url(), - data, - null, - false, - { - "Content-Type": 'multipart/form-data' - } + data ); } @@ -179,8 +176,8 @@ export class DifyClient { getMeta(user) { const params = { user }; return this.sendRequest( - routes.meta.method, - routes.meta.url(), + routes.getMeta.method, + routes.getMeta.url(), null, params ); @@ -320,12 +317,7 @@ export class ChatClient extends DifyClient { return this.sendRequest( routes.audioToText.method, routes.audioToText.url(), - data, - null, - false, - { - "Content-Type": 'multipart/form-data' - } + data ); } diff --git a/sdks/nodejs-client/index.test.js b/sdks/nodejs-client/index.test.js index 1f5d6edb06..e3a1715238 100644 --- a/sdks/nodejs-client/index.test.js +++ b/sdks/nodejs-client/index.test.js @@ -1,9 +1,13 @@ -import { DifyClient, BASE_URL, routes } from "."; +import { DifyClient, WorkflowClient, BASE_URL, routes } from "."; import axios from 'axios' jest.mock('axios') +afterEach(() => { + jest.resetAllMocks() +}) + describe('Client', () => { let difyClient beforeEach(() => { @@ -27,13 +31,9 @@ describe('Send Requests', () => { difyClient = new DifyClient('test') }) - afterEach(() => { - jest.resetAllMocks() - }) - it('should make a successful request to the application parameter', async () => { const method = 'GET' - const endpoint = routes.application.url + const endpoint = routes.application.url() const expectedResponse = { data: 'response' } axios.mockResolvedValue(expectedResponse) @@ -62,4 +62,80 @@ describe('Send Requests', () => { errorMessage ) }) + + it('uses the getMeta route configuration', async () => { + axios.mockResolvedValue({ data: 'ok' }) + await difyClient.getMeta('end-user') + + expect(axios).toHaveBeenCalledWith({ + method: routes.getMeta.method, + url: 
`${BASE_URL}${routes.getMeta.url()}`, + params: { user: 'end-user' }, + headers: { + Authorization: `Bearer ${difyClient.apiKey}`, + 'Content-Type': 'application/json', + }, + responseType: 'json', + }) + }) +}) + +describe('File uploads', () => { + let difyClient + const OriginalFormData = global.FormData + + beforeAll(() => { + global.FormData = class FormDataMock {} + }) + + afterAll(() => { + global.FormData = OriginalFormData + }) + + beforeEach(() => { + difyClient = new DifyClient('test') + }) + + it('does not override multipart boundary headers for FormData', async () => { + const form = new FormData() + axios.mockResolvedValue({ data: 'ok' }) + + await difyClient.fileUpload(form) + + expect(axios).toHaveBeenCalledWith({ + method: routes.fileUpload.method, + url: `${BASE_URL}${routes.fileUpload.url()}`, + data: form, + params: null, + headers: { + Authorization: `Bearer ${difyClient.apiKey}`, + }, + responseType: 'json', + }) + }) +}) + +describe('Workflow client', () => { + let workflowClient + + beforeEach(() => { + workflowClient = new WorkflowClient('test') + }) + + it('uses tasks stop path for workflow stop', async () => { + axios.mockResolvedValue({ data: 'stopped' }) + await workflowClient.stop('task-1', 'end-user') + + expect(axios).toHaveBeenCalledWith({ + method: routes.stopWorkflow.method, + url: `${BASE_URL}${routes.stopWorkflow.url('task-1')}`, + data: { user: 'end-user' }, + params: null, + headers: { + Authorization: `Bearer ${workflowClient.apiKey}`, + 'Content-Type': 'application/json', + }, + responseType: 'json', + }) + }) }) diff --git a/sdks/nodejs-client/jest.config.cjs b/sdks/nodejs-client/jest.config.cjs new file mode 100644 index 0000000000..ea0fb34ad1 --- /dev/null +++ b/sdks/nodejs-client/jest.config.cjs @@ -0,0 +1,6 @@ +module.exports = { + testEnvironment: "node", + transform: { + "^.+\\.[tj]sx?$": "babel-jest", + }, +}; diff --git a/sdks/nodejs-client/package.json b/sdks/nodejs-client/package.json index cd3bcc4bce..c6bb0a9c1f 
100644 --- a/sdks/nodejs-client/package.json +++ b/sdks/nodejs-client/package.json @@ -18,11 +18,6 @@ "scripts": { "test": "jest" }, - "jest": { - "transform": { - "^.+\\.[t|j]sx?$": "babel-jest" - } - }, "dependencies": { "axios": "^1.3.5" }, From da98a38b14565f08bc38d6964084c40cb7335cdc Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Mon, 24 Nov 2025 21:01:32 +0800 Subject: [PATCH 20/22] fix: i18n: standardize trigger events terminology in billing translations (#28543) Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- web/i18n/it-IT/billing.ts | 10 +++++----- web/i18n/pt-BR/billing.ts | 8 ++++---- web/i18n/ro-RO/billing.ts | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/web/i18n/it-IT/billing.ts b/web/i18n/it-IT/billing.ts index 60fe22bf6d..1b5b6dc934 100644 --- a/web/i18n/it-IT/billing.ts +++ b/web/i18n/it-IT/billing.ts @@ -115,7 +115,7 @@ const translation = { tooltip: 'Priorità e velocità della coda di esecuzione del flusso di lavoro.', }, startNodes: { - unlimited: 'Trigger/workflow illimitati', + unlimited: 'Eventi di attivazione/workflow illimitati', }, }, plans: { @@ -211,15 +211,15 @@ const translation = { documentsUploadQuota: 'Quota di Caricamento Documenti', vectorSpaceTooltip: 'I documenti con la modalità di indicizzazione ad alta qualità consumeranno risorse di Knowledge Data Storage. 
Quando il Knowledge Data Storage raggiunge il limite, nuovi documenti non verranno caricati.', perMonth: 'al mese', - triggerEvents: 'Eventi scatenanti', + triggerEvents: 'Eventi di attivazione', }, teamMembers: 'Membri del team', triggerLimitModal: { upgrade: 'Aggiornamento', dismiss: 'Ignora', - usageTitle: 'EVENTI SCATENANTI', - title: 'Aggiorna per sbloccare più eventi trigger', - description: 'Hai raggiunto il limite dei trigger degli eventi del flusso di lavoro per questo piano.', + usageTitle: 'EVENTI DI ATTIVAZIONE', + title: 'Aggiorna per sbloccare più eventi di attivazione', + description: 'Hai raggiunto il limite degli eventi di attivazione del flusso di lavoro per questo piano.', }, } diff --git a/web/i18n/pt-BR/billing.ts b/web/i18n/pt-BR/billing.ts index baec9813f4..17efe6ff34 100644 --- a/web/i18n/pt-BR/billing.ts +++ b/web/i18n/pt-BR/billing.ts @@ -107,7 +107,7 @@ const translation = { standard: 'Execução Padrão de Fluxo de Trabalho', }, startNodes: { - unlimited: 'Gatilhos/workflow ilimitados', + unlimited: 'Eventos de Gatilho/fluxo de trabalho ilimitados', }, }, plans: { @@ -200,15 +200,15 @@ const translation = { vectorSpaceTooltip: 'Documentos com o modo de indexação de Alta Qualidade consumirã recursos de Armazenamento de Dados de Conhecimento. 
Quando o Armazenamento de Dados de Conhecimento atingir o limite, novos documentos não serão carregados.', buildApps: 'Desenvolver Apps', perMonth: 'por mês', - triggerEvents: 'Eventos Desencadeadores', + triggerEvents: 'Eventos de Gatilho', }, teamMembers: 'Membros da equipe', triggerLimitModal: { dismiss: 'Dispensar', - usageTitle: 'EVENTOS DESENCADEADORES', + usageTitle: 'EVENTOS DE GATILHO', title: 'Atualize para desbloquear mais eventos de gatilho', upgrade: 'Atualizar', - description: 'Você atingiu o limite de gatilhos de eventos de fluxo de trabalho para este plano.', + description: 'Você atingiu o limite de eventos de gatilho de fluxo de trabalho para este plano.', }, } diff --git a/web/i18n/ro-RO/billing.ts b/web/i18n/ro-RO/billing.ts index 8b25b6e23d..d7b000bd45 100644 --- a/web/i18n/ro-RO/billing.ts +++ b/web/i18n/ro-RO/billing.ts @@ -206,8 +206,8 @@ const translation = { triggerLimitModal: { dismiss: 'Respinge', upgrade: 'Actualizare', - usageTitle: 'EVENIMENTE TRIGER', - description: 'Ai atins limita de declanșatoare de evenimente de flux de lucru pentru acest plan.', + usageTitle: 'EVENIMENTE DECLANȘATOARE', + description: 'Ai atins limita de evenimente declanșatoare de flux de lucru pentru acest plan.', title: 'Actualizează pentru a debloca mai multe evenimente declanșatoare', }, } From 751ce4ec41ba872c294aca71041b1474ce91be34 Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Mon, 24 Nov 2025 22:01:46 +0900 Subject: [PATCH 21/22] more typed orm (#28577) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- api/core/rag/embedding/cached_embedding.py | 3 +- api/models/dataset.py | 94 +++++++++++-------- api/models/model.py | 90 ++++++++++-------- api/models/trigger.py | 27 +++--- api/models/workflow.py | 2 +- api/services/async_workflow_service.py | 6 ++ api/services/message_service.py | 1 + .../rag_pipeline/rag_pipeline_dsl_service.py | 13 +-- .../rag_pipeline_transform_service.py | 17 ++-- api/services/tag_service.py | 2 +- 
api/tasks/trigger_processing_tasks.py | 2 + .../tasks/test_clean_dataset_task.py | 21 +++-- .../tasks/test_rag_pipeline_run_tasks.py | 2 +- 13 files changed, 163 insertions(+), 117 deletions(-) diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index 937b8f033c..7fb20c1941 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -1,5 +1,6 @@ import base64 import logging +import pickle from typing import Any, cast import numpy as np @@ -89,8 +90,8 @@ class CacheEmbedding(Embeddings): model_name=self._model_instance.model, hash=hash, provider_name=self._model_instance.provider, + embedding=pickle.dumps(n_embedding, protocol=pickle.HIGHEST_PROTOCOL), ) - embedding_cache.set_embedding(n_embedding) db.session.add(embedding_cache) cache_embeddings.append(hash) db.session.commit() diff --git a/api/models/dataset.py b/api/models/dataset.py index 3f2d16d3bd..2ea6d98b5f 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -307,7 +307,7 @@ class Dataset(Base): return f"{dify_config.VECTOR_INDEX_NAME_PREFIX}_{normalized_dataset_id}_Node" -class DatasetProcessRule(Base): +class DatasetProcessRule(Base): # bug __tablename__ = "dataset_process_rules" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="dataset_process_rule_pkey"), @@ -1004,7 +1004,7 @@ class DatasetKeywordTable(TypeBase): return None -class Embedding(Base): +class Embedding(TypeBase): __tablename__ = "embeddings" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="embedding_pkey"), @@ -1012,12 +1012,16 @@ class Embedding(Base): sa.Index("created_at_idx", "created_at"), ) - id = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4())) - model_name = mapped_column(String(255), nullable=False, server_default=sa.text("'text-embedding-ada-002'")) - hash = mapped_column(String(64), nullable=False) - embedding = mapped_column(BinaryData, nullable=False) - created_at: Mapped[datetime] = 
mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - provider_name = mapped_column(String(255), nullable=False, server_default=sa.text("''")) + id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4()), init=False) + model_name: Mapped[str] = mapped_column( + String(255), nullable=False, server_default=sa.text("'text-embedding-ada-002'") + ) + hash: Mapped[str] = mapped_column(String(64), nullable=False) + embedding: Mapped[bytes] = mapped_column(BinaryData, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + provider_name: Mapped[str] = mapped_column(String(255), nullable=False, server_default=sa.text("''")) def set_embedding(self, embedding_data: list[float]): self.embedding = pickle.dumps(embedding_data, protocol=pickle.HIGHEST_PROTOCOL) @@ -1214,7 +1218,7 @@ class RateLimitLog(TypeBase): ) -class DatasetMetadata(Base): +class DatasetMetadata(TypeBase): __tablename__ = "dataset_metadatas" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="dataset_metadata_pkey"), @@ -1222,20 +1226,26 @@ class DatasetMetadata(Base): sa.Index("dataset_metadata_dataset_idx", "dataset_id"), ) - id = mapped_column(StringUUID, default=lambda: str(uuid4())) - tenant_id = mapped_column(StringUUID, nullable=False) - dataset_id = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False) type: Mapped[str] = mapped_column(String(255), nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp()) - updated_at: Mapped[datetime] = mapped_column( - DateTime, nullable=False, 
server_default=sa.func.current_timestamp(), onupdate=func.current_timestamp() + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=sa.func.current_timestamp(), init=False ) - created_by = mapped_column(StringUUID, nullable=False) - updated_by = mapped_column(StringUUID, nullable=True) + updated_at: Mapped[datetime] = mapped_column( + DateTime, + nullable=False, + server_default=sa.func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, + ) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + updated_by: Mapped[str] = mapped_column(StringUUID, nullable=True, default=None) -class DatasetMetadataBinding(Base): +class DatasetMetadataBinding(TypeBase): __tablename__ = "dataset_metadata_bindings" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="dataset_metadata_binding_pkey"), @@ -1245,13 +1255,15 @@ class DatasetMetadataBinding(Base): sa.Index("dataset_metadata_binding_document_idx", "document_id"), ) - id = mapped_column(StringUUID, default=lambda: str(uuid4())) - tenant_id = mapped_column(StringUUID, nullable=False) - dataset_id = mapped_column(StringUUID, nullable=False) - metadata_id = mapped_column(StringUUID, nullable=False) - document_id = mapped_column(StringUUID, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - created_by = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + metadata_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + document_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + created_by: Mapped[str] = 
mapped_column(StringUUID, nullable=False) class PipelineBuiltInTemplate(TypeBase): @@ -1319,22 +1331,30 @@ class PipelineCustomizedTemplate(TypeBase): return "" -class Pipeline(Base): # type: ignore[name-defined] +class Pipeline(TypeBase): __tablename__ = "pipelines" __table_args__ = (sa.PrimaryKeyConstraint("id", name="pipeline_pkey"),) - id = mapped_column(StringUUID, default=lambda: str(uuidv7())) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - name = mapped_column(sa.String(255), nullable=False) - description = mapped_column(LongText, nullable=False, default=sa.text("''")) - workflow_id = mapped_column(StringUUID, nullable=True) - is_public = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) - is_published = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) - created_by = mapped_column(StringUUID, nullable=True) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) - updated_by = mapped_column(StringUUID, nullable=True) - updated_at = mapped_column( - sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() + name: Mapped[str] = mapped_column(sa.String(255), nullable=False) + description: Mapped[str] = mapped_column(LongText, nullable=False, default=sa.text("''")) + workflow_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) + is_public: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"), default=False) + is_published: Mapped[bool] = mapped_column( + sa.Boolean, nullable=False, server_default=sa.text("false"), default=False + ) + created_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) 
+ updated_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, ) def retrieve_dataset(self, session: Session): diff --git a/api/models/model.py b/api/models/model.py index e2b9da46f1..fb084d1dc6 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -533,7 +533,7 @@ class AppModelConfig(Base): return self -class RecommendedApp(Base): +class RecommendedApp(Base): # bug __tablename__ = "recommended_apps" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="recommended_app_pkey"), @@ -1294,7 +1294,7 @@ class Message(Base): ) -class MessageFeedback(Base): +class MessageFeedback(TypeBase): __tablename__ = "message_feedbacks" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="message_feedback_pkey"), @@ -1303,18 +1303,24 @@ class MessageFeedback(Base): sa.Index("message_feedback_conversation_idx", "conversation_id", "from_source", "rating"), ) - id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4())) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False) message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) rating: Mapped[str] = mapped_column(String(255), nullable=False) - content: Mapped[str | None] = mapped_column(LongText) from_source: Mapped[str] = mapped_column(String(255), nullable=False) - from_end_user_id: Mapped[str | None] = mapped_column(StringUUID) - from_account_id: Mapped[str | None] = mapped_column(StringUUID) - created_at: Mapped[datetime] = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + content: Mapped[str | None] = mapped_column(LongText, nullable=True, default=None) + from_end_user_id: Mapped[str | None] 
= mapped_column(StringUUID, nullable=True, default=None) + from_account_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) updated_at: Mapped[datetime] = mapped_column( - sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() + sa.DateTime, + nullable=False, + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, ) @property @@ -1467,22 +1473,28 @@ class AppAnnotationSetting(TypeBase): return collection_binding_detail -class OperationLog(Base): +class OperationLog(TypeBase): __tablename__ = "operation_logs" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="operation_log_pkey"), sa.Index("operation_log_account_action_idx", "tenant_id", "account_id", "action"), ) - id = mapped_column(StringUUID, default=lambda: str(uuid4())) - tenant_id = mapped_column(StringUUID, nullable=False) - account_id = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + account_id: Mapped[str] = mapped_column(StringUUID, nullable=False) action: Mapped[str] = mapped_column(String(255), nullable=False) - content = mapped_column(sa.JSON) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + content: Mapped[Any] = mapped_column(sa.JSON) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) created_ip: Mapped[str] = mapped_column(String(255), nullable=False) - updated_at = mapped_column( - sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() + updated_at: Mapped[datetime] = mapped_column( + sa.DateTime, + nullable=False, + 
server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + init=False, ) @@ -1627,7 +1639,7 @@ class Site(Base): return dify_config.APP_WEB_URL or request.url_root.rstrip("/") -class ApiToken(Base): +class ApiToken(Base): # bug: this uses setattr so idk the field. __tablename__ = "api_tokens" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="api_token_pkey"), @@ -1887,34 +1899,36 @@ class MessageAgentThought(Base): return {} -class DatasetRetrieverResource(Base): +class DatasetRetrieverResource(TypeBase): __tablename__ = "dataset_retriever_resources" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="dataset_retriever_resource_pkey"), sa.Index("dataset_retriever_resource_message_id_idx", "message_id"), ) - id = mapped_column(StringUUID, default=lambda: str(uuid4())) - message_id = mapped_column(StringUUID, nullable=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) + message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) position: Mapped[int] = mapped_column(sa.Integer, nullable=False) - dataset_id = mapped_column(StringUUID, nullable=False) - dataset_name = mapped_column(LongText, nullable=False) - document_id = mapped_column(StringUUID, nullable=True) - document_name = mapped_column(LongText, nullable=False) - data_source_type = mapped_column(LongText, nullable=True) - segment_id = mapped_column(StringUUID, nullable=True) + dataset_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + dataset_name: Mapped[str] = mapped_column(LongText, nullable=False) + document_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) + document_name: Mapped[str] = mapped_column(LongText, nullable=False) + data_source_type: Mapped[str | None] = mapped_column(LongText, nullable=True) + segment_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) score: Mapped[float | None] = mapped_column(sa.Float, nullable=True) - content = mapped_column(LongText, nullable=False) + 
content: Mapped[str] = mapped_column(LongText, nullable=False) hit_count: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) word_count: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) segment_position: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) - index_node_hash = mapped_column(LongText, nullable=True) - retriever_from = mapped_column(LongText, nullable=False) - created_by = mapped_column(StringUUID, nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.func.current_timestamp()) + index_node_hash: Mapped[str | None] = mapped_column(LongText, nullable=True) + retriever_from: Mapped[str] = mapped_column(LongText, nullable=False) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=sa.func.current_timestamp(), init=False + ) -class Tag(Base): +class Tag(TypeBase): __tablename__ = "tags" __table_args__ = ( sa.PrimaryKeyConstraint("id", name="tag_pkey"), @@ -1924,12 +1938,14 @@ class Tag(Base): TAG_TYPE_LIST = ["knowledge", "app"] - id = mapped_column(StringUUID, default=lambda: str(uuid4())) - tenant_id = mapped_column(StringUUID, nullable=True) - type = mapped_column(String(16), nullable=False) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False) + tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) + type: Mapped[str] = mapped_column(String(16), nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False) - created_by = mapped_column(StringUUID, nullable=False) - created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column( + sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) class TagBinding(TypeBase): diff --git 
a/api/models/trigger.py b/api/models/trigger.py index e89309551a..088e797f82 100644 --- a/api/models/trigger.py +++ b/api/models/trigger.py @@ -17,7 +17,7 @@ from core.trigger.utils.endpoint import generate_plugin_trigger_endpoint_url, ge from libs.datetime_utils import naive_utc_now from libs.uuid_utils import uuidv7 -from .base import Base, TypeBase +from .base import TypeBase from .engine import db from .enums import AppTriggerStatus, AppTriggerType, CreatorUserRole, WorkflowTriggerStatus from .model import Account @@ -160,7 +160,7 @@ class TriggerOAuthTenantClient(TypeBase): return cast(Mapping[str, Any], json.loads(self.encrypted_oauth_params or "{}")) -class WorkflowTriggerLog(Base): +class WorkflowTriggerLog(TypeBase): """ Workflow Trigger Log @@ -202,7 +202,7 @@ class WorkflowTriggerLog(Base): sa.Index("workflow_trigger_log_workflow_id_idx", "workflow_id"), ) - id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7())) + id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuidv7()), init=False) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False) @@ -214,24 +214,21 @@ class WorkflowTriggerLog(Base): inputs: Mapped[str] = mapped_column(LongText, nullable=False) # Just inputs for easy viewing outputs: Mapped[str | None] = mapped_column(LongText, nullable=True) - status: Mapped[str] = mapped_column( - EnumText(WorkflowTriggerStatus, length=50), nullable=False, default=WorkflowTriggerStatus.PENDING - ) + status: Mapped[str] = mapped_column(EnumText(WorkflowTriggerStatus, length=50), nullable=False) error: Mapped[str | None] = mapped_column(LongText, nullable=True) queue_name: Mapped[str] = mapped_column(String(100), nullable=False) celery_task_id: Mapped[str | None] = mapped_column(String(255), nullable=True) - retry_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) 
- - elapsed_time: Mapped[float | None] = mapped_column(sa.Float, nullable=True) - total_tokens: Mapped[int | None] = mapped_column(sa.Integer, nullable=True) - - created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) created_by_role: Mapped[str] = mapped_column(String(255), nullable=False) created_by: Mapped[str] = mapped_column(String(255), nullable=False) - - triggered_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - finished_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + retry_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0) + elapsed_time: Mapped[float | None] = mapped_column(sa.Float, nullable=True, default=None) + total_tokens: Mapped[int | None] = mapped_column(sa.Integer, nullable=True, default=None) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.current_timestamp(), init=False + ) + triggered_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None) + finished_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True, default=None) @property def created_by_account(self): diff --git a/api/models/workflow.py b/api/models/workflow.py index 0280353d45..f206a6a870 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -96,7 +96,7 @@ class _InvalidGraphDefinitionError(Exception): pass -class Workflow(Base): +class Workflow(Base): # bug """ Workflow, for `Workflow App` and `Chat App workflow mode`. 
diff --git a/api/services/async_workflow_service.py b/api/services/async_workflow_service.py index 8d62f121e2..e100582511 100644 --- a/api/services/async_workflow_service.py +++ b/api/services/async_workflow_service.py @@ -113,6 +113,8 @@ class AsyncWorkflowService: trigger_data.trigger_metadata.model_dump_json() if trigger_data.trigger_metadata else "{}" ), trigger_type=trigger_data.trigger_type, + workflow_run_id=None, + outputs=None, trigger_data=trigger_data.model_dump_json(), inputs=json.dumps(dict(trigger_data.inputs)), status=WorkflowTriggerStatus.PENDING, @@ -120,6 +122,10 @@ class AsyncWorkflowService: retry_count=0, created_by_role=created_by_role, created_by=created_by, + celery_task_id=None, + error=None, + elapsed_time=None, + total_tokens=None, ) trigger_log = trigger_log_repo.create(trigger_log) diff --git a/api/services/message_service.py b/api/services/message_service.py index 7ed56d80f2..e1a256e64d 100644 --- a/api/services/message_service.py +++ b/api/services/message_service.py @@ -164,6 +164,7 @@ class MessageService: elif not rating and not feedback: raise ValueError("rating cannot be None when feedback not exists") else: + assert rating is not None feedback = MessageFeedback( app_id=app_model.id, conversation_id=message.conversation_id, diff --git a/api/services/rag_pipeline/rag_pipeline_dsl_service.py b/api/services/rag_pipeline/rag_pipeline_dsl_service.py index c02fad4dc6..06f294863d 100644 --- a/api/services/rag_pipeline/rag_pipeline_dsl_service.py +++ b/api/services/rag_pipeline/rag_pipeline_dsl_service.py @@ -580,13 +580,14 @@ class RagPipelineDslService: raise ValueError("Current tenant is not set") # Create new app - pipeline = Pipeline() + pipeline = Pipeline( + tenant_id=account.current_tenant_id, + name=pipeline_data.get("name", ""), + description=pipeline_data.get("description", ""), + created_by=account.id, + updated_by=account.id, + ) pipeline.id = str(uuid4()) - pipeline.tenant_id = account.current_tenant_id - pipeline.name = 
pipeline_data.get("name", "") - pipeline.description = pipeline_data.get("description", "") - pipeline.created_by = account.id - pipeline.updated_by = account.id self._session.add(pipeline) self._session.commit() diff --git a/api/services/rag_pipeline/rag_pipeline_transform_service.py b/api/services/rag_pipeline/rag_pipeline_transform_service.py index 22025dd44a..84f97907c0 100644 --- a/api/services/rag_pipeline/rag_pipeline_transform_service.py +++ b/api/services/rag_pipeline/rag_pipeline_transform_service.py @@ -198,15 +198,16 @@ class RagPipelineTransformService: graph = workflow_data.get("graph", {}) # Create new app - pipeline = Pipeline() + pipeline = Pipeline( + tenant_id=current_user.current_tenant_id, + name=pipeline_data.get("name", ""), + description=pipeline_data.get("description", ""), + created_by=current_user.id, + updated_by=current_user.id, + is_published=True, + is_public=True, + ) pipeline.id = str(uuid4()) - pipeline.tenant_id = current_user.current_tenant_id - pipeline.name = pipeline_data.get("name", "") - pipeline.description = pipeline_data.get("description", "") - pipeline.created_by = current_user.id - pipeline.updated_by = current_user.id - pipeline.is_published = True - pipeline.is_public = True db.session.add(pipeline) db.session.flush() diff --git a/api/services/tag_service.py b/api/services/tag_service.py index db7ed3d5c3..937e6593fe 100644 --- a/api/services/tag_service.py +++ b/api/services/tag_service.py @@ -79,12 +79,12 @@ class TagService: if TagService.get_tag_by_tag_name(args["type"], current_user.current_tenant_id, args["name"]): raise ValueError("Tag name already exists") tag = Tag( - id=str(uuid.uuid4()), name=args["name"], type=args["type"], created_by=current_user.id, tenant_id=current_user.current_tenant_id, ) + tag.id = str(uuid.uuid4()) db.session.add(tag) db.session.commit() return tag diff --git a/api/tasks/trigger_processing_tasks.py b/api/tasks/trigger_processing_tasks.py index 2619d8dd28..ee1d31aa91 100644 --- 
a/api/tasks/trigger_processing_tasks.py +++ b/api/tasks/trigger_processing_tasks.py @@ -218,6 +218,8 @@ def _record_trigger_failure_log( finished_at=now, elapsed_time=0.0, total_tokens=0, + outputs=None, + celery_task_id=None, ) session.add(trigger_log) session.commit() diff --git a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py index 45eb9d4f78..9297e997e9 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py @@ -384,24 +384,24 @@ class TestCleanDatasetTask: # Create dataset metadata and bindings metadata = DatasetMetadata( - id=str(uuid.uuid4()), dataset_id=dataset.id, tenant_id=tenant.id, name="test_metadata", type="string", created_by=account.id, - created_at=datetime.now(), ) + metadata.id = str(uuid.uuid4()) + metadata.created_at = datetime.now() binding = DatasetMetadataBinding( - id=str(uuid.uuid4()), tenant_id=tenant.id, dataset_id=dataset.id, metadata_id=metadata.id, document_id=documents[0].id, # Use first document as example created_by=account.id, - created_at=datetime.now(), ) + binding.id = str(uuid.uuid4()) + binding.created_at = datetime.now() from extensions.ext_database import db @@ -697,26 +697,26 @@ class TestCleanDatasetTask: for i in range(10): # Create 10 metadata items metadata = DatasetMetadata( - id=str(uuid.uuid4()), dataset_id=dataset.id, tenant_id=tenant.id, name=f"test_metadata_{i}", type="string", created_by=account.id, - created_at=datetime.now(), ) + metadata.id = str(uuid.uuid4()) + metadata.created_at = datetime.now() metadata_items.append(metadata) # Create binding for each metadata item binding = DatasetMetadataBinding( - id=str(uuid.uuid4()), tenant_id=tenant.id, dataset_id=dataset.id, metadata_id=metadata.id, document_id=documents[i % len(documents)].id, created_by=account.id, - 
created_at=datetime.now(), ) + binding.id = str(uuid.uuid4()) + binding.created_at = datetime.now() bindings.append(binding) from extensions.ext_database import db @@ -966,14 +966,15 @@ class TestCleanDatasetTask: # Create metadata with special characters special_metadata = DatasetMetadata( - id=str(uuid.uuid4()), dataset_id=dataset.id, tenant_id=tenant.id, name=f"metadata_{special_content}", type="string", created_by=account.id, - created_at=datetime.now(), ) + special_metadata.id = str(uuid.uuid4()) + special_metadata.created_at = datetime.now() + db.session.add(special_metadata) db.session.commit() diff --git a/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py b/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py index c82162238c..e29b98037f 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py +++ b/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py @@ -112,13 +112,13 @@ class TestRagPipelineRunTasks: # Create pipeline pipeline = Pipeline( - id=str(uuid.uuid4()), tenant_id=tenant.id, workflow_id=workflow.id, name=fake.company(), description=fake.text(max_nb_chars=100), created_by=account.id, ) + pipeline.id = str(uuid.uuid4()) db.session.add(pipeline) db.session.commit() From bb3aa0178d4de68d5c05a6941a38e0a052e71f90 Mon Sep 17 00:00:00 2001 From: Maries Date: Tue, 25 Nov 2025 00:40:25 +0800 Subject: [PATCH 22/22] =?UTF-8?q?fix:=20update=20plugin=20verification=20l?= =?UTF-8?q?ogic=20to=20use=20unique=20identifier=20instea=E2=80=A6=20(#286?= =?UTF-8?q?08)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/services/trigger/trigger_provider_service.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/api/services/trigger/trigger_provider_service.py b/api/services/trigger/trigger_provider_service.py index 6079d47bbf..668e4c5be2 100644 --- 
a/api/services/trigger/trigger_provider_service.py +++ b/api/services/trigger/trigger_provider_service.py @@ -475,7 +475,7 @@ class TriggerProviderService: oauth_params = encrypter.decrypt(dict(tenant_client.oauth_params)) return oauth_params - is_verified = PluginService.is_plugin_verified(tenant_id, provider_id.plugin_id) + is_verified = PluginService.is_plugin_verified(tenant_id, provider_controller.plugin_unique_identifier) if not is_verified: return None @@ -499,7 +499,8 @@ class TriggerProviderService: """ Check if system OAuth client exists for a trigger provider. """ - is_verified = PluginService.is_plugin_verified(tenant_id, provider_id.plugin_id) + provider_controller = TriggerManager.get_trigger_provider(tenant_id=tenant_id, provider_id=provider_id) + is_verified = PluginService.is_plugin_verified(tenant_id, provider_controller.plugin_unique_identifier) if not is_verified: return False with Session(db.engine, expire_on_commit=False) as session: