diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py
index 3a13bb6b67..a1ae941d4b 100644
--- a/api/controllers/console/datasets/datasets.py
+++ b/api/controllers/console/datasets/datasets.py
@@ -741,7 +741,7 @@ class DatasetApiDeleteApi(Resource):
         return {"result": "success"}, 204
 
 
-@console_ns.route("/datasets//api-keys/")
+@console_ns.route("/datasets//api-keys/")
 class DatasetEnableApiApi(Resource):
     @setup_required
     @login_required
diff --git a/api/core/app/app_config/entities.py b/api/core/app/app_config/entities.py
index 2ad81fe005..3dbc8706d3 100644
--- a/api/core/app/app_config/entities.py
+++ b/api/core/app/app_config/entities.py
@@ -1,6 +1,6 @@
 from collections.abc import Sequence
 from enum import StrEnum, auto
-from typing import Any, Literal
+from typing import Any, Literal, Optional
 
 from pydantic import BaseModel, Field, field_validator
 
diff --git a/api/core/rag/extractor/entity/extract_setting.py b/api/core/rag/extractor/entity/extract_setting.py
index 0a57c792f1..c0e79b02c4 100644
--- a/api/core/rag/extractor/entity/extract_setting.py
+++ b/api/core/rag/extractor/entity/extract_setting.py
@@ -1,3 +1,5 @@
+from typing import Optional
+
 from pydantic import BaseModel, ConfigDict
 
 from models.dataset import Document
diff --git a/api/core/rag/index_processor/index_processor_base.py b/api/core/rag/index_processor/index_processor_base.py
index b3fc4ac221..05cffb5a55 100644
--- a/api/core/rag/index_processor/index_processor_base.py
+++ b/api/core/rag/index_processor/index_processor_base.py
@@ -2,7 +2,7 @@
 
 from abc import ABC, abstractmethod
 from collections.abc import Mapping
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
 
 from configs import dify_config
 from core.rag.extractor.entity.extract_setting import ExtractSetting
@@ -64,7 +64,7 @@ class BaseIndexProcessor(ABC):
         max_tokens: int,
         chunk_overlap: int,
         separator: str,
-        embedding_model_instance: ModelInstance | None,
+        embedding_model_instance: Optional["ModelInstance"],
     ) -> TextSplitter:
         """
         Get the NodeParser object according to the processing rule.
diff --git a/api/models/workflow.py b/api/models/workflow.py
index a25d65669a..97e1790e19 100644
--- a/api/models/workflow.py
+++ b/api/models/workflow.py
@@ -3,7 +3,7 @@ import logging
 from collections.abc import Mapping, Sequence
 from datetime import datetime
 from enum import StrEnum
-from typing import TYPE_CHECKING, Any, Union, cast
+from typing import TYPE_CHECKING, Any, Union, cast, Optional
 from uuid import uuid4
 
 import sqlalchemy as sa
diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py
index ed2301e172..400b00ef83 100644
--- a/api/services/dataset_service.py
+++ b/api/services/dataset_service.py
@@ -7,7 +7,7 @@ import time
 import uuid
 from collections import Counter
 from collections.abc import Sequence
-from typing import Any, Literal
+from typing import Any, Literal, Optional
 
 import sqlalchemy as sa
 from sqlalchemy import exists, func, select
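
The recurring change above is adding `Optional` to the `typing` imports and, in `index_processor_base.py`, swapping `ModelInstance | None` for the quoted `Optional["ModelInstance"]` form. A minimal sketch of why the quoted form matters, assuming `ModelInstance` is only imported under `TYPE_CHECKING` and the module does not enable `from __future__ import annotations` (the import path and function name below are illustrative, not taken from this diff):

```python
from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    # Illustrative import path; the name only exists for the type checker.
    from core.model_manager import ModelInstance


def build_splitter(embedding_model_instance: Optional["ModelInstance"] = None) -> None:
    # The quoted forward reference is never evaluated at runtime, so this
    # definition succeeds even though ModelInstance is not actually imported.
    ...


# By contrast, `embedding_model_instance: ModelInstance | None` is evaluated when
# the `def` statement runs and would raise NameError in this module.
```

A module-wide `from __future__ import annotations` (or quoting the whole union) would also avoid the runtime lookup, but the `Optional["ModelInstance"]` form keeps the fix local to the annotations being touched.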