From ad870de55439e5d88edae5a8854d9c7a3ba31d1d Mon Sep 17 00:00:00 2001
From: jyong <718720800@qq.com>
Date: Fri, 12 Sep 2025 15:35:13 +0800
Subject: [PATCH] add dataset service API enable toggle

Add an `enable_api` flag on datasets, a console endpoint to toggle it,
and a check in validate_dataset_token that rejects dataset service-API
requests while the flag is disabled.

---
 api/controllers/console/datasets/datasets.py  | 12 ++++++
 .../service_api/dataset/metadata.py           |  4 +-
 api/controllers/service_api/wraps.py          | 41 +++++++++++++++++++
 ..._1429-0b2ca375fabe_add_pipeline_info_18.py | 35 ++++++++++++++++
 api/models/dataset.py                         |  1 +
 api/services/dataset_service.py               | 11 +++++
 6 files changed, 102 insertions(+), 2 deletions(-)
 create mode 100644 api/migrations/versions/2025_09_12_1429-0b2ca375fabe_add_pipeline_info_18.py

diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py
index 3834daa007..ef1fc5a958 100644
--- a/api/controllers/console/datasets/datasets.py
+++ b/api/controllers/console/datasets/datasets.py
@@ -650,6 +650,17 @@ class DatasetApiDeleteApi(Resource):
 
         return {"result": "success"}, 204
 
+class DatasetEnableApiApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def post(self, dataset_id, status):
+        dataset_id_str = str(dataset_id)
+
+        DatasetService.update_dataset_api_status(dataset_id_str, status == "enable")
+
+        return {"result": "success"}, 200
+
 
 class DatasetApiBaseUrlApi(Resource):
@@ -816,6 +827,7 @@ api.add_resource(DatasetRelatedAppListApi, "/datasets/<uuid:dataset_id>/related-apps")
 api.add_resource(DatasetIndexingStatusApi, "/datasets/<uuid:dataset_id>/indexing-status")
 api.add_resource(DatasetApiKeyApi, "/datasets/api-keys")
 api.add_resource(DatasetApiDeleteApi, "/datasets/api-keys/<uuid:api_key_id>")
+api.add_resource(DatasetEnableApiApi, "/datasets/<uuid:dataset_id>/<string:status>")
 api.add_resource(DatasetApiBaseUrlApi, "/datasets/api-base-info")
 api.add_resource(DatasetRetrievalSettingApi, "/datasets/retrieval-setting")
 api.add_resource(DatasetRetrievalSettingMockApi, "/datasets/retrieval-setting/<string:vector_type>")
diff --git a/api/controllers/service_api/dataset/metadata.py b/api/controllers/service_api/dataset/metadata.py
index c2df97eaec..c6032048e6 100644
--- a/api/controllers/service_api/dataset/metadata.py
+++ b/api/controllers/service_api/dataset/metadata.py
@@ -133,7 +133,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
         return 204
 
 
-@service_api_ns.route("/datasets/metadata/built-in")
+@service_api_ns.route("/datasets/<uuid:dataset_id>/metadata/built-in")
 class DatasetMetadataBuiltInFieldServiceApi(DatasetApiResource):
     @service_api_ns.doc("get_built_in_fields")
     @service_api_ns.doc(description="Get all built-in metadata fields")
@@ -143,7 +143,7 @@ class DatasetMetadataBuiltInFieldServiceApi(DatasetApiResource):
             401: "Unauthorized - invalid API token",
         }
     )
-    def get(self, tenant_id):
+    def get(self, tenant_id, dataset_id):
         """Get all built-in metadata fields."""
         built_in_fields = MetadataService.get_built_in_fields()
         return {"fields": built_in_fields}, 200
diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py
index 14291578d5..e8816c74a9 100644
--- a/api/controllers/service_api/wraps.py
+++ b/api/controllers/service_api/wraps.py
@@ -193,6 +193,47 @@ def validate_dataset_token(view=None):
     def decorator(view):
         @wraps(view)
         def decorated(*args, **kwargs):
+            # Get the URL path dataset_id from kwargs or positional args.
+            # Flask normally passes URL path parameters as keyword arguments.
+            dataset_id = None
+
+            # First try kwargs (the explicit, common case).
+            dataset_id = kwargs.get("dataset_id")
+
+            # If not in kwargs, try to extract it from positional args.
+            if not dataset_id and args:
+                # For class methods: args[0] is self, args[1] is dataset_id (if present).
+                # Check whether the first arg is likely a class instance.
+                if len(args) > 1 and hasattr(args[0], "__dict__"):
+                    # Bound method call: dataset_id should be in args[1].
+                    potential_id = args[1]
+                    # Validate it is a UUID-shaped string, not some other object.
+                    try:
+                        # Convert to string and check the canonical UUID shape.
+                        str_id = str(potential_id)
+                        # Basic check: UUIDs are 36 chars with four hyphens.
+                        if len(str_id) == 36 and str_id.count("-") == 4:
+                            dataset_id = str_id
+                    except Exception:
+                        pass
+                elif len(args) > 0:
+                    # Plain function call: check whether args[0] looks like a UUID.
+                    potential_id = args[0]
+                    try:
+                        str_id = str(potential_id)
+                        if len(str_id) == 36 and str_id.count("-") == 4:
+                            dataset_id = str_id
+                    except Exception:
+                        pass
+
+            # If a dataset_id was found, require API access to be enabled.
+            if dataset_id:
+                dataset_id = str(dataset_id)
+                dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
+                if not dataset:
+                    raise NotFound("Dataset not found.")
+                if not dataset.enable_api:
+                    raise Forbidden("Dataset API access is not enabled.")
             api_token = validate_and_get_api_token("dataset")
             tenant_account_join = (
                 db.session.query(Tenant, TenantAccountJoin)
diff --git a/api/migrations/versions/2025_09_12_1429-0b2ca375fabe_add_pipeline_info_18.py b/api/migrations/versions/2025_09_12_1429-0b2ca375fabe_add_pipeline_info_18.py
new file mode 100644
index 0000000000..4d8be75b5a
--- /dev/null
+++ b/api/migrations/versions/2025_09_12_1429-0b2ca375fabe_add_pipeline_info_18.py
@@ -0,0 +1,35 @@
+"""add_pipeline_info_18
+
+Revision ID: 0b2ca375fabe
+Revises: b45e25c2d166
+Create Date: 2025-09-12 14:29:38.078589
+
+"""
+from alembic import op
+import models as models
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '0b2ca375fabe'
+down_revision = 'b45e25c2d166'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+
+    with op.batch_alter_table('datasets', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('enable_api', sa.Boolean(), server_default=sa.text('true'), nullable=False))
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+
+    with op.batch_alter_table('datasets', schema=None) as batch_op:
+        batch_op.drop_column('enable_api')
+
+    # ### end Alembic commands ###
diff --git a/api/models/dataset.py b/api/models/dataset.py
index 4674ef81e6..0cd53138cc 100644
--- a/api/models/dataset.py
+++ b/api/models/dataset.py
@@ -72,6 +72,7 @@ class Dataset(Base):
     runtime_mode = db.Column(db.String(255), nullable=True, server_default=db.text("'general'::character varying"))
     pipeline_id = db.Column(StringUUID, nullable=True)
     chunk_structure = db.Column(db.String(255), nullable=True)
+    enable_api = db.Column(db.Boolean, nullable=False, server_default=db.text("true"))
 
     @property
     def total_documents(self):
diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py
index f0b800842c..f0157db0f9 100644
--- a/api/services/dataset_service.py
+++ b/api/services/dataset_service.py
@@ -916,6 +916,17 @@ class DatasetService:
             .all()
         )
 
+    @staticmethod
+    def update_dataset_api_status(dataset_id: str, status: bool):
+        dataset = DatasetService.get_dataset(dataset_id)
+        if dataset is None:
+            raise NotFound("Dataset not found.")
+        dataset.enable_api = status
+        dataset.updated_by = current_user.id
+        dataset.updated_at = naive_utc_now()
+        db.session.commit()
+
+
     @staticmethod
     def get_dataset_auto_disable_logs(dataset_id: str):
         assert isinstance(current_user, Account)
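
A minimal end-to-end sketch of the new toggle. Everything below is
illustrative: the base URLs, tokens, and dataset id are placeholders,
the console API in a real deployment authenticates via a login session
rather than the bare header shown, and it assumes the console route
reconstructed above (POST /datasets/<uuid:dataset_id>/<string:status>
with status "enable" or "disable") plus Dataset, NotFound, and Forbidden
being importable in wraps.py, since the diff adds no imports there.

    import requests

    CONSOLE_BASE = "http://localhost:5001/console/api"   # placeholder console API base
    SERVICE_BASE = "http://localhost:5001/v1"            # placeholder service API base
    DATASET_ID = "0b2ca375-0000-4000-8000-000000000000"  # placeholder dataset UUID

    console_headers = {"Authorization": "Bearer <console-session-token>"}  # placeholder
    service_headers = {"Authorization": "Bearer <dataset-api-key>"}        # placeholder

    # Turn the dataset's service API off via the new console endpoint.
    requests.post(f"{CONSOLE_BASE}/datasets/{DATASET_ID}/disable", headers=console_headers)

    # Any dataset route guarded by validate_dataset_token now answers 403
    # "Dataset API access is not enabled." for this dataset.
    resp = requests.get(f"{SERVICE_BASE}/datasets/{DATASET_ID}/documents", headers=service_headers)
    print(resp.status_code)  # expected: 403

    # Turn it back on; the same call succeeds again.
    requests.post(f"{CONSOLE_BASE}/datasets/{DATASET_ID}/enable", headers=console_headers)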
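
One review note on the wraps.py fallback: it accepts any 36-character
string with four hyphens as a dataset id, so a look-alike such as
"aaaa-bbbb-cccc-dddd-eeeeeeeeeeeeeeee" slips through the shape check. A
stricter test via the stdlib uuid module, sketched here as an
alternative rather than as part of the patch, rejects those:

    import uuid

    def is_canonical_uuid(value) -> bool:
        # uuid.UUID() fully parses the string and raises ValueError on
        # anything that is not a well-formed UUID; comparing against the
        # canonical form also rejects un-hyphenated or oddly hyphenated
        # hex spellings that uuid.UUID() would otherwise accept.
        s = str(value)
        try:
            return str(uuid.UUID(s)) == s.lower()
        except ValueError:
            return False

    print(is_canonical_uuid("0b2ca375-0000-4000-8000-000000000000"))  # True
    print(is_canonical_uuid("aaaa-bbbb-cccc-dddd-eeeeeeeeeeeeeeee"))  # False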