diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index 3dd00ee4db..c03f281858 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/devcontainers/python:3.12-bullseye
+FROM mcr.microsoft.com/devcontainers/python:3.12-bookworm
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install libgmp-dev libmpfr-dev libmpc-dev
diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml
index ef69e08da9..0cae2ef552 100644
--- a/.github/workflows/autofix.yml
+++ b/.github/workflows/autofix.yml
@@ -30,6 +30,8 @@ jobs:
run: |
uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all
+ uvx --from ast-grep-cli sg -p '$A = db.Column($$$B)' -r '$A = mapped_column($$$B)' -l py --update-all
+ uvx --from ast-grep-cli sg -p '$A : $T = db.Column($$$B)' -r '$A : $T = mapped_column($$$B)' -l py --update-all
# Convert Optional[T] to T | None (ignoring quoted types)
cat > /tmp/optional-rule.yml << 'EOF'
id: convert-optional-to-union
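The two new rewrites migrate legacy `db.Column` assignments to SQLAlchemy 2.0's `mapped_column`, one rule for plain targets and one for annotated ones. A minimal before/after sketch, assuming a hypothetical `Tag` model (not code from this repo):

```python
from sqlalchemy import String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Tag(Base):  # hypothetical model, for illustration only
    __tablename__ = "tags"

    # rule 1: `id = db.Column(...)` becomes `id = mapped_column(...)`
    id = mapped_column(String(36), primary_key=True)
    # rule 2 keeps an existing annotation in place:
    # `name: Mapped[str] = db.Column(...)` becomes the line below
    name: Mapped[str] = mapped_column(String(255), nullable=False)
```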
diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml
index de732c3134..cd1c86e668 100644
--- a/.github/workflows/deploy-dev.yml
+++ b/.github/workflows/deploy-dev.yml
@@ -18,7 +18,7 @@ jobs:
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8
with:
- host: ${{ secrets.RAG_SSH_HOST }}
+ host: ${{ secrets.SSH_HOST }}
username: ${{ secrets.SSH_USER }}
key: ${{ secrets.SSH_PRIVATE_KEY }}
script: |
diff --git a/Makefile b/Makefile
index ea560c7157..19c398ec82 100644
--- a/Makefile
+++ b/Makefile
@@ -26,7 +26,6 @@ prepare-web:
@echo "🌐 Setting up web environment..."
@cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists"
@cd web && pnpm install
- @cd web && pnpm build
@echo "✅ Web environment prepared (not started)"
# Step 3: Prepare API environment
diff --git a/README.md b/README.md
index 8159057f55..aadced582d 100644
--- a/README.md
+++ b/README.md
@@ -40,18 +40,18 @@
Dify is an open-source platform for developing LLM applications. Its intuitive interface combines agentic AI workflows, RAG pipelines, agent capabilities, model management, observability features, and more—allowing you to quickly move from prototype to production.
diff --git a/api/.env.example b/api/.env.example
index d53de3779b..a462bfdbec 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -427,8 +427,8 @@ CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
CODE_MAX_NUMBER=9223372036854775807
CODE_MIN_NUMBER=-9223372036854775808
-CODE_MAX_STRING_LENGTH=80000
-TEMPLATE_TRANSFORM_MAX_LENGTH=80000
+CODE_MAX_STRING_LENGTH=400000
+TEMPLATE_TRANSFORM_MAX_LENGTH=400000
CODE_MAX_STRING_ARRAY_LENGTH=30
CODE_MAX_OBJECT_ARRAY_LENGTH=30
CODE_MAX_NUMBER_ARRAY_LENGTH=1000
diff --git a/api/.ruff.toml b/api/.ruff.toml
index 643bc063a1..5a29e1d8fa 100644
--- a/api/.ruff.toml
+++ b/api/.ruff.toml
@@ -81,7 +81,6 @@ ignore = [
"SIM113", # enumerate-for-loop
"SIM117", # multiple-with-statements
"SIM210", # if-expr-with-true-false
- "UP038", # deprecated and not recommended by Ruff, https://docs.astral.sh/ruff/rules/non-pep604-isinstance/
]
[lint.per-file-ignores]
diff --git a/api/README.md b/api/README.md
index 5ecf92a4f0..e75ea3d354 100644
--- a/api/README.md
+++ b/api/README.md
@@ -80,10 +80,10 @@
1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.
```bash
-uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation
+uv run celery -A app.celery worker -P gevent -c 2 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation
```
-Addition, if you want to debug the celery scheduled tasks, you can use the following command in another terminal:
+Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service:
```bash
uv run celery -A app.celery beat
diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py
index 363cf4e2b5..5b871f69f9 100644
--- a/api/configs/feature/__init__.py
+++ b/api/configs/feature/__init__.py
@@ -150,7 +150,7 @@ class CodeExecutionSandboxConfig(BaseSettings):
CODE_MAX_STRING_LENGTH: PositiveInt = Field(
description="Maximum allowed length for strings in code execution",
- default=80000,
+ default=400_000,
)
CODE_MAX_STRING_ARRAY_LENGTH: PositiveInt = Field(
@@ -362,11 +362,11 @@ class HttpConfig(BaseSettings):
)
HTTP_REQUEST_MAX_READ_TIMEOUT: int = Field(
- ge=1, description="Maximum read timeout in seconds for HTTP requests", default=60
+ ge=1, description="Maximum read timeout in seconds for HTTP requests", default=600
)
HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = Field(
- ge=1, description="Maximum write timeout in seconds for HTTP requests", default=20
+ ge=1, description="Maximum write timeout in seconds for HTTP requests", default=600
)
HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
@@ -582,6 +582,11 @@ class WorkflowConfig(BaseSettings):
default=200 * 1024,
)
+ TEMPLATE_TRANSFORM_MAX_LENGTH: PositiveInt = Field(
+ description="Maximum number of characters allowed in Template Transform node output",
+ default=400_000,
+ )
+
# GraphEngine Worker Pool Configuration
GRAPH_ENGINE_MIN_WORKERS: PositiveInt = Field(
description="Minimum number of workers per GraphEngine instance",
@@ -766,7 +771,7 @@ class MailConfig(BaseSettings):
MAIL_TEMPLATING_TIMEOUT: int = Field(
description="""
- Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates.
+ Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates.
Only available in sandbox mode.""",
default=3,
)
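The new `TEMPLATE_TRANSFORM_MAX_LENGTH` field pairs with the `.env.example` bump above: pydantic-settings binds each `BaseSettings` field to the environment variable of the same name, so the env file overrides the `400_000` default. A trimmed sketch of the mechanism (the real `WorkflowConfig` carries many more fields):

```python
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class WorkflowConfig(BaseSettings):
    TEMPLATE_TRANSFORM_MAX_LENGTH: PositiveInt = Field(
        description="Maximum number of characters allowed in Template Transform node output",
        default=400_000,
    )


# Run as `TEMPLATE_TRANSFORM_MAX_LENGTH=500000 python sketch.py` to override.
config = WorkflowConfig()
print(config.TEMPLATE_TRANSFORM_MAX_LENGTH)  # 400000 unless set in the environment
```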
diff --git a/api/constants/__init__.py b/api/constants/__init__.py
index fe8f4f8785..9141fbea95 100644
--- a/api/constants/__init__.py
+++ b/api/constants/__init__.py
@@ -1,4 +1,5 @@
from configs import dify_config
+from libs.collection_utils import convert_to_lower_and_upper_set
HIDDEN_VALUE = "[__HIDDEN__]"
UNKNOWN_VALUE = "[__UNKNOWN__]"
@@ -6,24 +7,39 @@ UUID_NIL = "00000000-0000-0000-0000-000000000000"
DEFAULT_FILE_NUMBER_LIMITS = 3
-IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
-IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])
+IMAGE_EXTENSIONS = convert_to_lower_and_upper_set({"jpg", "jpeg", "png", "webp", "gif", "svg"})
-VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "webm"]
-VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS])
+VIDEO_EXTENSIONS = convert_to_lower_and_upper_set({"mp4", "mov", "mpeg", "webm"})
-AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "amr", "mpga"]
-AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
+AUDIO_EXTENSIONS = convert_to_lower_and_upper_set({"mp3", "m4a", "wav", "amr", "mpga"})
-
-_doc_extensions: list[str]
+_doc_extensions: set[str]
if dify_config.ETL_TYPE == "Unstructured":
- _doc_extensions = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"]
- _doc_extensions.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
+ _doc_extensions = {
+ "txt",
+ "markdown",
+ "md",
+ "mdx",
+ "pdf",
+ "html",
+ "htm",
+ "xlsx",
+ "xls",
+ "vtt",
+ "properties",
+ "doc",
+ "docx",
+ "csv",
+ "eml",
+ "msg",
+ "pptx",
+ "xml",
+ "epub",
+ }
if dify_config.UNSTRUCTURED_API_URL:
- _doc_extensions.append("ppt")
+ _doc_extensions.add("ppt")
else:
- _doc_extensions = [
+ _doc_extensions = {
"txt",
"markdown",
"md",
@@ -37,5 +53,5 @@ else:
"csv",
"vtt",
"properties",
- ]
-DOCUMENT_EXTENSIONS = _doc_extensions + [ext.upper() for ext in _doc_extensions]
+ }
+DOCUMENT_EXTENSIONS: set[str] = convert_to_lower_and_upper_set(_doc_extensions)
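`convert_to_lower_and_upper_set` is introduced here but defined outside this diff; judging from the call sites it presumably expands a set of extensions into both cases, roughly along these lines (an assumed sketch, not the actual `libs/collection_utils` source):

```python
def convert_to_lower_and_upper_set(values: set[str]) -> set[str]:
    """Return a set holding both the lowercase and uppercase form of each value."""
    return {v.lower() for v in values} | {v.upper() for v in values}


assert convert_to_lower_and_upper_set({"jpg", "PNG"}) == {"jpg", "JPG", "png", "PNG"}
```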
diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py
index ee02ff3937..621f5066e4 100644
--- a/api/controllers/console/__init__.py
+++ b/api/controllers/console/__init__.py
@@ -1,31 +1,10 @@
+from importlib import import_module
+
from flask import Blueprint
from flask_restx import Namespace
from libs.external_api import ExternalApi
-from .app.app_import import AppImportApi, AppImportCheckDependenciesApi, AppImportConfirmApi
-from .explore.audio import ChatAudioApi, ChatTextApi
-from .explore.completion import ChatApi, ChatStopApi, CompletionApi, CompletionStopApi
-from .explore.conversation import (
- ConversationApi,
- ConversationListApi,
- ConversationPinApi,
- ConversationRenameApi,
- ConversationUnPinApi,
-)
-from .explore.message import (
- MessageFeedbackApi,
- MessageListApi,
- MessageMoreLikeThisApi,
- MessageSuggestedQuestionApi,
-)
-from .explore.workflow import (
- InstalledAppWorkflowRunApi,
- InstalledAppWorkflowTaskStopApi,
-)
-from .files import FileApi, FilePreviewApi, FileSupportTypeApi
-from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi
-
bp = Blueprint("console", __name__, url_prefix="/console/api")
api = ExternalApi(
@@ -35,23 +14,23 @@ api = ExternalApi(
description="Console management APIs for app configuration, monitoring, and administration",
)
-# Create namespace
console_ns = Namespace("console", description="Console management API operations", path="/")
-# File
-api.add_resource(FileApi, "/files/upload")
-api.add_resource(FilePreviewApi, "/files/<uuid:file_id>/preview")
-api.add_resource(FileSupportTypeApi, "/files/support-type")
+RESOURCE_MODULES = (
+ "controllers.console.app.app_import",
+ "controllers.console.explore.audio",
+ "controllers.console.explore.completion",
+ "controllers.console.explore.conversation",
+ "controllers.console.explore.message",
+ "controllers.console.explore.workflow",
+ "controllers.console.files",
+ "controllers.console.remote_files",
+)
-# Remote files
-api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>")
-api.add_resource(RemoteFileUploadApi, "/remote-files/upload")
-
-# Import App
-api.add_resource(AppImportApi, "/apps/imports")
-api.add_resource(AppImportConfirmApi, "/apps/imports/<string:import_id>/confirm")
-api.add_resource(AppImportCheckDependenciesApi, "/apps/imports/<string:app_id>/check-dependencies")
+for module_name in RESOURCE_MODULES:
+ import_module(module_name)
+# Ensure resource modules are imported so route decorators are evaluated.
# Import other controllers
from . import (
admin,
@@ -150,77 +129,6 @@ from .workspace import (
workspace,
)
-# Explore Audio
-api.add_resource(ChatAudioApi, "/installed-apps/<uuid:installed_app_id>/audio-to-text", endpoint="installed_app_audio")
-api.add_resource(ChatTextApi, "/installed-apps/<uuid:installed_app_id>/text-to-audio", endpoint="installed_app_text")
-
-# Explore Completion
-api.add_resource(
- CompletionApi, "/installed-apps/<uuid:installed_app_id>/completion-messages", endpoint="installed_app_completion"
-)
-api.add_resource(
- CompletionStopApi,
- "/installed-apps/<uuid:installed_app_id>/completion-messages/<string:task_id>/stop",
- endpoint="installed_app_stop_completion",
-)
-api.add_resource(
- ChatApi, "/installed-apps/<uuid:installed_app_id>/chat-messages", endpoint="installed_app_chat_completion"
-)
-api.add_resource(
- ChatStopApi,
- "/installed-apps/<uuid:installed_app_id>/chat-messages/<string:task_id>/stop",
- endpoint="installed_app_stop_chat_completion",
-)
-
-# Explore Conversation
-api.add_resource(
- ConversationRenameApi,
- "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/name",
- endpoint="installed_app_conversation_rename",
-)
-api.add_resource(
- ConversationListApi, "/installed-apps/<uuid:installed_app_id>/conversations", endpoint="installed_app_conversations"
-)
-api.add_resource(
- ConversationApi,
- "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>",
- endpoint="installed_app_conversation",
-)
-api.add_resource(
- ConversationPinApi,
- "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/pin",
- endpoint="installed_app_conversation_pin",
-)
-api.add_resource(
- ConversationUnPinApi,
- "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/unpin",
- endpoint="installed_app_conversation_unpin",
-)
-
-
-# Explore Message
-api.add_resource(MessageListApi, "/installed-apps/<uuid:installed_app_id>/messages", endpoint="installed_app_messages")
-api.add_resource(
- MessageFeedbackApi,
- "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/feedbacks",
- endpoint="installed_app_message_feedback",
-)
-api.add_resource(
- MessageMoreLikeThisApi,
- "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/more-like-this",
- endpoint="installed_app_more_like_this",
-)
-api.add_resource(
- MessageSuggestedQuestionApi,
- "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/suggested-questions",
- endpoint="installed_app_suggested_question",
-)
-# Explore Workflow
-api.add_resource(InstalledAppWorkflowRunApi, "/installed-apps/<uuid:installed_app_id>/workflows/run")
-api.add_resource(
- InstalledAppWorkflowTaskStopApi, "/installed-apps/<uuid:installed_app_id>/workflows/tasks/<string:task_id>/stop"
-)
-
api.add_namespace(console_ns)
__all__ = [
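Moving registration from central `api.add_resource(...)` calls to `@console_ns.route(...)` decorators makes each route a side effect of importing its module, which is exactly what the `RESOURCE_MODULES` loop guarantees. A standalone toy version of the pattern (not Dify code), assuming flask-restx:

```python
from flask import Flask
from flask_restx import Api, Namespace, Resource

app = Flask(__name__)
api = Api(app)
ns = Namespace("demo", path="/")


# In the real layout this class sits in its own module; the decorator runs,
# and thus the route registers, only once that module has been imported.
@ns.route("/ping")
class Ping(Resource):
    def get(self):
        return {"result": "pong"}


api.add_namespace(ns)  # everything attached to the namespace goes live here
```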
diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py
index 2d2e4b448a..23b8e2c5a2 100644
--- a/api/controllers/console/app/app.py
+++ b/api/controllers/console/app/app.py
@@ -19,6 +19,7 @@ from core.ops.ops_trace_manager import OpsTraceManager
from extensions.ext_database import db
from fields.app_fields import app_detail_fields, app_detail_fields_with_site, app_pagination_fields
from libs.login import login_required
+from libs.validators import validate_description_length
from models import Account, App
from services.app_dsl_service import AppDslService, ImportMode
from services.app_service import AppService
@@ -28,12 +29,6 @@ from services.feature_service import FeatureService
ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"]
-def _validate_description_length(description):
- if description and len(description) > 400:
- raise ValueError("Description cannot exceed 400 characters.")
- return description
-
-
@console_ns.route("/apps")
class AppListApi(Resource):
@api.doc("list_apps")
@@ -138,7 +133,7 @@ class AppListApi(Resource):
"""Create app"""
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, location="json")
- parser.add_argument("description", type=_validate_description_length, location="json")
+ parser.add_argument("description", type=validate_description_length, location="json")
parser.add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
@@ -219,7 +214,7 @@ class AppApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
- parser.add_argument("description", type=_validate_description_length, location="json")
+ parser.add_argument("description", type=validate_description_length, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")
@@ -297,7 +292,7 @@ class AppCopyApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, location="json")
- parser.add_argument("description", type=_validate_description_length, location="json")
+ parser.add_argument("description", type=validate_description_length, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")
diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py
index aee93a8814..c14f597c25 100644
--- a/api/controllers/console/app/app_import.py
+++ b/api/controllers/console/app/app_import.py
@@ -20,7 +20,10 @@ from services.app_dsl_service import AppDslService, ImportStatus
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import FeatureService
+from .. import console_ns
+
+@console_ns.route("/apps/imports")
class AppImportApi(Resource):
@setup_required
@login_required
@@ -74,6 +77,7 @@ class AppImportApi(Resource):
return result.model_dump(mode="json"), 200
+@console_ns.route("/apps/imports//confirm")
class AppImportConfirmApi(Resource):
@setup_required
@login_required
@@ -98,6 +102,7 @@ class AppImportConfirmApi(Resource):
return result.model_dump(mode="json"), 200
+@console_ns.route("/apps/imports//check-dependencies")
class AppImportCheckDependenciesApi(Resource):
@setup_required
@login_required
diff --git a/api/controllers/console/app/model_config.py b/api/controllers/console/app/model_config.py
index 11df511840..e71b774d3e 100644
--- a/api/controllers/console/app/model_config.py
+++ b/api/controllers/console/app/model_config.py
@@ -90,7 +90,7 @@ class ModelConfigResource(Resource):
if not isinstance(tool, dict) or len(tool.keys()) <= 3:
continue
- agent_tool_entity = AgentToolEntity(**tool)
+ agent_tool_entity = AgentToolEntity.model_validate(tool)
# get tool
try:
tool_runtime = ToolManager.get_agent_tool_runtime(
@@ -124,7 +124,7 @@ class ModelConfigResource(Resource):
# encrypt agent tool parameters if it's secret-input
agent_mode = new_app_model_config.agent_mode_dict
for tool in agent_mode.get("tools") or []:
- agent_tool_entity = AgentToolEntity(**tool)
+ agent_tool_entity = AgentToolEntity.model_validate(tool)
# get tool
key = f"{agent_tool_entity.provider_id}.{agent_tool_entity.provider_type}.{agent_tool_entity.tool_name}"
diff --git a/api/controllers/console/auth/data_source_bearer_auth.py b/api/controllers/console/auth/data_source_bearer_auth.py
index 796e6916cc..207303b212 100644
--- a/api/controllers/console/auth/data_source_bearer_auth.py
+++ b/api/controllers/console/auth/data_source_bearer_auth.py
@@ -2,7 +2,7 @@ from flask_login import current_user
from flask_restx import Resource, reqparse
from werkzeug.exceptions import Forbidden
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.auth.error import ApiKeyAuthFailedError
from libs.login import login_required
from services.auth.api_key_auth_service import ApiKeyAuthService
@@ -10,6 +10,7 @@ from services.auth.api_key_auth_service import ApiKeyAuthService
from ..wraps import account_initialization_required, setup_required
+@console_ns.route("/api-key-auth/data-source")
class ApiKeyAuthDataSource(Resource):
@setup_required
@login_required
@@ -33,6 +34,7 @@ class ApiKeyAuthDataSource(Resource):
return {"sources": []}
+@console_ns.route("/api-key-auth/data-source/binding")
class ApiKeyAuthDataSourceBinding(Resource):
@setup_required
@login_required
@@ -54,6 +56,7 @@ class ApiKeyAuthDataSourceBinding(Resource):
return {"result": "success"}, 200
+@console_ns.route("/api-key-auth/data-source/")
class ApiKeyAuthDataSourceBindingDelete(Resource):
@setup_required
@login_required
@@ -66,8 +69,3 @@ class ApiKeyAuthDataSourceBindingDelete(Resource):
ApiKeyAuthService.delete_provider_auth(current_user.current_tenant_id, binding_id)
return {"result": "success"}, 204
-
-
-api.add_resource(ApiKeyAuthDataSource, "/api-key-auth/data-source")
-api.add_resource(ApiKeyAuthDataSourceBinding, "/api-key-auth/data-source/binding")
-api.add_resource(ApiKeyAuthDataSourceBindingDelete, "/api-key-auth/data-source/<uuid:binding_id>")
diff --git a/api/controllers/console/auth/email_register.py b/api/controllers/console/auth/email_register.py
index 91de19a78a..d3613d9183 100644
--- a/api/controllers/console/auth/email_register.py
+++ b/api/controllers/console/auth/email_register.py
@@ -5,7 +5,7 @@ from sqlalchemy.orm import Session
from configs import dify_config
from constants.languages import languages
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.auth.error import (
EmailAlreadyInUseError,
EmailCodeError,
@@ -25,6 +25,7 @@ from services.billing_service import BillingService
from services.errors.account import AccountNotFoundError, AccountRegisterError
+@console_ns.route("/email-register/send-email")
class EmailRegisterSendEmailApi(Resource):
@setup_required
@email_password_login_enabled
@@ -52,6 +53,7 @@ class EmailRegisterSendEmailApi(Resource):
return {"result": "success", "data": token}
+@console_ns.route("/email-register/validity")
class EmailRegisterCheckApi(Resource):
@setup_required
@email_password_login_enabled
@@ -92,6 +94,7 @@ class EmailRegisterCheckApi(Resource):
return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
+@console_ns.route("/email-register")
class EmailRegisterResetApi(Resource):
@setup_required
@email_password_login_enabled
@@ -148,8 +151,3 @@ class EmailRegisterResetApi(Resource):
raise AccountInFreezeError()
return account
-
-
-api.add_resource(EmailRegisterSendEmailApi, "/email-register/send-email")
-api.add_resource(EmailRegisterCheckApi, "/email-register/validity")
-api.add_resource(EmailRegisterResetApi, "/email-register")
diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py
index 36ccb1d562..704bcf8fb8 100644
--- a/api/controllers/console/auth/forgot_password.py
+++ b/api/controllers/console/auth/forgot_password.py
@@ -221,8 +221,3 @@ class ForgotPasswordResetApi(Resource):
TenantService.create_tenant_member(tenant, account, role="owner")
account.current_tenant = tenant
tenant_was_created.send(tenant)
-
-
-api.add_resource(ForgotPasswordSendEmailApi, "/forgot-password")
-api.add_resource(ForgotPasswordCheckApi, "/forgot-password/validity")
-api.add_resource(ForgotPasswordResetApi, "/forgot-password/resets")
diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py
index 3b35ab3c23..ba614aa828 100644
--- a/api/controllers/console/auth/login.py
+++ b/api/controllers/console/auth/login.py
@@ -7,7 +7,7 @@ from flask_restx import Resource, reqparse
import services
from configs import dify_config
from constants.languages import languages
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.auth.error import (
AuthenticationFailedError,
EmailCodeError,
@@ -34,6 +34,7 @@ from services.errors.workspace import WorkSpaceNotAllowedCreateError, Workspaces
from services.feature_service import FeatureService
+@console_ns.route("/login")
class LoginApi(Resource):
"""Resource for user login."""
@@ -91,6 +92,7 @@ class LoginApi(Resource):
return {"result": "success", "data": token_pair.model_dump()}
+@console_ns.route("/logout")
class LogoutApi(Resource):
@setup_required
def get(self):
@@ -102,6 +104,7 @@ class LogoutApi(Resource):
return {"result": "success"}
+@console_ns.route("/reset-password")
class ResetPasswordSendEmailApi(Resource):
@setup_required
@email_password_login_enabled
@@ -130,6 +133,7 @@ class ResetPasswordSendEmailApi(Resource):
return {"result": "success", "data": token}
+@console_ns.route("/email-code-login")
class EmailCodeLoginSendEmailApi(Resource):
@setup_required
def post(self):
@@ -162,6 +166,7 @@ class EmailCodeLoginSendEmailApi(Resource):
return {"result": "success", "data": token}
+@console_ns.route("/email-code-login/validity")
class EmailCodeLoginApi(Resource):
@setup_required
def post(self):
@@ -218,6 +223,7 @@ class EmailCodeLoginApi(Resource):
return {"result": "success", "data": token_pair.model_dump()}
+@console_ns.route("/refresh-token")
class RefreshTokenApi(Resource):
def post(self):
parser = reqparse.RequestParser()
@@ -229,11 +235,3 @@ class RefreshTokenApi(Resource):
return {"result": "success", "data": new_token_pair.model_dump()}
except Exception as e:
return {"result": "fail", "data": str(e)}, 401
-
-
-api.add_resource(LoginApi, "/login")
-api.add_resource(LogoutApi, "/logout")
-api.add_resource(EmailCodeLoginSendEmailApi, "/email-code-login")
-api.add_resource(EmailCodeLoginApi, "/email-code-login/validity")
-api.add_resource(ResetPasswordSendEmailApi, "/reset-password")
-api.add_resource(RefreshTokenApi, "/refresh-token")
diff --git a/api/controllers/console/auth/oauth_server.py b/api/controllers/console/auth/oauth_server.py
index a54c1443f8..46281860ae 100644
--- a/api/controllers/console/auth/oauth_server.py
+++ b/api/controllers/console/auth/oauth_server.py
@@ -14,7 +14,7 @@ from models.account import Account
from models.model import OAuthProviderApp
from services.oauth_server import OAUTH_ACCESS_TOKEN_EXPIRES_IN, OAuthGrantType, OAuthServerService
-from .. import api
+from .. import console_ns
P = ParamSpec("P")
R = TypeVar("R")
@@ -86,6 +86,7 @@ def oauth_server_access_token_required(view: Callable[Concatenate[T, OAuthProvid
return decorated
+@console_ns.route("/oauth/provider")
class OAuthServerAppApi(Resource):
@setup_required
@oauth_server_client_id_required
@@ -108,6 +109,7 @@ class OAuthServerAppApi(Resource):
)
+@console_ns.route("/oauth/provider/authorize")
class OAuthServerUserAuthorizeApi(Resource):
@setup_required
@login_required
@@ -125,6 +127,7 @@ class OAuthServerUserAuthorizeApi(Resource):
)
+@console_ns.route("/oauth/provider/token")
class OAuthServerUserTokenApi(Resource):
@setup_required
@oauth_server_client_id_required
@@ -180,6 +183,7 @@ class OAuthServerUserTokenApi(Resource):
)
+@console_ns.route("/oauth/provider/account")
class OAuthServerUserAccountApi(Resource):
@setup_required
@oauth_server_client_id_required
@@ -194,9 +198,3 @@ class OAuthServerUserAccountApi(Resource):
"timezone": account.timezone,
}
)
-
-
-api.add_resource(OAuthServerAppApi, "/oauth/provider")
-api.add_resource(OAuthServerUserAuthorizeApi, "/oauth/provider/authorize")
-api.add_resource(OAuthServerUserTokenApi, "/oauth/provider/token")
-api.add_resource(OAuthServerUserAccountApi, "/oauth/provider/account")
diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py
index 39fc7dec6b..fa89f45122 100644
--- a/api/controllers/console/billing/billing.py
+++ b/api/controllers/console/billing/billing.py
@@ -1,12 +1,13 @@
from flask_restx import Resource, reqparse
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required
from libs.login import current_user, login_required
from models.model import Account
from services.billing_service import BillingService
+@console_ns.route("/billing/subscription")
class Subscription(Resource):
@setup_required
@login_required
@@ -26,6 +27,7 @@ class Subscription(Resource):
)
+@console_ns.route("/billing/invoices")
class Invoices(Resource):
@setup_required
@login_required
@@ -36,7 +38,3 @@ class Invoices(Resource):
BillingService.is_tenant_owner_or_admin(current_user)
assert current_user.current_tenant_id is not None
return BillingService.get_invoices(current_user.email, current_user.current_tenant_id)
-
-
-api.add_resource(Subscription, "/billing/subscription")
-api.add_resource(Invoices, "/billing/invoices")
diff --git a/api/controllers/console/billing/compliance.py b/api/controllers/console/billing/compliance.py
index 4bc073f679..e489b48c82 100644
--- a/api/controllers/console/billing/compliance.py
+++ b/api/controllers/console/billing/compliance.py
@@ -6,10 +6,11 @@ from libs.helper import extract_remote_ip
from libs.login import login_required
from services.billing_service import BillingService
-from .. import api
+from .. import console_ns
from ..wraps import account_initialization_required, only_edition_cloud, setup_required
+@console_ns.route("/compliance/download")
class ComplianceApi(Resource):
@setup_required
@login_required
@@ -30,6 +31,3 @@ class ComplianceApi(Resource):
ip=ip_address,
device_info=device_info,
)
-
-
-api.add_resource(ComplianceApi, "/compliance/download")
diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py
index 3a9530af84..b0f18c11d4 100644
--- a/api/controllers/console/datasets/data_source.py
+++ b/api/controllers/console/datasets/data_source.py
@@ -9,13 +9,13 @@ from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from core.datasource.entities.datasource_entities import DatasourceProviderType, OnlineDocumentPagesMessage
from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin
from core.indexing_runner import IndexingRunner
from core.rag.extractor.entity.datasource_type import DatasourceType
-from core.rag.extractor.entity.extract_setting import ExtractSetting
+from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo
from core.rag.extractor.notion_extractor import NotionExtractor
from extensions.ext_database import db
from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
@@ -27,6 +27,10 @@ from services.datasource_provider_service import DatasourceProviderService
from tasks.document_indexing_sync_task import document_indexing_sync_task
+@console_ns.route(
+ "/data-source/integrates",
+ "/data-source/integrates//",
+)
class DataSourceApi(Resource):
@setup_required
@login_required
@@ -109,6 +113,7 @@ class DataSourceApi(Resource):
return {"result": "success"}, 200
+@console_ns.route("/notion/pre-import/pages")
class DataSourceNotionListApi(Resource):
@setup_required
@login_required
@@ -196,6 +201,10 @@ class DataSourceNotionListApi(Resource):
return {"notion_info": {**workspace_info, "pages": pages}}, 200
+@console_ns.route(
+ "/notion/workspaces//pages///preview",
+ "/datasets/notion-indexing-estimate",
+)
class DataSourceNotionApi(Resource):
@setup_required
@login_required
@@ -248,13 +257,15 @@ class DataSourceNotionApi(Resource):
for page in notion_info["pages"]:
extract_setting = ExtractSetting(
datasource_type=DatasourceType.NOTION.value,
- notion_info={
- "credential_id": credential_id,
- "notion_workspace_id": workspace_id,
- "notion_obj_id": page["page_id"],
- "notion_page_type": page["type"],
- "tenant_id": current_user.current_tenant_id,
- },
+ notion_info=NotionInfo.model_validate(
+ {
+ "credential_id": credential_id,
+ "notion_workspace_id": workspace_id,
+ "notion_obj_id": page["page_id"],
+ "notion_page_type": page["type"],
+ "tenant_id": current_user.current_tenant_id,
+ }
+ ),
document_model=args["doc_form"],
)
extract_settings.append(extract_setting)
@@ -269,6 +280,7 @@ class DataSourceNotionApi(Resource):
return response.model_dump(), 200
+@console_ns.route("/datasets//notion/sync")
class DataSourceNotionDatasetSyncApi(Resource):
@setup_required
@login_required
@@ -285,6 +297,7 @@ class DataSourceNotionDatasetSyncApi(Resource):
return {"result": "success"}, 200
+@console_ns.route("/datasets//documents//notion/sync")
class DataSourceNotionDocumentSyncApi(Resource):
@setup_required
@login_required
@@ -301,16 +314,3 @@ class DataSourceNotionDocumentSyncApi(Resource):
raise NotFound("Document not found.")
document_indexing_sync_task.delay(dataset_id_str, document_id_str)
return {"result": "success"}, 200
-
-
-api.add_resource(DataSourceApi, "/data-source/integrates", "/data-source/integrates/<uuid:binding_id>/<string:action>")
-api.add_resource(DataSourceNotionListApi, "/notion/pre-import/pages")
-api.add_resource(
- DataSourceNotionApi,
- "/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview",
- "/datasets/notion-indexing-estimate",
-)
-api.add_resource(DataSourceNotionDatasetSyncApi, "/datasets/<uuid:dataset_id>/notion/sync")
-api.add_resource(
- DataSourceNotionDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/notion/sync"
-)
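`ExtractSetting.notion_info` now takes a typed model rather than a raw dict. The real `NotionInfo` lives in `core.rag.extractor.entity.extract_setting`; inferring only from the keys passed above, it plausibly looks like this (field types and defaults are assumptions):

```python
from pydantic import BaseModel


class NotionInfo(BaseModel):
    # Field names inferred from the call sites in this diff.
    credential_id: str | None = None
    notion_workspace_id: str
    notion_obj_id: str
    notion_page_type: str
    tenant_id: str


info = NotionInfo.model_validate(
    {
        "credential_id": "cred-1",
        "notion_workspace_id": "ws-1",
        "notion_obj_id": "page-1",
        "notion_page_type": "page",
        "tenant_id": "tenant-1",
    }
)
```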
diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py
index 2affbd6a42..284f88ff1e 100644
--- a/api/controllers/console/datasets/datasets.py
+++ b/api/controllers/console/datasets/datasets.py
@@ -1,4 +1,5 @@
-import flask_restx
+from typing import Any, cast
+
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
@@ -23,31 +24,27 @@ from core.model_runtime.entities.model_entities import ModelType
from core.provider_manager import ProviderManager
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.extractor.entity.datasource_type import DatasourceType
-from core.rag.extractor.entity.extract_setting import ExtractSetting
+from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from extensions.ext_database import db
from fields.app_fields import related_app_list
from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fields
from fields.document_fields import document_status_fields
from libs.login import login_required
+from libs.validators import validate_description_length
from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile
+from models.account import Account
from models.dataset import DatasetPermissionEnum
from models.provider_ids import ModelProviderID
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
-def _validate_name(name):
+def _validate_name(name: str) -> str:
if not name or len(name) < 1 or len(name) > 40:
raise ValueError("Name must be between 1 to 40 characters.")
return name
-def _validate_description_length(description):
- if description and len(description) > 400:
- raise ValueError("Description cannot exceed 400 characters.")
- return description
-
-
@console_ns.route("/datasets")
class DatasetListApi(Resource):
@api.doc("get_datasets")
@@ -92,7 +89,7 @@ class DatasetListApi(Resource):
for embedding_model in embedding_models:
model_names.append(f"{embedding_model.model}:{embedding_model.provider.provider}")
- data = marshal(datasets, dataset_detail_fields)
+ data = cast(list[dict[str, Any]], marshal(datasets, dataset_detail_fields))
for item in data:
# convert embedding_model_provider to plugin standard format
if item["indexing_technique"] == "high_quality" and item["embedding_model_provider"]:
@@ -147,7 +144,7 @@ class DatasetListApi(Resource):
)
parser.add_argument(
"description",
- type=_validate_description_length,
+ type=validate_description_length,
nullable=True,
required=False,
default="",
@@ -192,7 +189,7 @@ class DatasetListApi(Resource):
name=args["name"],
description=args["description"],
indexing_technique=args["indexing_technique"],
- account=current_user,
+ account=cast(Account, current_user),
permission=DatasetPermissionEnum.ONLY_ME,
provider=args["provider"],
external_knowledge_api_id=args["external_knowledge_api_id"],
@@ -224,7 +221,7 @@ class DatasetApi(Resource):
DatasetService.check_dataset_permission(dataset, current_user)
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
- data = marshal(dataset, dataset_detail_fields)
+ data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields))
if dataset.indexing_technique == "high_quality":
if dataset.embedding_model_provider:
provider_id = ModelProviderID(dataset.embedding_model_provider)
@@ -288,7 +285,7 @@ class DatasetApi(Resource):
help="type is required. Name must be between 1 to 40 characters.",
type=_validate_name,
)
- parser.add_argument("description", location="json", store_missing=False, type=_validate_description_length)
+ parser.add_argument("description", location="json", store_missing=False, type=validate_description_length)
parser.add_argument(
"indexing_technique",
type=str,
@@ -369,7 +366,7 @@ class DatasetApi(Resource):
if dataset is None:
raise NotFound("Dataset not found.")
- result_data = marshal(dataset, dataset_detail_fields)
+ result_data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields))
tenant_id = current_user.current_tenant_id
if data.get("partial_member_list") and data.get("permission") == "partial_members":
@@ -516,13 +513,15 @@ class DatasetIndexingEstimateApi(Resource):
for page in notion_info["pages"]:
extract_setting = ExtractSetting(
datasource_type=DatasourceType.NOTION.value,
- notion_info={
- "credential_id": credential_id,
- "notion_workspace_id": workspace_id,
- "notion_obj_id": page["page_id"],
- "notion_page_type": page["type"],
- "tenant_id": current_user.current_tenant_id,
- },
+ notion_info=NotionInfo.model_validate(
+ {
+ "credential_id": credential_id,
+ "notion_workspace_id": workspace_id,
+ "notion_obj_id": page["page_id"],
+ "notion_page_type": page["type"],
+ "tenant_id": current_user.current_tenant_id,
+ }
+ ),
document_model=args["doc_form"],
)
extract_settings.append(extract_setting)
@@ -531,14 +530,16 @@ class DatasetIndexingEstimateApi(Resource):
for url in website_info_list["urls"]:
extract_setting = ExtractSetting(
datasource_type=DatasourceType.WEBSITE.value,
- website_info={
- "provider": website_info_list["provider"],
- "job_id": website_info_list["job_id"],
- "url": url,
- "tenant_id": current_user.current_tenant_id,
- "mode": "crawl",
- "only_main_content": website_info_list["only_main_content"],
- },
+ website_info=WebsiteInfo.model_validate(
+ {
+ "provider": website_info_list["provider"],
+ "job_id": website_info_list["job_id"],
+ "url": url,
+ "tenant_id": current_user.current_tenant_id,
+ "mode": "crawl",
+ "only_main_content": website_info_list["only_main_content"],
+ }
+ ),
document_model=args["doc_form"],
)
extract_settings.append(extract_setting)
@@ -688,7 +689,7 @@ class DatasetApiKeyApi(Resource):
)
if current_key_count >= self.max_keys:
- flask_restx.abort(
+ api.abort(
400,
message=f"Cannot create more than {self.max_keys} API keys for this resource type.",
code="max_keys_exceeded",
@@ -733,7 +734,7 @@ class DatasetApiDeleteApi(Resource):
)
if key is None:
- flask_restx.abort(404, message="API key not found")
+ api.abort(404, message="API key not found")
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()
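The `cast(...)` wrappers around `marshal(...)` exist purely for static type checkers: flask-restx's `marshal` is loosely annotated, and `typing.cast` narrows the declared type without any runtime conversion. A tiny illustration (`untyped` stands in for `marshal`):

```python
from typing import Any, cast


def untyped() -> Any:  # stand-in for flask_restx.marshal's loose return type
    return [{"name": "demo"}]


rows = cast(list[dict[str, Any]], untyped())  # no runtime effect, pure narrowing
for row in rows:
    print(row["name"])  # type checkers now know `row` is a dict[str, Any]
```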
diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py
index e6f5daa87b..a90730e997 100644
--- a/api/controllers/console/datasets/datasets_document.py
+++ b/api/controllers/console/datasets/datasets_document.py
@@ -44,7 +44,7 @@ from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.plugin.impl.exc import PluginDaemonClientSideError
from core.rag.extractor.entity.datasource_type import DatasourceType
-from core.rag.extractor.entity.extract_setting import ExtractSetting
+from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo
from extensions.ext_database import db
from fields.document_fields import (
dataset_and_document_fields,
@@ -55,6 +55,7 @@ from fields.document_fields import (
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
+from models.account import Account
from models.dataset import DocumentPipelineExecutionLog
from services.dataset_service import DatasetService, DocumentService
from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig
@@ -304,7 +305,7 @@ class DatasetDocumentListApi(Resource):
"doc_language", type=str, default="English", required=False, nullable=False, location="json"
)
args = parser.parse_args()
- knowledge_config = KnowledgeConfig(**args)
+ knowledge_config = KnowledgeConfig.model_validate(args)
if not dataset.indexing_technique and not knowledge_config.indexing_technique:
raise ValueError("indexing_technique is required.")
@@ -394,7 +395,7 @@ class DatasetInitApi(Resource):
parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
args = parser.parse_args()
- knowledge_config = KnowledgeConfig(**args)
+ knowledge_config = KnowledgeConfig.model_validate(args)
if knowledge_config.indexing_technique == "high_quality":
if knowledge_config.embedding_model is None or knowledge_config.embedding_model_provider is None:
raise ValueError("embedding model and embedding model provider are required for high quality indexing.")
@@ -418,7 +419,9 @@ class DatasetInitApi(Resource):
try:
dataset, documents, batch = DocumentService.save_document_without_dataset_id(
- tenant_id=current_user.current_tenant_id, knowledge_config=knowledge_config, account=current_user
+ tenant_id=current_user.current_tenant_id,
+ knowledge_config=knowledge_config,
+ account=cast(Account, current_user),
)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
@@ -452,7 +455,7 @@ class DocumentIndexingEstimateApi(DocumentResource):
raise DocumentAlreadyFinishedError()
data_process_rule = document.dataset_process_rule
- data_process_rule_dict = data_process_rule.to_dict()
+ data_process_rule_dict = data_process_rule.to_dict() if data_process_rule else {}
response = {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}
@@ -514,7 +517,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
if not documents:
return {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}, 200
data_process_rule = documents[0].dataset_process_rule
- data_process_rule_dict = data_process_rule.to_dict()
+ data_process_rule_dict = data_process_rule.to_dict() if data_process_rule else {}
extract_settings = []
for document in documents:
if document.indexing_status in {"completed", "error"}:
@@ -544,13 +547,15 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
continue
extract_setting = ExtractSetting(
datasource_type=DatasourceType.NOTION.value,
- notion_info={
- "credential_id": data_source_info["credential_id"],
- "notion_workspace_id": data_source_info["notion_workspace_id"],
- "notion_obj_id": data_source_info["notion_page_id"],
- "notion_page_type": data_source_info["type"],
- "tenant_id": current_user.current_tenant_id,
- },
+ notion_info=NotionInfo.model_validate(
+ {
+ "credential_id": data_source_info["credential_id"],
+ "notion_workspace_id": data_source_info["notion_workspace_id"],
+ "notion_obj_id": data_source_info["notion_page_id"],
+ "notion_page_type": data_source_info["type"],
+ "tenant_id": current_user.current_tenant_id,
+ }
+ ),
document_model=document.doc_form,
)
extract_settings.append(extract_setting)
@@ -559,14 +564,16 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
continue
extract_setting = ExtractSetting(
datasource_type=DatasourceType.WEBSITE.value,
- website_info={
- "provider": data_source_info["provider"],
- "job_id": data_source_info["job_id"],
- "url": data_source_info["url"],
- "tenant_id": current_user.current_tenant_id,
- "mode": data_source_info["mode"],
- "only_main_content": data_source_info["only_main_content"],
- },
+ website_info=WebsiteInfo.model_validate(
+ {
+ "provider": data_source_info["provider"],
+ "job_id": data_source_info["job_id"],
+ "url": data_source_info["url"],
+ "tenant_id": current_user.current_tenant_id,
+ "mode": data_source_info["mode"],
+ "only_main_content": data_source_info["only_main_content"],
+ }
+ ),
document_model=document.doc_form,
)
extract_settings.append(extract_setting)
@@ -753,7 +760,7 @@ class DocumentApi(DocumentResource):
}
else:
dataset_process_rules = DatasetService.get_process_rules(dataset_id)
- document_process_rules = document.dataset_process_rule.to_dict()
+ document_process_rules = document.dataset_process_rule.to_dict() if document.dataset_process_rule else {}
data_source_info = document.data_source_detail_dict
response = {
"id": document.id,
@@ -1073,7 +1080,9 @@ class DocumentRenameApi(DocumentResource):
if not current_user.is_dataset_editor:
raise Forbidden()
dataset = DatasetService.get_dataset(dataset_id)
- DatasetService.check_dataset_operator_permission(current_user, dataset)
+ if not dataset:
+ raise NotFound("Dataset not found.")
+ DatasetService.check_dataset_operator_permission(cast(Account, current_user), dataset)
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
@@ -1114,6 +1123,7 @@ class WebsiteDocumentSyncApi(DocumentResource):
return {"result": "success"}, 200
+@console_ns.route("/datasets//documents//pipeline-execution-log")
class DocumentPipelineExecutionLogApi(DocumentResource):
@setup_required
@login_required
@@ -1147,29 +1157,3 @@ class DocumentPipelineExecutionLogApi(DocumentResource):
"input_data": log.input_data,
"datasource_node_id": log.datasource_node_id,
}, 200
-
-
-api.add_resource(GetProcessRuleApi, "/datasets/process-rule")
-api.add_resource(DatasetDocumentListApi, "/datasets/<uuid:dataset_id>/documents")
-api.add_resource(DatasetInitApi, "/datasets/init")
-api.add_resource(
- DocumentIndexingEstimateApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate"
-)
-api.add_resource(DocumentBatchIndexingEstimateApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate")
-api.add_resource(DocumentBatchIndexingStatusApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status")
-api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
-api.add_resource(DocumentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
-api.add_resource(
- DocumentProcessingApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>"
-)
-api.add_resource(DocumentMetadataApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
-api.add_resource(DocumentStatusApi, "/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch")
-api.add_resource(DocumentPauseApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause")
-api.add_resource(DocumentRecoverApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/resume")
-api.add_resource(DocumentRetryApi, "/datasets/<uuid:dataset_id>/retry")
-api.add_resource(DocumentRenameApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/rename")
-
-api.add_resource(WebsiteDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/website-sync")
-api.add_resource(
- DocumentPipelineExecutionLogApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/pipeline-execution-log"
-)
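`WebsiteInfo` gets the same dict-to-model treatment as `NotionInfo`. A rough sketch inferred from the call sites above (types and defaults are assumptions, not the actual model):

```python
from pydantic import BaseModel


class WebsiteInfo(BaseModel):
    # Field names inferred from the call sites in this diff.
    provider: str
    job_id: str
    url: str
    tenant_id: str
    mode: str = "crawl"
    only_main_content: bool = False
```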
diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py
index 463fd2d7ec..d6bd02483d 100644
--- a/api/controllers/console/datasets/datasets_segments.py
+++ b/api/controllers/console/datasets/datasets_segments.py
@@ -7,7 +7,7 @@ from sqlalchemy import select
from werkzeug.exceptions import Forbidden, NotFound
import services
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import (
ChildChunkDeleteIndexError,
@@ -37,6 +37,7 @@ from services.errors.chunk import ChildChunkIndexingError as ChildChunkIndexingS
from tasks.batch_create_segment_to_index_task import batch_create_segment_to_index_task
+@console_ns.route("/datasets//documents//segments")
class DatasetDocumentSegmentListApi(Resource):
@setup_required
@login_required
@@ -139,6 +140,7 @@ class DatasetDocumentSegmentListApi(Resource):
return {"result": "success"}, 204
+@console_ns.route("/datasets//documents//segment/")
class DatasetDocumentSegmentApi(Resource):
@setup_required
@login_required
@@ -193,6 +195,7 @@ class DatasetDocumentSegmentApi(Resource):
return {"result": "success"}, 200
+@console_ns.route("/datasets//documents//segment")
class DatasetDocumentSegmentAddApi(Resource):
@setup_required
@login_required
@@ -244,6 +247,7 @@ class DatasetDocumentSegmentAddApi(Resource):
return {"data": marshal(segment, segment_fields), "doc_form": document.doc_form}, 200
+@console_ns.route("/datasets//documents//segments/")
class DatasetDocumentSegmentUpdateApi(Resource):
@setup_required
@login_required
@@ -305,7 +309,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
)
args = parser.parse_args()
SegmentService.segment_create_args_validate(args, document)
- segment = SegmentService.update_segment(SegmentUpdateArgs(**args), segment, document, dataset)
+ segment = SegmentService.update_segment(SegmentUpdateArgs.model_validate(args), segment, document, dataset)
return {"data": marshal(segment, segment_fields), "doc_form": document.doc_form}, 200
@setup_required
@@ -345,6 +349,10 @@ class DatasetDocumentSegmentUpdateApi(Resource):
return {"result": "success"}, 204
+@console_ns.route(
+ "/datasets//documents//segments/batch_import",
+ "/datasets/batch_import_status/",
+)
class DatasetDocumentSegmentBatchImportApi(Resource):
@setup_required
@login_required
@@ -384,7 +392,12 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
# send batch add segments task
redis_client.setnx(indexing_cache_key, "waiting")
batch_create_segment_to_index_task.delay(
- str(job_id), upload_file_id, dataset_id, document_id, current_user.current_tenant_id, current_user.id
+ str(job_id),
+ upload_file_id,
+ dataset_id,
+ document_id,
+ current_user.current_tenant_id,
+ current_user.id,
)
except Exception as e:
return {"error": str(e)}, 500
@@ -393,7 +406,9 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
@setup_required
@login_required
@account_initialization_required
- def get(self, job_id):
+ def get(self, job_id=None, dataset_id=None, document_id=None):
+ if job_id is None:
+ raise NotFound("The job does not exist.")
job_id = str(job_id)
indexing_cache_key = f"segment_batch_import_{job_id}"
cache_result = redis_client.get(indexing_cache_key)
@@ -403,6 +418,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
return {"job_id": job_id, "job_status": cache_result.decode()}, 200
+@console_ns.route("/datasets//documents//segments//child_chunks")
class ChildChunkAddApi(Resource):
@setup_required
@login_required
@@ -457,7 +473,8 @@ class ChildChunkAddApi(Resource):
parser.add_argument("content", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
try:
- child_chunk = SegmentService.create_child_chunk(args.get("content"), segment, document, dataset)
+ content = args["content"]
+ child_chunk = SegmentService.create_child_chunk(content, segment, document, dataset)
except ChildChunkIndexingServiceError as e:
raise ChildChunkIndexingError(str(e))
return {"data": marshal(child_chunk, child_chunk_fields)}, 200
@@ -546,13 +563,17 @@ class ChildChunkAddApi(Resource):
parser.add_argument("chunks", type=list, required=True, nullable=False, location="json")
args = parser.parse_args()
try:
- chunks = [ChildChunkUpdateArgs(**chunk) for chunk in args.get("chunks")]
+ chunks_data = args["chunks"]
+ chunks = [ChildChunkUpdateArgs.model_validate(chunk) for chunk in chunks_data]
child_chunks = SegmentService.update_child_chunks(chunks, segment, document, dataset)
except ChildChunkIndexingServiceError as e:
raise ChildChunkIndexingError(str(e))
return {"data": marshal(child_chunks, child_chunk_fields)}, 200
+@console_ns.route(
+ "/datasets//documents//segments//child_chunks/"
+)
class ChildChunkUpdateApi(Resource):
@setup_required
@login_required
@@ -660,33 +681,8 @@ class ChildChunkUpdateApi(Resource):
parser.add_argument("content", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
try:
- child_chunk = SegmentService.update_child_chunk(
- args.get("content"), child_chunk, segment, document, dataset
- )
+ content = args["content"]
+ child_chunk = SegmentService.update_child_chunk(content, child_chunk, segment, document, dataset)
except ChildChunkIndexingServiceError as e:
raise ChildChunkIndexingError(str(e))
return {"data": marshal(child_chunk, child_chunk_fields)}, 200
-
-
-api.add_resource(DatasetDocumentSegmentListApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments")
-api.add_resource(
- DatasetDocumentSegmentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment/<string:action>"
-)
-api.add_resource(DatasetDocumentSegmentAddApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment")
-api.add_resource(
- DatasetDocumentSegmentUpdateApi,
- "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>",
-)
-api.add_resource(
- DatasetDocumentSegmentBatchImportApi,
- "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/batch_import",
- "/datasets/batch_import_status/<uuid:job_id>",
-)
-api.add_resource(
- ChildChunkAddApi,
- "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks",
-)
-api.add_resource(
- ChildChunkUpdateApi,
- "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>/child_chunks/<uuid:child_chunk_id>",
-)
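Binding `DatasetDocumentSegmentBatchImportApi` to two URL rules explains the new keyword defaults on `get()`: Flask only passes the view args present in the matched rule, so arguments missing from a rule must default to `None`. A standalone toy version (not Dify code):

```python
from flask import Flask
from flask_restx import Api, Namespace, Resource

app = Flask(__name__)
api = Api(app)
ns = Namespace("demo", path="/")


@ns.route(
    "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/batch_import",
    "/datasets/batch_import_status/<uuid:job_id>",
)
class BatchImportDemo(Resource):
    def get(self, job_id=None, dataset_id=None, document_id=None):
        # Matching the second rule supplies only job_id; the first supplies
        # dataset_id and document_id, leaving job_id as None.
        if job_id is None:
            return {"error": "The job does not exist."}, 404
        return {"job_id": str(job_id)}


api.add_namespace(ns)
```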
diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py
index e8f5a11b41..adf9f53523 100644
--- a/api/controllers/console/datasets/external.py
+++ b/api/controllers/console/datasets/external.py
@@ -1,3 +1,5 @@
+from typing import cast
+
from flask import request
from flask_login import current_user
from flask_restx import Resource, fields, marshal, reqparse
@@ -9,13 +11,14 @@ from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.wraps import account_initialization_required, setup_required
from fields.dataset_fields import dataset_detail_fields
from libs.login import login_required
+from models.account import Account
from services.dataset_service import DatasetService
from services.external_knowledge_service import ExternalDatasetService
from services.hit_testing_service import HitTestingService
from services.knowledge_service import ExternalDatasetTestService
-def _validate_name(name):
+def _validate_name(name: str) -> str:
if not name or len(name) < 1 or len(name) > 100:
raise ValueError("Name must be between 1 to 100 characters.")
return name
@@ -274,7 +277,7 @@ class ExternalKnowledgeHitTestingApi(Resource):
response = HitTestingService.external_retrieve(
dataset=dataset,
query=args["query"],
- account=current_user,
+ account=cast(Account, current_user),
external_retrieval_model=args["external_retrieval_model"],
metadata_filtering_conditions=args["metadata_filtering_conditions"],
)
diff --git a/api/controllers/console/datasets/hit_testing_base.py b/api/controllers/console/datasets/hit_testing_base.py
index cfbfc50873..a68e337135 100644
--- a/api/controllers/console/datasets/hit_testing_base.py
+++ b/api/controllers/console/datasets/hit_testing_base.py
@@ -1,10 +1,11 @@
import logging
+from typing import cast
from flask_login import current_user
from flask_restx import marshal, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
-import services.dataset_service
+import services
from controllers.console.app.error import (
CompletionRequestError,
ProviderModelCurrentlyNotSupportError,
@@ -20,6 +21,7 @@ from core.errors.error import (
)
from core.model_runtime.errors.invoke import InvokeError
from fields.hit_testing_fields import hit_testing_record_fields
+from models.account import Account
from services.dataset_service import DatasetService
from services.hit_testing_service import HitTestingService
@@ -59,7 +61,7 @@ class DatasetsHitTestingBase:
response = HitTestingService.retrieve(
dataset=dataset,
query=args["query"],
- account=current_user,
+ account=cast(Account, current_user),
retrieval_model=args["retrieval_model"],
external_retrieval_model=args["external_retrieval_model"],
limit=10,
diff --git a/api/controllers/console/datasets/metadata.py b/api/controllers/console/datasets/metadata.py
index 21ab5e4fe1..8438458617 100644
--- a/api/controllers/console/datasets/metadata.py
+++ b/api/controllers/console/datasets/metadata.py
@@ -4,7 +4,7 @@ from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from werkzeug.exceptions import NotFound
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required
from fields.dataset_fields import dataset_metadata_fields
from libs.login import login_required
@@ -16,6 +16,7 @@ from services.entities.knowledge_entities.knowledge_entities import (
from services.metadata_service import MetadataService
+@console_ns.route("/datasets//metadata")
class DatasetMetadataCreateApi(Resource):
@setup_required
@login_required
@@ -27,7 +28,7 @@ class DatasetMetadataCreateApi(Resource):
parser.add_argument("type", type=str, required=True, nullable=False, location="json")
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
- metadata_args = MetadataArgs(**args)
+ metadata_args = MetadataArgs.model_validate(args)
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
@@ -50,6 +51,7 @@ class DatasetMetadataCreateApi(Resource):
return MetadataService.get_dataset_metadatas(dataset), 200
+@console_ns.route("/datasets//metadata/")
class DatasetMetadataApi(Resource):
@setup_required
@login_required
@@ -60,6 +62,7 @@ class DatasetMetadataApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
+ name = args["name"]
dataset_id_str = str(dataset_id)
metadata_id_str = str(metadata_id)
@@ -68,7 +71,7 @@ class DatasetMetadataApi(Resource):
raise NotFound("Dataset not found.")
DatasetService.check_dataset_permission(dataset, current_user)
- metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name"))
+ metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, name)
return metadata, 200
@setup_required
@@ -87,6 +90,7 @@ class DatasetMetadataApi(Resource):
return {"result": "success"}, 204
+@console_ns.route("/datasets/metadata/built-in")
class DatasetMetadataBuiltInFieldApi(Resource):
@setup_required
@login_required
@@ -97,6 +101,7 @@ class DatasetMetadataBuiltInFieldApi(Resource):
return {"fields": built_in_fields}, 200
+@console_ns.route("/datasets//metadata/built-in/")
class DatasetMetadataBuiltInFieldActionApi(Resource):
@setup_required
@login_required
@@ -116,6 +121,7 @@ class DatasetMetadataBuiltInFieldActionApi(Resource):
return {"result": "success"}, 200
+@console_ns.route("/datasets//documents/metadata")
class DocumentMetadataEditApi(Resource):
@setup_required
@login_required
@@ -131,15 +137,8 @@ class DocumentMetadataEditApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument("operation_data", type=list, required=True, nullable=False, location="json")
args = parser.parse_args()
- metadata_args = MetadataOperationData(**args)
+ metadata_args = MetadataOperationData.model_validate(args)
MetadataService.update_documents_metadata(dataset, metadata_args)
return {"result": "success"}, 200
-
-
-api.add_resource(DatasetMetadataCreateApi, "/datasets/<uuid:dataset_id>/metadata")
-api.add_resource(DatasetMetadataApi, "/datasets/<uuid:dataset_id>/metadata/<uuid:metadata_id>")
-api.add_resource(DatasetMetadataBuiltInFieldApi, "/datasets/metadata/built-in")
-api.add_resource(DatasetMetadataBuiltInFieldActionApi, "/datasets/<uuid:dataset_id>/metadata/built-in/<string:action>")
-api.add_resource(DocumentMetadataEditApi, "/datasets/<uuid:dataset_id>/documents/metadata")
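
Besides moving routes onto the console_ns decorator, this file switches MetadataArgs(**args) to MetadataArgs.model_validate(args), the Pydantic v2 idiom. A small self-contained sketch with a stand-in model (the real MetadataArgs lives in services.entities.knowledge_entities.knowledge_entities):

```python
from pydantic import BaseModel, ValidationError


class MetadataArgs(BaseModel):  # illustrative stand-in
    type: str
    name: str


args = {"type": "string", "name": "author"}

# Unlike MetadataArgs(**args), model_validate accepts any mapping and
# reports all field problems together in a ValidationError instead of
# failing with a bare TypeError on the first unexpected keyword.
metadata_args = MetadataArgs.model_validate(args)

try:
    MetadataArgs.model_validate({"type": "string"})  # "name" missing
except ValidationError as exc:
    print(exc.error_count(), "validation error(s)")
```
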
diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py
index 1a845cf326..53b5a0d965 100644
--- a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py
+++ b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py
@@ -1,16 +1,16 @@
-from fastapi.encoders import jsonable_encoder
from flask import make_response, redirect, request
from flask_login import current_user
from flask_restx import Resource, reqparse
from werkzeug.exceptions import Forbidden, NotFound
from configs import dify_config
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.wraps import (
account_initialization_required,
setup_required,
)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.impl.oauth import OAuthHandler
from libs.helper import StrLen
from libs.login import login_required
@@ -19,6 +19,7 @@ from services.datasource_provider_service import DatasourceProviderService
from services.plugin.oauth_service import OAuthProxyService
+@console_ns.route("/oauth/plugin//datasource/get-authorization-url")
class DatasourcePluginOAuthAuthorizationUrl(Resource):
@setup_required
@login_required
@@ -68,6 +69,7 @@ class DatasourcePluginOAuthAuthorizationUrl(Resource):
return response
+@console_ns.route("/oauth/plugin//datasource/callback")
class DatasourceOAuthCallback(Resource):
@setup_required
def get(self, provider_id: str):
@@ -123,6 +125,7 @@ class DatasourceOAuthCallback(Resource):
return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback")
+@console_ns.route("/auth/plugin/datasource/")
class DatasourceAuth(Resource):
@setup_required
@login_required
@@ -165,6 +168,7 @@ class DatasourceAuth(Resource):
return {"result": datasources}, 200
+@console_ns.route("/auth/plugin/datasource//delete")
class DatasourceAuthDeleteApi(Resource):
@setup_required
@login_required
@@ -188,6 +192,7 @@ class DatasourceAuthDeleteApi(Resource):
return {"result": "success"}, 200
+@console_ns.route("/auth/plugin/datasource//update")
class DatasourceAuthUpdateApi(Resource):
@setup_required
@login_required
@@ -213,6 +218,7 @@ class DatasourceAuthUpdateApi(Resource):
return {"result": "success"}, 201
+@console_ns.route("/auth/plugin/datasource/list")
class DatasourceAuthListApi(Resource):
@setup_required
@login_required
@@ -225,6 +231,7 @@ class DatasourceAuthListApi(Resource):
return {"result": jsonable_encoder(datasources)}, 200
+@console_ns.route("/auth/plugin/datasource/default-list")
class DatasourceHardCodeAuthListApi(Resource):
@setup_required
@login_required
@@ -237,6 +244,7 @@ class DatasourceHardCodeAuthListApi(Resource):
return {"result": jsonable_encoder(datasources)}, 200
+@console_ns.route("/auth/plugin/datasource//custom-client")
class DatasourceAuthOauthCustomClient(Resource):
@setup_required
@login_required
@@ -271,6 +279,7 @@ class DatasourceAuthOauthCustomClient(Resource):
return {"result": "success"}, 200
+@console_ns.route("/auth/plugin/datasource//default")
class DatasourceAuthDefaultApi(Resource):
@setup_required
@login_required
@@ -291,6 +300,7 @@ class DatasourceAuthDefaultApi(Resource):
return {"result": "success"}, 200
+@console_ns.route("/auth/plugin/datasource//update-name")
class DatasourceUpdateProviderNameApi(Resource):
@setup_required
@login_required
@@ -311,52 +321,3 @@ class DatasourceUpdateProviderNameApi(Resource):
credential_id=args["credential_id"],
)
return {"result": "success"}, 200
-
-
-api.add_resource(
- DatasourcePluginOAuthAuthorizationUrl,
- "/oauth/plugin//datasource/get-authorization-url",
-)
-api.add_resource(
- DatasourceOAuthCallback,
- "/oauth/plugin//datasource/callback",
-)
-api.add_resource(
- DatasourceAuth,
- "/auth/plugin/datasource/",
-)
-
-api.add_resource(
- DatasourceAuthUpdateApi,
- "/auth/plugin/datasource//update",
-)
-
-api.add_resource(
- DatasourceAuthDeleteApi,
- "/auth/plugin/datasource//delete",
-)
-
-api.add_resource(
- DatasourceAuthListApi,
- "/auth/plugin/datasource/list",
-)
-
-api.add_resource(
- DatasourceHardCodeAuthListApi,
- "/auth/plugin/datasource/default-list",
-)
-
-api.add_resource(
- DatasourceAuthOauthCustomClient,
- "/auth/plugin/datasource//custom-client",
-)
-
-api.add_resource(
- DatasourceAuthDefaultApi,
- "/auth/plugin/datasource//default",
-)
-
-api.add_resource(
- DatasourceUpdateProviderNameApi,
- "/auth/plugin/datasource//update-name",
-)
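
This file also swaps fastapi.encoders.jsonable_encoder for Dify's own core.model_runtime.utils.encoders.jsonable_encoder, removing a FastAPI dependency from a Flask controller. Roughly what such an encoder does, as a hedged sketch (the real implementation handles many more cases, such as datetimes and custom encoders):

```python
from dataclasses import asdict, is_dataclass
from enum import Enum
from typing import Any

from pydantic import BaseModel


def jsonable_encoder_sketch(obj: Any) -> Any:
    # Recursively reduce rich objects to JSON-compatible primitives.
    if isinstance(obj, BaseModel):
        return jsonable_encoder_sketch(obj.model_dump())
    if is_dataclass(obj) and not isinstance(obj, type):
        return jsonable_encoder_sketch(asdict(obj))
    if isinstance(obj, Enum):
        return obj.value
    if isinstance(obj, dict):
        return {str(k): jsonable_encoder_sketch(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple, set)):
        return [jsonable_encoder_sketch(v) for v in obj]
    return obj  # str/int/float/bool/None pass through unchanged
```
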
diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py
index 05fa681a33..6c04cc877a 100644
--- a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py
+++ b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py
@@ -4,7 +4,7 @@ from flask_restx import ( # type: ignore
)
from werkzeug.exceptions import Forbidden
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import current_user, login_required
@@ -13,6 +13,7 @@ from models.dataset import Pipeline
from services.rag_pipeline.rag_pipeline import RagPipelineService
+@console_ns.route("/rag/pipelines//workflows/published/datasource/nodes//preview")
class DataSourceContentPreviewApi(Resource):
@setup_required
@login_required
@@ -49,9 +50,3 @@ class DataSourceContentPreviewApi(Resource):
credential_id=args.get("credential_id"),
)
return preview_content, 200
-
-
-api.add_resource(
- DataSourceContentPreviewApi,
- "/rag/pipelines//workflows/published/datasource/nodes//preview",
-)
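
The registration style throughout these files changes from api.add_resource calls at the bottom of each module to @console_ns.route decorators on the Resource classes. The two are equivalent; a runnable toy example with a throwaway namespace (console_ns here is a local stand-in, not Dify's real namespace):

```python
from flask import Flask
from flask_restx import Api, Namespace, Resource

app = Flask(__name__)
api = Api(app)
console_ns = Namespace("console", path="/console/api")
api.add_namespace(console_ns)


@console_ns.route("/ping")  # new style: route declared at the class
class PingApi(Resource):
    def get(self):
        return {"result": "pong"}


# Old style, equivalent but far from the class it registers:
# console_ns.add_resource(PingApi, "/ping")
```
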
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py
index f04b0e04c3..e021f95283 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py
@@ -4,7 +4,7 @@ from flask import request
from flask_restx import Resource, reqparse
from sqlalchemy.orm import Session
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.wraps import (
account_initialization_required,
enterprise_license_required,
@@ -20,18 +20,19 @@ from services.rag_pipeline.rag_pipeline import RagPipelineService
logger = logging.getLogger(__name__)
-def _validate_name(name):
+def _validate_name(name: str) -> str:
if not name or len(name) < 1 or len(name) > 40:
raise ValueError("Name must be between 1 to 40 characters.")
return name
-def _validate_description_length(description):
+def _validate_description_length(description: str) -> str:
if len(description) > 400:
raise ValueError("Description cannot exceed 400 characters.")
return description
+@console_ns.route("/rag/pipeline/templates")
class PipelineTemplateListApi(Resource):
@setup_required
@login_required
@@ -45,6 +46,7 @@ class PipelineTemplateListApi(Resource):
return pipeline_templates, 200
+@console_ns.route("/rag/pipeline/templates/")
class PipelineTemplateDetailApi(Resource):
@setup_required
@login_required
@@ -57,6 +59,7 @@ class PipelineTemplateDetailApi(Resource):
return pipeline_template, 200
+@console_ns.route("/rag/pipeline/customized/templates/")
class CustomizedPipelineTemplateApi(Resource):
@setup_required
@login_required
@@ -73,7 +76,7 @@ class CustomizedPipelineTemplateApi(Resource):
)
parser.add_argument(
"description",
- type=str,
+ type=_validate_description_length,
nullable=True,
required=False,
default="",
@@ -85,7 +88,7 @@ class CustomizedPipelineTemplateApi(Resource):
nullable=True,
)
args = parser.parse_args()
- pipeline_template_info = PipelineTemplateInfoEntity(**args)
+ pipeline_template_info = PipelineTemplateInfoEntity.model_validate(args)
RagPipelineService.update_customized_pipeline_template(template_id, pipeline_template_info)
return 200
@@ -112,6 +115,7 @@ class CustomizedPipelineTemplateApi(Resource):
return {"data": template.yaml_content}, 200
+@console_ns.route("/rag/pipelines//customized/publish")
class PublishCustomizedPipelineTemplateApi(Resource):
@setup_required
@login_required
@@ -129,7 +133,7 @@ class PublishCustomizedPipelineTemplateApi(Resource):
)
parser.add_argument(
"description",
- type=str,
+ type=_validate_description_length,
nullable=True,
required=False,
default="",
@@ -144,21 +148,3 @@ class PublishCustomizedPipelineTemplateApi(Resource):
rag_pipeline_service = RagPipelineService()
rag_pipeline_service.publish_customized_pipeline_template(pipeline_id, args)
return {"result": "success"}
-
-
-api.add_resource(
- PipelineTemplateListApi,
- "/rag/pipeline/templates",
-)
-api.add_resource(
- PipelineTemplateDetailApi,
- "/rag/pipeline/templates/",
-)
-api.add_resource(
- CustomizedPipelineTemplateApi,
- "/rag/pipeline/customized/templates/",
-)
-api.add_resource(
- PublishCustomizedPipelineTemplateApi,
- "/rag/pipelines//customized/publish",
-)
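
The type=_validate_description_length change above is the interesting part: flask-restx reqparse treats type as an arbitrary callable, so a validator that raises ValueError becomes a 400 response carrying that message. A minimal sketch:

```python
from flask_restx import reqparse


def _validate_description_length(description: str) -> str:
    if len(description) > 400:
        raise ValueError("Description cannot exceed 400 characters.")
    return description


parser = reqparse.RequestParser()
parser.add_argument(
    "description",
    type=_validate_description_length,  # validator doubles as converter
    required=False,
    default="",
    location="json",
)
# Inside a request context, parser.parse_args() now rejects any
# description longer than 400 characters with a 400 error.
```
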
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py
index 34faa4ec85..404aa42073 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py
@@ -1,10 +1,10 @@
-from flask_login import current_user # type: ignore # type: ignore
-from flask_restx import Resource, marshal, reqparse # type: ignore
+from flask_login import current_user
+from flask_restx import Resource, marshal, reqparse
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
import services
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.wraps import (
account_initialization_required,
@@ -20,18 +20,7 @@ from services.entities.knowledge_entities.rag_pipeline_entities import IconInfo,
from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService
-def _validate_name(name):
- if not name or len(name) < 1 or len(name) > 40:
- raise ValueError("Name must be between 1 to 40 characters.")
- return name
-
-
-def _validate_description_length(description):
- if len(description) > 400:
- raise ValueError("Description cannot exceed 400 characters.")
- return description
-
-
+@console_ns.route("/rag/pipeline/dataset")
class CreateRagPipelineDatasetApi(Resource):
@setup_required
@login_required
@@ -84,6 +73,7 @@ class CreateRagPipelineDatasetApi(Resource):
return import_info, 201
+@console_ns.route("/rag/pipeline/empty-dataset")
class CreateEmptyRagPipelineDatasetApi(Resource):
@setup_required
@login_required
@@ -108,7 +98,3 @@ class CreateEmptyRagPipelineDatasetApi(Resource):
),
)
return marshal(dataset, dataset_detail_fields), 201
-
-
-api.add_resource(CreateRagPipelineDatasetApi, "/rag/pipeline/dataset")
-api.add_resource(CreateEmptyRagPipelineDatasetApi, "/rag/pipeline/empty-dataset")
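
The _validate_name and _validate_description_length copies deleted here duplicated the ones kept in rag_pipeline.py. One way to avoid such drift, sketched with an illustrative shared-module location that is not part of this diff:

```python
# libs/validators.py (hypothetical location for the shared helpers)
def validate_name(name: str) -> str:
    if not name or len(name) < 1 or len(name) > 40:
        raise ValueError("Name must be between 1 to 40 characters.")
    return name


def validate_description_length(description: str) -> str:
    if len(description) > 400:
        raise ValueError("Description cannot exceed 400 characters.")
    return description
```
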
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py
index db07e7729a..bef6bfd13e 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py
@@ -1,24 +1,22 @@
import logging
-from typing import Any, NoReturn
+from typing import NoReturn
from flask import Response
from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.app.error import (
DraftWorkflowNotExist,
)
from controllers.console.app.workflow_draft_variable import (
- _WORKFLOW_DRAFT_VARIABLE_FIELDS,
- _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS,
+ _WORKFLOW_DRAFT_VARIABLE_FIELDS, # type: ignore[private-usage]
+ _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, # type: ignore[private-usage]
)
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvalidArgumentError, NotFoundError
-from core.variables.segment_group import SegmentGroup
-from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.variables.types import SegmentType
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from extensions.ext_database import db
@@ -34,32 +32,6 @@ from services.workflow_draft_variable_service import WorkflowDraftVariableList,
logger = logging.getLogger(__name__)
-def _convert_values_to_json_serializable_object(value: Segment) -> Any:
- if isinstance(value, FileSegment):
- return value.value.model_dump()
- elif isinstance(value, ArrayFileSegment):
- return [i.model_dump() for i in value.value]
- elif isinstance(value, SegmentGroup):
- return [_convert_values_to_json_serializable_object(i) for i in value.value]
- else:
- return value.value
-
-
-def _serialize_var_value(variable: WorkflowDraftVariable) -> Any:
- value = variable.get_value()
- # create a copy of the value to avoid affecting the model cache.
- value = value.model_copy(deep=True)
- # Refresh the url signature before returning it to client.
- if isinstance(value, FileSegment):
- file = value.value
- file.remote_url = file.generate_url()
- elif isinstance(value, ArrayFileSegment):
- files = value.value
- for file in files:
- file.remote_url = file.generate_url()
- return _convert_values_to_json_serializable_object(value)
-
-
def _create_pagination_parser():
parser = reqparse.RequestParser()
parser.add_argument(
@@ -104,13 +76,14 @@ def _api_prerequisite(f):
@account_initialization_required
@get_rag_pipeline
def wrapper(*args, **kwargs):
- if not isinstance(current_user, Account) or not current_user.is_editor:
+ if not isinstance(current_user, Account) or not current_user.has_edit_permission:
raise Forbidden()
return f(*args, **kwargs)
return wrapper
+@console_ns.route("/rag/pipelines//workflows/draft/variables")
class RagPipelineVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
@@ -168,6 +141,7 @@ def validate_node_id(node_id: str) -> NoReturn | None:
return None
+@console_ns.route("/rag/pipelines//workflows/draft/nodes//variables")
class RagPipelineNodeVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@@ -190,6 +164,7 @@ class RagPipelineNodeVariableCollectionApi(Resource):
return Response("", 204)
+@console_ns.route("/rag/pipelines//workflows/draft/variables/")
class RagPipelineVariableApi(Resource):
_PATCH_NAME_FIELD = "name"
_PATCH_VALUE_FIELD = "value"
@@ -284,6 +259,7 @@ class RagPipelineVariableApi(Resource):
return Response("", 204)
+@console_ns.route("/rag/pipelines//workflows/draft/variables//reset")
class RagPipelineVariableResetApi(Resource):
@_api_prerequisite
def put(self, pipeline: Pipeline, variable_id: str):
@@ -325,6 +301,7 @@ def _get_variable_list(pipeline: Pipeline, node_id) -> WorkflowDraftVariableList
return draft_vars
+@console_ns.route("/rag/pipelines//workflows/draft/system-variables")
class RagPipelineSystemVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@@ -332,6 +309,7 @@ class RagPipelineSystemVariableCollectionApi(Resource):
return _get_variable_list(pipeline, SYSTEM_VARIABLE_NODE_ID)
+@console_ns.route("/rag/pipelines//workflows/draft/environment-variables")
class RagPipelineEnvironmentVariableCollectionApi(Resource):
@_api_prerequisite
def get(self, pipeline: Pipeline):
@@ -364,26 +342,3 @@ class RagPipelineEnvironmentVariableCollectionApi(Resource):
)
return {"items": env_vars_list}
-
-
-api.add_resource(
- RagPipelineVariableCollectionApi,
- "/rag/pipelines//workflows/draft/variables",
-)
-api.add_resource(
- RagPipelineNodeVariableCollectionApi,
- "/rag/pipelines//workflows/draft/nodes//variables",
-)
-api.add_resource(
- RagPipelineVariableApi, "/rag/pipelines//workflows/draft/variables/"
-)
-api.add_resource(
- RagPipelineVariableResetApi, "/rag/pipelines//workflows/draft/variables//reset"
-)
-api.add_resource(
- RagPipelineSystemVariableCollectionApi, "/rag/pipelines//workflows/draft/system-variables"
-)
-api.add_resource(
- RagPipelineEnvironmentVariableCollectionApi,
- "/rag/pipelines//workflows/draft/environment-variables",
-)
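
For reference, the serialization helper deleted above (this controller now relies on the shared draft-variable machinery); the body below is the removed logic reproduced from the deleted lines, minus the URL re-signing wrapper:

```python
from typing import Any

from core.variables.segment_group import SegmentGroup
from core.variables.segments import ArrayFileSegment, FileSegment, Segment


def _convert_values_to_json_serializable_object(value: Segment) -> Any:
    # Files serialize via their pydantic model; groups recurse; plain
    # segments expose their raw value.
    if isinstance(value, FileSegment):
        return value.value.model_dump()
    elif isinstance(value, ArrayFileSegment):
        return [i.model_dump() for i in value.value]
    elif isinstance(value, SegmentGroup):
        return [_convert_values_to_json_serializable_object(i) for i in value.value]
    else:
        return value.value
```
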
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py
index a447f2848a..e0b918456b 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py
@@ -5,7 +5,7 @@ from flask_restx import Resource, marshal_with, reqparse # type: ignore
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import (
account_initialization_required,
@@ -20,6 +20,7 @@ from services.app_dsl_service import ImportStatus
from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService
+@console_ns.route("/rag/pipelines/imports")
class RagPipelineImportApi(Resource):
@setup_required
@login_required
@@ -66,6 +67,7 @@ class RagPipelineImportApi(Resource):
return result.model_dump(mode="json"), 200
+@console_ns.route("/rag/pipelines/imports//confirm")
class RagPipelineImportConfirmApi(Resource):
@setup_required
@login_required
@@ -90,6 +92,7 @@ class RagPipelineImportConfirmApi(Resource):
return result.model_dump(mode="json"), 200
+@console_ns.route("/rag/pipelines/imports//check-dependencies")
class RagPipelineImportCheckDependenciesApi(Resource):
@setup_required
@login_required
@@ -107,6 +110,7 @@ class RagPipelineImportCheckDependenciesApi(Resource):
return result.model_dump(mode="json"), 200
+@console_ns.route("/rag/pipelines//exports")
class RagPipelineExportApi(Resource):
@setup_required
@login_required
@@ -128,22 +132,3 @@ class RagPipelineExportApi(Resource):
)
return {"data": result}, 200
-
-
-# Import Rag Pipeline
-api.add_resource(
- RagPipelineImportApi,
- "/rag/pipelines/imports",
-)
-api.add_resource(
- RagPipelineImportConfirmApi,
- "/rag/pipelines/imports//confirm",
-)
-api.add_resource(
- RagPipelineImportCheckDependenciesApi,
- "/rag/pipelines/imports//check-dependencies",
-)
-api.add_resource(
- RagPipelineExportApi,
- "/rag/pipelines//exports",
-)
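
These routes expose a two-step DSL import: POST the pipeline DSL, then, if the result comes back pending, confirm it by id. A hypothetical client walk-through; the base URL, payload keys, and the status/id response fields are assumptions suggested by the ImportStatus import above, not guaranteed by this diff:

```python
import requests

base_url = "https://example.com/console/api"  # placeholder
session = requests.Session()  # assume auth cookies/headers already set

# Step 1: submit the DSL; the service may answer with a pending import.
resp = session.post(
    f"{base_url}/rag/pipelines/imports",
    json={"mode": "yaml-content", "yaml_content": "..."},  # assumed keys
)
result = resp.json()

# Step 2: a pending import must be confirmed by id before it takes effect.
if result.get("status") == "pending":
    session.post(f"{base_url}/rag/pipelines/imports/{result['id']}/confirm")
```
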
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
index 01ddb8a871..a75c121fbe 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
@@ -9,7 +9,7 @@ from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
-from controllers.console import api
+from controllers.console import console_ns
from controllers.console.app.error import (
ConversationCompletedError,
DraftWorkflowNotExist,
@@ -50,6 +50,7 @@ from services.rag_pipeline.rag_pipeline_transform_service import RagPipelineTran
logger = logging.getLogger(__name__)
+@console_ns.route("/rag/pipelines//workflows/draft")
class DraftRagPipelineApi(Resource):
@setup_required
@login_required
@@ -147,6 +148,7 @@ class DraftRagPipelineApi(Resource):
}
+@console_ns.route("/rag/pipelines//workflows/draft/iteration/nodes//run")
class RagPipelineDraftRunIterationNodeApi(Resource):
@setup_required
@login_required
@@ -181,6 +183,7 @@ class RagPipelineDraftRunIterationNodeApi(Resource):
raise InternalServerError()
+@console_ns.route("/rag/pipelines//workflows/draft/loop/nodes//run")
class RagPipelineDraftRunLoopNodeApi(Resource):
@setup_required
@login_required
@@ -215,6 +218,7 @@ class RagPipelineDraftRunLoopNodeApi(Resource):
raise InternalServerError()
+@console_ns.route("/rag/pipelines//workflows/draft/run")
class DraftRagPipelineRunApi(Resource):
@setup_required
@login_required
@@ -249,6 +253,7 @@ class DraftRagPipelineRunApi(Resource):
raise InvokeRateLimitHttpError(ex.description)
+@console_ns.route("/rag/pipelines//workflows/published/run")
class PublishedRagPipelineRunApi(Resource):
@setup_required
@login_required
@@ -369,6 +374,7 @@ class PublishedRagPipelineRunApi(Resource):
#
# return result
#
+@console_ns.route("/rag/pipelines//workflows/published/datasource/nodes//run")
class RagPipelinePublishedDatasourceNodeRunApi(Resource):
@setup_required
@login_required
@@ -411,6 +417,7 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource):
)
+@console_ns.route("/rag/pipelines//workflows/draft/datasource/nodes//run")
class RagPipelineDraftDatasourceNodeRunApi(Resource):
@setup_required
@login_required
@@ -453,6 +460,7 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource):
)
+@console_ns.route("/rag/pipelines//workflows/draft/nodes//run")
class RagPipelineDraftNodeRunApi(Resource):
@setup_required
@login_required
@@ -486,6 +494,7 @@ class RagPipelineDraftNodeRunApi(Resource):
return workflow_node_execution
+@console_ns.route("/rag/pipelines//workflow-runs/tasks//stop")
class RagPipelineTaskStopApi(Resource):
@setup_required
@login_required
@@ -504,6 +513,7 @@ class RagPipelineTaskStopApi(Resource):
return {"result": "success"}
+@console_ns.route("/rag/pipelines//workflows/publish")
class PublishedRagPipelineApi(Resource):
@setup_required
@login_required
@@ -559,6 +569,7 @@ class PublishedRagPipelineApi(Resource):
}
+@console_ns.route("/rag/pipelines//workflows/default-workflow-block-configs")
class DefaultRagPipelineBlockConfigsApi(Resource):
@setup_required
@login_required
@@ -577,6 +588,7 @@ class DefaultRagPipelineBlockConfigsApi(Resource):
return rag_pipeline_service.get_default_block_configs()
+@console_ns.route("/rag/pipelines//workflows/default-workflow-block-configs/")
class DefaultRagPipelineBlockConfigApi(Resource):
@setup_required
@login_required
@@ -608,6 +620,7 @@ class DefaultRagPipelineBlockConfigApi(Resource):
return rag_pipeline_service.get_default_block_config(node_type=block_type, filters=filters)
+@console_ns.route("/rag/pipelines//workflows")
class PublishedAllRagPipelineApi(Resource):
@setup_required
@login_required
@@ -656,6 +669,7 @@ class PublishedAllRagPipelineApi(Resource):
}
+@console_ns.route("/rag/pipelines//workflows/")
class RagPipelineByIdApi(Resource):
@setup_required
@login_required
@@ -713,6 +727,7 @@ class RagPipelineByIdApi(Resource):
return workflow
+@console_ns.route("/rag/pipelines//workflows/published/processing/parameters")
class PublishedRagPipelineSecondStepApi(Resource):
@setup_required
@login_required
@@ -738,6 +753,7 @@ class PublishedRagPipelineSecondStepApi(Resource):
}
+@console_ns.route("/rag/pipelines//workflows/published/pre-processing/parameters")
class PublishedRagPipelineFirstStepApi(Resource):
@setup_required
@login_required
@@ -763,6 +779,7 @@ class PublishedRagPipelineFirstStepApi(Resource):
}
+@console_ns.route("/rag/pipelines//workflows/draft/pre-processing/parameters")
class DraftRagPipelineFirstStepApi(Resource):
@setup_required
@login_required
@@ -788,6 +805,7 @@ class DraftRagPipelineFirstStepApi(Resource):
}
+@console_ns.route("/rag/pipelines//workflows/draft/processing/parameters")
class DraftRagPipelineSecondStepApi(Resource):
@setup_required
@login_required
@@ -814,6 +832,7 @@ class DraftRagPipelineSecondStepApi(Resource):
}
+@console_ns.route("/rag/pipelines//workflow-runs")
class RagPipelineWorkflowRunListApi(Resource):
@setup_required
@login_required
@@ -835,6 +854,7 @@ class RagPipelineWorkflowRunListApi(Resource):
return result
+@console_ns.route("/rag/pipelines//workflow-runs/")
class RagPipelineWorkflowRunDetailApi(Resource):
@setup_required
@login_required
@@ -853,6 +873,7 @@ class RagPipelineWorkflowRunDetailApi(Resource):
return workflow_run
+@console_ns.route("/rag/pipelines//workflow-runs//node-executions")
class RagPipelineWorkflowRunNodeExecutionListApi(Resource):
@setup_required
@login_required
@@ -876,6 +897,7 @@ class RagPipelineWorkflowRunNodeExecutionListApi(Resource):
return {"data": node_executions}
+@console_ns.route("/rag/pipelines/datasource-plugins")
class DatasourceListApi(Resource):
@setup_required
@login_required
@@ -891,6 +913,7 @@ class DatasourceListApi(Resource):
return jsonable_encoder(RagPipelineManageService.list_rag_pipeline_datasources(tenant_id))
+@console_ns.route("/rag/pipelines//workflows/draft/nodes//last-run")
class RagPipelineWorkflowLastRunApi(Resource):
@setup_required
@login_required
@@ -912,6 +935,7 @@ class RagPipelineWorkflowLastRunApi(Resource):
return node_exec
+@console_ns.route("/rag/pipelines/transform/datasets/")
class RagPipelineTransformApi(Resource):
@setup_required
@login_required
@@ -929,6 +953,7 @@ class RagPipelineTransformApi(Resource):
return result
+@console_ns.route("/rag/pipelines//workflows/draft/datasource/variables-inspect")
class RagPipelineDatasourceVariableApi(Resource):
@setup_required
@login_required
@@ -958,6 +983,7 @@ class RagPipelineDatasourceVariableApi(Resource):
return workflow_node_execution
+@console_ns.route("/rag/pipelines/recommended-plugins")
class RagPipelineRecommendedPluginApi(Resource):
@setup_required
@login_required
@@ -966,114 +992,3 @@ class RagPipelineRecommendedPluginApi(Resource):
rag_pipeline_service = RagPipelineService()
recommended_plugins = rag_pipeline_service.get_recommended_plugins()
return recommended_plugins
-
-
-api.add_resource(
- DraftRagPipelineApi,
- "/rag/pipelines//workflows/draft",
-)
-api.add_resource(
- DraftRagPipelineRunApi,
- "/rag/pipelines//workflows/draft/run",
-)
-api.add_resource(
- PublishedRagPipelineRunApi,
- "/rag/pipelines//workflows/published/run",
-)
-api.add_resource(
- RagPipelineTaskStopApi,
- "/rag/pipelines//workflow-runs/tasks//stop",
-)
-api.add_resource(
- RagPipelineDraftNodeRunApi,
- "/rag/pipelines//workflows/draft/nodes//run",
-)
-api.add_resource(
- RagPipelinePublishedDatasourceNodeRunApi,
- "/rag/pipelines//workflows/published/datasource/nodes//run",
-)
-
-api.add_resource(
- RagPipelineDraftDatasourceNodeRunApi,
- "/rag/pipelines//workflows/draft/datasource/nodes//run",
-)
-
-api.add_resource(
- RagPipelineDraftRunIterationNodeApi,
- "/rag/pipelines//workflows/draft/iteration/nodes//run",
-)
-
-api.add_resource(
- RagPipelineDraftRunLoopNodeApi,
- "/rag/pipelines//workflows/draft/loop/nodes//run",
-)
-
-api.add_resource(
- PublishedRagPipelineApi,
- "/rag/pipelines//workflows/publish",
-)
-api.add_resource(
- PublishedAllRagPipelineApi,
- "/rag/pipelines//workflows",
-)
-api.add_resource(
- DefaultRagPipelineBlockConfigsApi,
- "/rag/pipelines//workflows/default-workflow-block-configs",
-)
-api.add_resource(
- DefaultRagPipelineBlockConfigApi,
- "/rag/pipelines//workflows/default-workflow-block-configs/",
-)
-api.add_resource(
- RagPipelineByIdApi,
- "/rag/pipelines//workflows/",
-)
-api.add_resource(
- RagPipelineWorkflowRunListApi,
- "/rag/pipelines//workflow-runs",
-)
-api.add_resource(
- RagPipelineWorkflowRunDetailApi,
- "/rag/pipelines//workflow-runs/",
-)
-api.add_resource(
- RagPipelineWorkflowRunNodeExecutionListApi,
- "/rag/pipelines//workflow-runs//node-executions",
-)
-api.add_resource(
- DatasourceListApi,
- "/rag/pipelines/datasource-plugins",
-)
-api.add_resource(
- PublishedRagPipelineSecondStepApi,
- "/rag/pipelines//workflows/published/processing/parameters",
-)
-api.add_resource(
- PublishedRagPipelineFirstStepApi,
- "/rag/pipelines//workflows/published/pre-processing/parameters",
-)
-api.add_resource(
- DraftRagPipelineSecondStepApi,
- "/rag/pipelines//workflows/draft/processing/parameters",
-)
-api.add_resource(
- DraftRagPipelineFirstStepApi,
- "/rag/pipelines//workflows/draft/pre-processing/parameters",
-)
-api.add_resource(
- RagPipelineWorkflowLastRunApi,
- "/rag/pipelines//workflows/draft/nodes//last-run",
-)
-api.add_resource(
- RagPipelineTransformApi,
- "/rag/pipelines/transform/datasets/",
-)
-api.add_resource(
- RagPipelineDatasourceVariableApi,
- "/rag/pipelines/