Merge branch 'main' into feat/queue-based-graph-engine

commit b8ee1d4697
Author: -LAN-
Date: 2025-09-15 12:21:18 +08:00
Signature: no known key found for this signature in database (GPG Key ID: 6BA0D108DED011FF)
51 changed files with 1840 additions and 340 deletions

.github/dependabot.yml (vendored, new file)

@@ -0,0 +1,12 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/web"
schedule:
interval: "weekly"
open-pull-requests-limit: 2
- package-ecosystem: "uv"
directory: "/api"
schedule:
interval: "weekly"
open-pull-requests-limit: 2

Makefile

@@ -4,10 +4,13 @@ WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web
API_IMAGE=$(DOCKER_REGISTRY)/dify-api
VERSION=latest
# Default target - show help
.DEFAULT_GOAL := help
# Backend Development Environment Setup
.PHONY: dev-setup prepare-docker prepare-web prepare-api
# Default dev setup target
# Dev setup target
dev-setup: prepare-docker prepare-web prepare-api
@echo "✅ Backend development environment setup complete!"
@@ -46,6 +49,27 @@ dev-clean:
@rm -rf api/storage
@echo "✅ Cleanup complete"
# Backend Code Quality Commands
format:
@echo "🎨 Running ruff format..."
@uv run --project api --dev ruff format ./api
@echo "✅ Code formatting complete"
check:
@echo "🔍 Running ruff check..."
@uv run --project api --dev ruff check ./api
@echo "✅ Code check complete"
lint:
@echo "🔧 Running ruff format and check with fixes..."
@uv run --directory api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
@echo "✅ Linting complete"
type-check:
@echo "📝 Running type check with basedpyright..."
@uv run --directory api --dev basedpyright
@echo "✅ Type check complete"
# Build Docker images
build-web:
@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
@@ -90,6 +114,12 @@ help:
@echo " make prepare-api - Set up API environment"
@echo " make dev-clean - Stop Docker middleware containers"
@echo ""
@echo "Backend Code Quality:"
@echo " make format - Format code with ruff"
@echo " make check - Check code with ruff"
@echo " make lint - Format and fix code with ruff"
@echo " make type-check - Run type checking with basedpyright"
@echo ""
@echo "Docker Build Targets:"
@echo " make build-web - Build web Docker image"
@echo " make build-api - Build API Docker image"
@@ -98,4 +128,4 @@ help:
@echo " make build-push-all - Build and push all Docker images"
# Phony targets
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check

api/controllers/console/__init__.py

@@ -54,90 +54,90 @@ api.add_resource(AppImportCheckDependenciesApi, "/apps/imports/<string:app_id>/c
# Import other controllers
from . import (
admin, # pyright: ignore[reportUnusedImport]
apikey, # pyright: ignore[reportUnusedImport]
extension, # pyright: ignore[reportUnusedImport]
feature, # pyright: ignore[reportUnusedImport]
init_validate, # pyright: ignore[reportUnusedImport]
ping, # pyright: ignore[reportUnusedImport]
setup, # pyright: ignore[reportUnusedImport]
version, # pyright: ignore[reportUnusedImport]
admin,
apikey,
extension,
feature,
init_validate,
ping,
setup,
version,
)
# Import app controllers
from .app import (
advanced_prompt_template, # pyright: ignore[reportUnusedImport]
agent, # pyright: ignore[reportUnusedImport]
annotation, # pyright: ignore[reportUnusedImport]
app, # pyright: ignore[reportUnusedImport]
audio, # pyright: ignore[reportUnusedImport]
completion, # pyright: ignore[reportUnusedImport]
conversation, # pyright: ignore[reportUnusedImport]
conversation_variables, # pyright: ignore[reportUnusedImport]
generator, # pyright: ignore[reportUnusedImport]
mcp_server, # pyright: ignore[reportUnusedImport]
message, # pyright: ignore[reportUnusedImport]
model_config, # pyright: ignore[reportUnusedImport]
ops_trace, # pyright: ignore[reportUnusedImport]
site, # pyright: ignore[reportUnusedImport]
statistic, # pyright: ignore[reportUnusedImport]
workflow, # pyright: ignore[reportUnusedImport]
workflow_app_log, # pyright: ignore[reportUnusedImport]
workflow_draft_variable, # pyright: ignore[reportUnusedImport]
workflow_run, # pyright: ignore[reportUnusedImport]
workflow_statistic, # pyright: ignore[reportUnusedImport]
advanced_prompt_template,
agent,
annotation,
app,
audio,
completion,
conversation,
conversation_variables,
generator,
mcp_server,
message,
model_config,
ops_trace,
site,
statistic,
workflow,
workflow_app_log,
workflow_draft_variable,
workflow_run,
workflow_statistic,
)
# Import auth controllers
from .auth import (
activate, # pyright: ignore[reportUnusedImport]
data_source_bearer_auth, # pyright: ignore[reportUnusedImport]
data_source_oauth, # pyright: ignore[reportUnusedImport]
email_register, # pyright: ignore[reportUnusedImport]
forgot_password, # pyright: ignore[reportUnusedImport]
login, # pyright: ignore[reportUnusedImport]
oauth, # pyright: ignore[reportUnusedImport]
oauth_server, # pyright: ignore[reportUnusedImport]
activate,
data_source_bearer_auth,
data_source_oauth,
email_register,
forgot_password,
login,
oauth,
oauth_server,
)
# Import billing controllers
from .billing import billing, compliance # pyright: ignore[reportUnusedImport]
from .billing import billing, compliance
# Import datasets controllers
from .datasets import (
data_source, # pyright: ignore[reportUnusedImport]
datasets, # pyright: ignore[reportUnusedImport]
datasets_document, # pyright: ignore[reportUnusedImport]
datasets_segments, # pyright: ignore[reportUnusedImport]
external, # pyright: ignore[reportUnusedImport]
hit_testing, # pyright: ignore[reportUnusedImport]
metadata, # pyright: ignore[reportUnusedImport]
website, # pyright: ignore[reportUnusedImport]
data_source,
datasets,
datasets_document,
datasets_segments,
external,
hit_testing,
metadata,
website,
)
# Import explore controllers
from .explore import (
installed_app, # pyright: ignore[reportUnusedImport]
parameter, # pyright: ignore[reportUnusedImport]
recommended_app, # pyright: ignore[reportUnusedImport]
saved_message, # pyright: ignore[reportUnusedImport]
installed_app,
parameter,
recommended_app,
saved_message,
)
# Import tag controllers
from .tag import tags # pyright: ignore[reportUnusedImport]
from .tag import tags
# Import workspace controllers
from .workspace import (
account, # pyright: ignore[reportUnusedImport]
agent_providers, # pyright: ignore[reportUnusedImport]
endpoint, # pyright: ignore[reportUnusedImport]
load_balancing_config, # pyright: ignore[reportUnusedImport]
members, # pyright: ignore[reportUnusedImport]
model_providers, # pyright: ignore[reportUnusedImport]
models, # pyright: ignore[reportUnusedImport]
plugin, # pyright: ignore[reportUnusedImport]
tool_providers, # pyright: ignore[reportUnusedImport]
workspace, # pyright: ignore[reportUnusedImport]
account,
agent_providers,
endpoint,
load_balancing_config,
members,
model_providers,
models,
plugin,
tool_providers,
workspace,
)
# Explore Audio
@@ -212,3 +212,70 @@ api.add_resource(
)
api.add_namespace(console_ns)
__all__ = [
"account",
"activate",
"admin",
"advanced_prompt_template",
"agent",
"agent_providers",
"annotation",
"api",
"apikey",
"app",
"audio",
"billing",
"bp",
"completion",
"compliance",
"console_ns",
"conversation",
"conversation_variables",
"data_source",
"data_source_bearer_auth",
"data_source_oauth",
"datasets",
"datasets_document",
"datasets_segments",
"email_register",
"endpoint",
"extension",
"external",
"feature",
"forgot_password",
"generator",
"hit_testing",
"init_validate",
"installed_app",
"load_balancing_config",
"login",
"mcp_server",
"members",
"message",
"metadata",
"model_config",
"model_providers",
"models",
"oauth",
"oauth_server",
"ops_trace",
"parameter",
"ping",
"plugin",
"recommended_app",
"saved_message",
"setup",
"site",
"statistic",
"tags",
"tool_providers",
"version",
"website",
"workflow",
"workflow_app_log",
"workflow_draft_variable",
"workflow_run",
"workflow_statistic",
"workspace",
]
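
The block above replaces dozens of per-import "# pyright: ignore[reportUnusedImport]" comments with a single __all__ declaration. A minimal sketch of the pattern, reusing a few module names from this file (the three-module subset is illustrative):

# Submodules are imported only for their route-registration side effects.
# Listing them in __all__ marks them as intentional re-exports, so the
# per-line reportUnusedImport suppressions can be dropped.
from . import admin, apikey, ping

__all__ = ["admin", "apikey", "ping"]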

api/controllers/console/app/workflow.py

@@ -4,13 +4,13 @@ from collections.abc import Sequence
from typing import cast
from flask import abort, request
from flask_restx import Resource, inputs, marshal_with, reqparse
from flask_restx import Resource, fields, inputs, marshal_with, reqparse
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from configs import dify_config
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
@@ -58,7 +58,13 @@ def _parse_file(workflow: Workflow, files: list[dict] | None = None) -> Sequence
return file_objs
@console_ns.route("/apps/<uuid:app_id>/workflows/draft")
class DraftWorkflowApi(Resource):
@api.doc("get_draft_workflow")
@api.doc(description="Get draft workflow for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Draft workflow retrieved successfully", workflow_fields)
@api.response(404, "Draft workflow not found")
@setup_required
@login_required
@account_initialization_required
@@ -87,6 +93,23 @@ class DraftWorkflowApi(Resource):
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@api.doc("sync_draft_workflow")
@api.doc(description="Sync draft workflow configuration")
@api.expect(
api.model(
"SyncDraftWorkflowRequest",
{
"graph": fields.Raw(required=True, description="Workflow graph configuration"),
"features": fields.Raw(required=True, description="Workflow features configuration"),
"hash": fields.String(description="Workflow hash for validation"),
"environment_variables": fields.List(fields.Raw, required=True, description="Environment variables"),
"conversation_variables": fields.List(fields.Raw, description="Conversation variables"),
},
)
)
@api.response(200, "Draft workflow synced successfully", workflow_fields)
@api.response(400, "Invalid workflow configuration")
@api.response(403, "Permission denied")
def post(self, app_model: App):
"""
Sync draft workflow
@@ -160,7 +183,25 @@ class DraftWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/run")
class AdvancedChatDraftWorkflowRunApi(Resource):
@api.doc("run_advanced_chat_draft_workflow")
@api.doc(description="Run draft workflow for advanced chat application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"AdvancedChatWorkflowRunRequest",
{
"query": fields.String(required=True, description="User query"),
"inputs": fields.Raw(description="Input variables"),
"files": fields.List(fields.Raw, description="File uploads"),
"conversation_id": fields.String(description="Conversation ID"),
},
)
)
@api.response(200, "Workflow run started successfully")
@api.response(400, "Invalid request parameters")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -209,7 +250,23 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run")
class AdvancedChatDraftRunIterationNodeApi(Resource):
@api.doc("run_advanced_chat_draft_iteration_node")
@api.doc(description="Run draft workflow iteration node for advanced chat")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"IterationNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Iteration node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@@ -245,7 +302,23 @@ class AdvancedChatDraftRunIterationNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run")
class WorkflowDraftRunIterationNodeApi(Resource):
@api.doc("run_workflow_draft_iteration_node")
@api.doc(description="Run draft workflow iteration node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"WorkflowIterationNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Workflow iteration node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@@ -281,7 +354,23 @@ class WorkflowDraftRunIterationNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/loop/nodes/<string:node_id>/run")
class AdvancedChatDraftRunLoopNodeApi(Resource):
@api.doc("run_advanced_chat_draft_loop_node")
@api.doc(description="Run draft workflow loop node for advanced chat")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"LoopNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Loop node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@@ -318,7 +407,23 @@ class AdvancedChatDraftRunLoopNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/loop/nodes/<string:node_id>/run")
class WorkflowDraftRunLoopNodeApi(Resource):
@api.doc("run_workflow_draft_loop_node")
@api.doc(description="Run draft workflow loop node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"WorkflowLoopNodeRunRequest",
{
"task_id": fields.String(required=True, description="Task ID"),
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Workflow loop node run started successfully")
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@@ -355,7 +460,22 @@ class WorkflowDraftRunLoopNodeApi(Resource):
raise InternalServerError()
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/run")
class DraftWorkflowRunApi(Resource):
@api.doc("run_draft_workflow")
@api.doc(description="Run draft workflow")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
"DraftWorkflowRunRequest",
{
"inputs": fields.Raw(required=True, description="Input variables"),
"files": fields.List(fields.Raw, description="File uploads"),
},
)
)
@api.response(200, "Draft workflow run started successfully")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -394,7 +514,14 @@ class DraftWorkflowRunApi(Resource):
raise InvokeRateLimitHttpError(ex.description)
@console_ns.route("/apps/<uuid:app_id>/workflows/tasks/<string:task_id>/stop")
class WorkflowTaskStopApi(Resource):
@api.doc("stop_workflow_task")
@api.doc(description="Stop running workflow task")
@api.doc(params={"app_id": "Application ID", "task_id": "Task ID"})
@api.response(200, "Task stopped successfully")
@api.response(404, "Task not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -420,7 +547,22 @@ class WorkflowTaskStopApi(Resource):
return {"result": "success"}
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run")
class DraftWorkflowNodeRunApi(Resource):
@api.doc("run_draft_workflow_node")
@api.doc(description="Run draft workflow node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.expect(
api.model(
"DraftWorkflowNodeRunRequest",
{
"inputs": fields.Raw(description="Input variables"),
},
)
)
@api.response(200, "Node run started successfully", workflow_run_node_execution_fields)
@api.response(403, "Permission denied")
@api.response(404, "Node not found")
@setup_required
@login_required
@account_initialization_required
@@ -468,7 +610,13 @@ class DraftWorkflowNodeRunApi(Resource):
return workflow_node_execution
@console_ns.route("/apps/<uuid:app_id>/workflows/publish")
class PublishedWorkflowApi(Resource):
@api.doc("get_published_workflow")
@api.doc(description="Get published workflow for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Published workflow retrieved successfully", workflow_fields)
@api.response(404, "Published workflow not found")
@setup_required
@login_required
@account_initialization_required
@@ -540,7 +688,12 @@ class PublishedWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/default-block-configs")
class DefaultBlockConfigsApi(Resource):
@api.doc("get_default_block_configs")
@api.doc(description="Get default block configurations for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Default block configurations retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -561,7 +714,13 @@ class DefaultBlockConfigsApi(Resource):
return workflow_service.get_default_block_configs()
@console_ns.route("/apps/<uuid:app_id>/workflows/default-block-configs/<string:block_type>")
class DefaultBlockConfigApi(Resource):
@api.doc("get_default_block_config")
@api.doc(description="Get default block configuration by type")
@api.doc(params={"app_id": "Application ID", "block_type": "Block type"})
@api.response(200, "Default block configuration retrieved successfully")
@api.response(404, "Block type not found")
@setup_required
@login_required
@account_initialization_required
@@ -594,7 +753,14 @@ class DefaultBlockConfigApi(Resource):
return workflow_service.get_default_block_config(node_type=block_type, filters=filters)
@console_ns.route("/apps/<uuid:app_id>/convert-to-workflow")
class ConvertToWorkflowApi(Resource):
@api.doc("convert_to_workflow")
@api.doc(description="Convert application to workflow mode")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Application converted to workflow successfully")
@api.response(400, "Application cannot be converted")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -631,9 +797,14 @@ class ConvertToWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/config")
class WorkflowConfigApi(Resource):
"""Resource for workflow configuration."""
@api.doc("get_workflow_config")
@api.doc(description="Get workflow configuration")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Workflow configuration retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -644,7 +815,12 @@ class WorkflowConfigApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/published")
class PublishedAllWorkflowApi(Resource):
@api.doc("get_all_published_workflows")
@api.doc(description="Get all published workflows for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Published workflows retrieved successfully", workflow_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -695,7 +871,23 @@ class PublishedAllWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/<uuid:workflow_id>")
class WorkflowByIdApi(Resource):
@api.doc("update_workflow_by_id")
@api.doc(description="Update workflow by ID")
@api.doc(params={"app_id": "Application ID", "workflow_id": "Workflow ID"})
@api.expect(
api.model(
"UpdateWorkflowRequest",
{
"environment_variables": fields.List(fields.Raw, description="Environment variables"),
"conversation_variables": fields.List(fields.Raw, description="Conversation variables"),
},
)
)
@api.response(200, "Workflow updated successfully", workflow_fields)
@api.response(404, "Workflow not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -786,7 +978,14 @@ class WorkflowByIdApi(Resource):
return None, 204
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/last-run")
class DraftWorkflowNodeLastRunApi(Resource):
@api.doc("get_draft_workflow_node_last_run")
@api.doc(description="Get last run result for draft workflow node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.response(200, "Node last run retrieved successfully", workflow_run_node_execution_fields)
@api.response(404, "Node last run not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -805,73 +1004,3 @@ class DraftWorkflowNodeLastRunApi(Resource):
if node_exec is None:
raise NotFound("last run not found")
return node_exec
api.add_resource(
DraftWorkflowApi,
"/apps/<uuid:app_id>/workflows/draft",
)
api.add_resource(
WorkflowConfigApi,
"/apps/<uuid:app_id>/workflows/draft/config",
)
api.add_resource(
AdvancedChatDraftWorkflowRunApi,
"/apps/<uuid:app_id>/advanced-chat/workflows/draft/run",
)
api.add_resource(
DraftWorkflowRunApi,
"/apps/<uuid:app_id>/workflows/draft/run",
)
api.add_resource(
WorkflowTaskStopApi,
"/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop",
)
api.add_resource(
DraftWorkflowNodeRunApi,
"/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run",
)
api.add_resource(
AdvancedChatDraftRunIterationNodeApi,
"/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run",
)
api.add_resource(
WorkflowDraftRunIterationNodeApi,
"/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run",
)
api.add_resource(
AdvancedChatDraftRunLoopNodeApi,
"/apps/<uuid:app_id>/advanced-chat/workflows/draft/loop/nodes/<string:node_id>/run",
)
api.add_resource(
WorkflowDraftRunLoopNodeApi,
"/apps/<uuid:app_id>/workflows/draft/loop/nodes/<string:node_id>/run",
)
api.add_resource(
PublishedWorkflowApi,
"/apps/<uuid:app_id>/workflows/publish",
)
api.add_resource(
PublishedAllWorkflowApi,
"/apps/<uuid:app_id>/workflows",
)
api.add_resource(
DefaultBlockConfigsApi,
"/apps/<uuid:app_id>/workflows/default-workflow-block-configs",
)
api.add_resource(
DefaultBlockConfigApi,
"/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>",
)
api.add_resource(
ConvertToWorkflowApi,
"/apps/<uuid:app_id>/convert-to-workflow",
)
api.add_resource(
WorkflowByIdApi,
"/apps/<uuid:app_id>/workflows/<string:workflow_id>",
)
api.add_resource(
DraftWorkflowNodeLastRunApi,
"/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/last-run",
)
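
The registrations above are deleted because routing now happens declaratively: each resource class carries a @console_ns.route(...) decorator, with api.doc/api.response supplying the Swagger metadata. A minimal sketch of the decorator style, assuming a flask_restx Namespace like the console_ns used in this file (the resource body is illustrative):

from flask_restx import Namespace, Resource

console_ns = Namespace("console", path="/console")

@console_ns.route("/apps/<uuid:app_id>/workflows/draft")
class DraftWorkflowApi(Resource):
    @console_ns.doc("get_draft_workflow")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Draft workflow retrieved successfully")
    def get(self, app_id):
        # The route decorator registers this resource on the namespace at
        # class-definition time, so a separate api.add_resource(...) call
        # becomes redundant.
        return {"app_id": str(app_id)}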

api/controllers/console/app/workflow_app_log.py

@@ -3,7 +3,7 @@ from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range
from sqlalchemy.orm import Session
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.workflow.enums import WorkflowExecutionStatus
@@ -15,7 +15,24 @@ from models.model import AppMode
from services.workflow_app_service import WorkflowAppService
@console_ns.route("/apps/<uuid:app_id>/workflow-app-logs")
class WorkflowAppLogApi(Resource):
@api.doc("get_workflow_app_logs")
@api.doc(description="Get workflow application execution logs")
@api.doc(params={"app_id": "Application ID"})
@api.doc(
params={
"keyword": "Search keyword for filtering logs",
"status": "Filter by execution status (succeeded, failed, stopped, partial-succeeded)",
"created_at__before": "Filter logs created before this timestamp",
"created_at__after": "Filter logs created after this timestamp",
"created_by_end_user_session_id": "Filter by end user session ID",
"created_by_account": "Filter by account",
"page": "Page number (1-99999)",
"limit": "Number of items per page (1-100)",
}
)
@api.response(200, "Workflow app logs retrieved successfully", workflow_app_log_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -78,6 +95,3 @@ class WorkflowAppLogApi(Resource):
)
return workflow_app_log_pagination
api.add_resource(WorkflowAppLogApi, "/apps/<uuid:app_id>/workflow-app-logs")

api/controllers/console/app/workflow_draft_variable.py

@@ -6,7 +6,7 @@ from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqpars
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.app.error import (
DraftWorkflowNotExist,
)
@@ -145,7 +145,13 @@ def _api_prerequisite(f):
return wrapper
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables")
class WorkflowVariableCollectionApi(Resource):
@api.doc("get_workflow_variables")
@api.doc(description="Get draft workflow variables")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"page": "Page number (1-100000)", "limit": "Number of items per page (1-100)"})
@api.response(200, "Workflow variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
def get(self, app_model: App):
@@ -174,6 +180,9 @@ class WorkflowVariableCollectionApi(Resource):
return workflow_vars
@api.doc("delete_workflow_variables")
@api.doc(description="Delete all draft workflow variables")
@api.response(204, "Workflow variables deleted successfully")
@_api_prerequisite
def delete(self, app_model: App):
draft_var_srv = WorkflowDraftVariableService(
@@ -202,7 +211,12 @@ def validate_node_id(node_id: str) -> NoReturn | None:
return None
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
class NodeVariableCollectionApi(Resource):
@api.doc("get_node_variables")
@api.doc(description="Get variables for a specific node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.response(200, "Node variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App, node_id: str):
@@ -215,6 +229,9 @@ class NodeVariableCollectionApi(Resource):
return node_vars
@api.doc("delete_node_variables")
@api.doc(description="Delete all variables for a specific node")
@api.response(204, "Node variables deleted successfully")
@_api_prerequisite
def delete(self, app_model: App, node_id: str):
validate_node_id(node_id)
@@ -224,10 +241,16 @@ class NodeVariableCollectionApi(Resource):
return Response("", 204)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>")
class VariableApi(Resource):
_PATCH_NAME_FIELD = "name"
_PATCH_VALUE_FIELD = "value"
@api.doc("get_variable")
@api.doc(description="Get a specific workflow variable")
@api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
@api.response(200, "Variable retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(404, "Variable not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
def get(self, app_model: App, variable_id: str):
@@ -241,6 +264,19 @@ class VariableApi(Resource):
raise NotFoundError(description=f"variable not found, id={variable_id}")
return variable
@api.doc("update_variable")
@api.doc(description="Update a workflow variable")
@api.expect(
api.model(
"UpdateVariableRequest",
{
"name": fields.String(description="Variable name"),
"value": fields.Raw(description="Variable value"),
},
)
)
@api.response(200, "Variable updated successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(404, "Variable not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
def patch(self, app_model: App, variable_id: str):
@@ -303,6 +339,10 @@ class VariableApi(Resource):
db.session.commit()
return variable
@api.doc("delete_variable")
@api.doc(description="Delete a workflow variable")
@api.response(204, "Variable deleted successfully")
@api.response(404, "Variable not found")
@_api_prerequisite
def delete(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
@@ -318,7 +358,14 @@ class VariableApi(Resource):
return Response("", 204)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")
class VariableResetApi(Resource):
@api.doc("reset_variable")
@api.doc(description="Reset a workflow variable to its default value")
@api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
@api.response(200, "Variable reset successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(204, "Variable reset (no content)")
@api.response(404, "Variable not found")
@_api_prerequisite
def put(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
@@ -359,7 +406,13 @@ def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
return draft_vars
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/conversation-variables")
class ConversationVariableCollectionApi(Resource):
@api.doc("get_conversation_variables")
@api.doc(description="Get conversation variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Conversation variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@api.response(404, "Draft workflow not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App):
@@ -375,14 +428,25 @@ class ConversationVariableCollectionApi(Resource):
return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/system-variables")
class SystemVariableCollectionApi(Resource):
@api.doc("get_system_variables")
@api.doc(description="Get system variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "System variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App):
return _get_variable_list(app_model, SYSTEM_VARIABLE_NODE_ID)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/environment-variables")
class EnvironmentVariableCollectionApi(Resource):
@api.doc("get_environment_variables")
@api.doc(description="Get environment variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Environment variables retrieved successfully")
@api.response(404, "Draft workflow not found")
@_api_prerequisite
def get(self, app_model: App):
"""
@@ -414,16 +478,3 @@ class EnvironmentVariableCollectionApi(Resource):
)
return {"items": env_vars_list}
api.add_resource(
WorkflowVariableCollectionApi,
"/apps/<uuid:app_id>/workflows/draft/variables",
)
api.add_resource(NodeVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
api.add_resource(VariableApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>")
api.add_resource(VariableResetApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")
api.add_resource(ConversationVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/conversation-variables")
api.add_resource(SystemVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/system-variables")
api.add_resource(EnvironmentVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/environment-variables")
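
Request bodies throughout this commit are documented with api.expect(api.model(...)). By default flask-restx only renders these models in Swagger; enforcement is opt-in. A sketch of how the UpdateVariableRequest model from above would also validate payloads (validate=True is an illustrative addition, not part of this diff):

from flask_restx import Api, Resource, fields

api = Api()
update_variable_request = api.model(
    "UpdateVariableRequest",
    {
        "name": fields.String(description="Variable name"),
        "value": fields.Raw(description="Variable value"),
    },
)

class VariableApi(Resource):
    # With validate=True (or RESTX_VALIDATE=True in the app config),
    # flask-restx rejects payloads that do not match the model; without it,
    # expect() affects the generated documentation only.
    @api.expect(update_variable_request, validate=True)
    def patch(self, variable_id):
        ...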

api/controllers/console/app/workflow_run.py

@@ -4,7 +4,7 @@ from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from fields.workflow_run_fields import (
@@ -19,7 +19,13 @@ from models import Account, App, AppMode, EndUser
from services.workflow_run_service import WorkflowRunService
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs")
class AdvancedChatAppWorkflowRunListApi(Resource):
@api.doc("get_advanced_chat_workflow_runs")
@api.doc(description="Get advanced chat workflow run list")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@api.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -40,7 +46,13 @@ class AdvancedChatAppWorkflowRunListApi(Resource):
return result
@console_ns.route("/apps/<uuid:app_id>/workflow-runs")
class WorkflowRunListApi(Resource):
@api.doc("get_workflow_runs")
@api.doc(description="Get workflow run list")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@api.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields)
@setup_required
@login_required
@account_initialization_required
@@ -61,7 +73,13 @@ class WorkflowRunListApi(Resource):
return result
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
class WorkflowRunDetailApi(Resource):
@api.doc("get_workflow_run_detail")
@api.doc(description="Get workflow run detail")
@api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@api.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_fields)
@api.response(404, "Workflow run not found")
@setup_required
@login_required
@account_initialization_required
@@ -79,7 +97,13 @@ class WorkflowRunDetailApi(Resource):
return workflow_run
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
class WorkflowRunNodeExecutionListApi(Resource):
@api.doc("get_workflow_run_node_executions")
@api.doc(description="Get workflow run node execution list")
@api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@api.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_fields)
@api.response(404, "Workflow run not found")
@setup_required
@login_required
@account_initialization_required
@@ -100,9 +124,3 @@ class WorkflowRunNodeExecutionListApi(Resource):
)
return {"data": node_executions}
api.add_resource(AdvancedChatAppWorkflowRunListApi, "/apps/<uuid:app_id>/advanced-chat/workflow-runs")
api.add_resource(WorkflowRunListApi, "/apps/<uuid:app_id>/workflow-runs")
api.add_resource(WorkflowRunDetailApi, "/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
api.add_resource(WorkflowRunNodeExecutionListApi, "/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
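
The list endpoints above document last_id/limit through api.doc(params=...), while the values themselves are parsed per-handler with reqparse. A sketch of that pairing under the same 1-100 limit window (the default of 20 is assumed for illustration, not stated in the diff):

from flask_restx import Resource, reqparse
from flask_restx.inputs import int_range

parser = reqparse.RequestParser()
parser.add_argument("last_id", type=str, location="args")
# int_range(1, 100) rejects limit values outside the documented window.
parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")

class WorkflowRunListApi(Resource):
    def get(self, app_id):
        args = parser.parse_args()  # enforced at request time
        ...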

api/controllers/console/app/workflow_statistic.py

@@ -7,7 +7,7 @@ from flask import jsonify
from flask_login import current_user
from flask_restx import Resource, reqparse
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
@@ -17,7 +17,13 @@ from models.enums import WorkflowRunTriggeredFrom
from models.model import AppMode
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
class WorkflowDailyRunsStatistic(Resource):
@api.doc("get_workflow_daily_runs_statistic")
@api.doc(description="Get workflow daily runs statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily runs statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@@ -79,7 +85,13 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-terminals")
class WorkflowDailyTerminalsStatistic(Resource):
@api.doc("get_workflow_daily_terminals_statistic")
@api.doc(description="Get workflow daily terminals statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily terminals statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@@ -141,7 +153,13 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/token-costs")
class WorkflowDailyTokenCostStatistic(Resource):
@api.doc("get_workflow_daily_token_cost_statistic")
@api.doc(description="Get workflow daily token cost statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily token cost statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@@ -208,7 +226,13 @@ WHERE
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/average-app-interactions")
class WorkflowAverageAppInteractionStatistic(Resource):
@api.doc("get_workflow_average_app_interaction_statistic")
@api.doc(description="Get workflow average app interaction statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Average app interaction statistics retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -285,11 +309,3 @@ GROUP BY
)
return jsonify({"data": response_data})
api.add_resource(WorkflowDailyRunsStatistic, "/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
api.add_resource(WorkflowDailyTerminalsStatistic, "/apps/<uuid:app_id>/workflow/statistics/daily-terminals")
api.add_resource(WorkflowDailyTokenCostStatistic, "/apps/<uuid:app_id>/workflow/statistics/token-costs")
api.add_resource(
WorkflowAverageAppInteractionStatistic, "/apps/<uuid:app_id>/workflow/statistics/average-app-interactions"
)

api/controllers/console/datasets/datasets.py

@@ -1,13 +1,13 @@
import flask_restx
from flask import request
from flask_login import current_user
from flask_restx import Resource, marshal, marshal_with, reqparse
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from sqlalchemy import select
from werkzeug.exceptions import Forbidden, NotFound
import services
from configs import dify_config
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.apikey import api_key_fields, api_key_list
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError
@@ -48,7 +48,21 @@ def _validate_description_length(description):
return description
@console_ns.route("/datasets")
class DatasetListApi(Resource):
@api.doc("get_datasets")
@api.doc(description="Get list of datasets")
@api.doc(
params={
"page": "Page number (default: 1)",
"limit": "Number of items per page (default: 20)",
"ids": "Filter by dataset IDs (list)",
"keyword": "Search keyword",
"tag_ids": "Filter by tag IDs (list)",
"include_all": "Include all datasets (default: false)",
}
)
@api.response(200, "Datasets retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -100,6 +114,24 @@ class DatasetListApi(Resource):
response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
return response, 200
@api.doc("create_dataset")
@api.doc(description="Create a new dataset")
@api.expect(
api.model(
"CreateDatasetRequest",
{
"name": fields.String(required=True, description="Dataset name (1-40 characters)"),
"description": fields.String(description="Dataset description (max 400 characters)"),
"indexing_technique": fields.String(description="Indexing technique"),
"permission": fields.String(description="Dataset permission"),
"provider": fields.String(description="Provider"),
"external_knowledge_api_id": fields.String(description="External knowledge API ID"),
"external_knowledge_id": fields.String(description="External knowledge ID"),
},
)
)
@api.response(201, "Dataset created successfully")
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@@ -172,7 +204,14 @@ class DatasetListApi(Resource):
return marshal(dataset, dataset_detail_fields), 201
@console_ns.route("/datasets/<uuid:dataset_id>")
class DatasetApi(Resource):
@api.doc("get_dataset")
@api.doc(description="Get dataset details")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Dataset retrieved successfully", dataset_detail_fields)
@api.response(404, "Dataset not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -215,6 +254,23 @@ class DatasetApi(Resource):
return data, 200
@api.doc("update_dataset")
@api.doc(description="Update dataset details")
@api.expect(
api.model(
"UpdateDatasetRequest",
{
"name": fields.String(description="Dataset name"),
"description": fields.String(description="Dataset description"),
"permission": fields.String(description="Dataset permission"),
"indexing_technique": fields.String(description="Indexing technique"),
"external_retrieval_model": fields.Raw(description="External retrieval model settings"),
},
)
)
@api.response(200, "Dataset updated successfully", dataset_detail_fields)
@api.response(404, "Dataset not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -344,7 +400,12 @@ class DatasetApi(Resource):
raise DatasetInUseError()
@console_ns.route("/datasets/<uuid:dataset_id>/use-check")
class DatasetUseCheckApi(Resource):
@api.doc("check_dataset_use")
@api.doc(description="Check if dataset is in use")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Dataset use status retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -355,7 +416,12 @@ class DatasetUseCheckApi(Resource):
return {"is_using": dataset_is_using}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/queries")
class DatasetQueryApi(Resource):
@api.doc("get_dataset_queries")
@api.doc(description="Get dataset query history")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Query history retrieved successfully", dataset_query_detail_fields)
@setup_required
@login_required
@account_initialization_required
@@ -385,7 +451,11 @@ class DatasetQueryApi(Resource):
return response, 200
@console_ns.route("/datasets/indexing-estimate")
class DatasetIndexingEstimateApi(Resource):
@api.doc("estimate_dataset_indexing")
@api.doc(description="Estimate dataset indexing cost")
@api.response(200, "Indexing estimate calculated successfully")
@setup_required
@login_required
@account_initialization_required
@@ -486,7 +556,12 @@ class DatasetIndexingEstimateApi(Resource):
return response.model_dump(), 200
@console_ns.route("/datasets/<uuid:dataset_id>/related-apps")
class DatasetRelatedAppListApi(Resource):
@api.doc("get_dataset_related_apps")
@api.doc(description="Get applications related to dataset")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Related apps retrieved successfully", related_app_list)
@setup_required
@login_required
@account_initialization_required
@@ -513,7 +588,12 @@ class DatasetRelatedAppListApi(Resource):
return {"data": related_apps, "total": len(related_apps)}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/indexing-status")
class DatasetIndexingStatusApi(Resource):
@api.doc("get_dataset_indexing_status")
@api.doc(description="Get dataset indexing status")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Indexing status retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -560,11 +640,15 @@ class DatasetIndexingStatusApi(Resource):
return data, 200
@console_ns.route("/datasets/api-keys")
class DatasetApiKeyApi(Resource):
max_keys = 10
token_prefix = "dataset-"
resource_type = "dataset"
@api.doc("get_dataset_api_keys")
@api.doc(description="Get dataset API keys")
@api.response(200, "API keys retrieved successfully", api_key_list)
@setup_required
@login_required
@account_initialization_required
@@ -609,9 +693,14 @@ class DatasetApiKeyApi(Resource):
return api_token, 200
@console_ns.route("/datasets/api-keys/<uuid:api_key_id>")
class DatasetApiDeleteApi(Resource):
resource_type = "dataset"
@api.doc("delete_dataset_api_key")
@api.doc(description="Delete dataset API key")
@api.doc(params={"api_key_id": "API key ID"})
@api.response(204, "API key deleted successfully")
@setup_required
@login_required
@account_initialization_required
@@ -641,7 +730,11 @@ class DatasetApiDeleteApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/datasets/api-base-info")
class DatasetApiBaseUrlApi(Resource):
@api.doc("get_dataset_api_base_info")
@api.doc(description="Get dataset API base information")
@api.response(200, "API base info retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -649,7 +742,11 @@ class DatasetApiBaseUrlApi(Resource):
return {"api_base_url": (dify_config.SERVICE_API_URL or request.host_url.rstrip("/")) + "/v1"}
@console_ns.route("/datasets/retrieval-setting")
class DatasetRetrievalSettingApi(Resource):
@api.doc("get_dataset_retrieval_setting")
@api.doc(description="Get dataset retrieval settings")
@api.response(200, "Retrieval settings retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -700,7 +797,12 @@ class DatasetRetrievalSettingApi(Resource):
raise ValueError(f"Unsupported vector db type {vector_type}.")
@console_ns.route("/datasets/retrieval-setting/<string:vector_type>")
class DatasetRetrievalSettingMockApi(Resource):
@api.doc("get_dataset_retrieval_setting_mock")
@api.doc(description="Get mock dataset retrieval settings by vector type")
@api.doc(params={"vector_type": "Vector store type"})
@api.response(200, "Mock retrieval settings retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -749,7 +851,13 @@ class DatasetRetrievalSettingMockApi(Resource):
raise ValueError(f"Unsupported vector db type {vector_type}.")
@console_ns.route("/datasets/<uuid:dataset_id>/error-docs")
class DatasetErrorDocs(Resource):
@api.doc("get_dataset_error_docs")
@api.doc(description="Get dataset error documents")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Error documents retrieved successfully")
@api.response(404, "Dataset not found")
@setup_required
@login_required
@account_initialization_required
@@ -763,7 +871,14 @@ class DatasetErrorDocs(Resource):
return {"data": [marshal(item, document_status_fields) for item in results], "total": len(results)}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/permission-part-users")
class DatasetPermissionUserListApi(Resource):
@api.doc("get_dataset_permission_users")
@api.doc(description="Get dataset permission user list")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Permission users retrieved successfully")
@api.response(404, "Dataset not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -784,7 +899,13 @@ class DatasetPermissionUserListApi(Resource):
}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/auto-disable-logs")
class DatasetAutoDisableLogApi(Resource):
@api.doc("get_dataset_auto_disable_logs")
@api.doc(description="Get dataset auto disable logs")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Auto disable logs retrieved successfully")
@api.response(404, "Dataset not found")
@setup_required
@login_required
@account_initialization_required
@@ -794,20 +915,3 @@ class DatasetAutoDisableLogApi(Resource):
if dataset is None:
raise NotFound("Dataset not found.")
return DatasetService.get_dataset_auto_disable_logs(dataset_id_str), 200
api.add_resource(DatasetListApi, "/datasets")
api.add_resource(DatasetApi, "/datasets/<uuid:dataset_id>")
api.add_resource(DatasetUseCheckApi, "/datasets/<uuid:dataset_id>/use-check")
api.add_resource(DatasetQueryApi, "/datasets/<uuid:dataset_id>/queries")
api.add_resource(DatasetErrorDocs, "/datasets/<uuid:dataset_id>/error-docs")
api.add_resource(DatasetIndexingEstimateApi, "/datasets/indexing-estimate")
api.add_resource(DatasetRelatedAppListApi, "/datasets/<uuid:dataset_id>/related-apps")
api.add_resource(DatasetIndexingStatusApi, "/datasets/<uuid:dataset_id>/indexing-status")
api.add_resource(DatasetApiKeyApi, "/datasets/api-keys")
api.add_resource(DatasetApiDeleteApi, "/datasets/api-keys/<uuid:api_key_id>")
api.add_resource(DatasetApiBaseUrlApi, "/datasets/api-base-info")
api.add_resource(DatasetRetrievalSettingApi, "/datasets/retrieval-setting")
api.add_resource(DatasetRetrievalSettingMockApi, "/datasets/retrieval-setting/<string:vector_type>")
api.add_resource(DatasetPermissionUserListApi, "/datasets/<uuid:dataset_id>/permission-part-users")
api.add_resource(DatasetAutoDisableLogApi, "/datasets/<uuid:dataset_id>/auto-disable-logs")

api/controllers/console/datasets/datasets_document.py

@@ -5,12 +5,12 @@ from typing import Literal, cast
from flask import request
from flask_login import current_user
from flask_restx import Resource, marshal, marshal_with, reqparse
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from sqlalchemy import asc, desc, select
from werkzeug.exceptions import Forbidden, NotFound
import services
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.app.error import (
ProviderModelCurrentlyNotSupportError,
ProviderNotInitializeError,
@@ -98,7 +98,12 @@ class DocumentResource(Resource):
return documents
@console_ns.route("/datasets/process-rule")
class GetProcessRuleApi(Resource):
@api.doc("get_process_rule")
@api.doc(description="Get dataset document processing rules")
@api.doc(params={"document_id": "Document ID (optional)"})
@api.response(200, "Process rules retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -140,7 +145,21 @@ class GetProcessRuleApi(Resource):
return {"mode": mode, "rules": rules, "limits": limits}
@console_ns.route("/datasets/<uuid:dataset_id>/documents")
class DatasetDocumentListApi(Resource):
@api.doc("get_dataset_documents")
@api.doc(description="Get documents in a dataset")
@api.doc(
params={
"dataset_id": "Dataset ID",
"page": "Page number (default: 1)",
"limit": "Number of items per page (default: 20)",
"keyword": "Search keyword",
"sort": "Sort order (default: -created_at)",
"fetch": "Fetch full details (default: false)",
}
)
@api.response(200, "Documents retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@@ -324,7 +343,23 @@ class DatasetDocumentListApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/datasets/init")
class DatasetInitApi(Resource):
@api.doc("init_dataset")
@api.doc(description="Initialize dataset with documents")
@api.expect(
api.model(
"DatasetInitRequest",
{
"upload_file_id": fields.String(required=True, description="Upload file ID"),
"indexing_technique": fields.String(description="Indexing technique"),
"process_rule": fields.Raw(description="Processing rules"),
"data_source": fields.Raw(description="Data source configuration"),
},
)
)
@api.response(201, "Dataset initialized successfully", dataset_and_document_fields)
@api.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@@ -394,7 +429,14 @@ class DatasetInitApi(Resource):
return response
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate")
class DocumentIndexingEstimateApi(DocumentResource):
@api.doc("estimate_document_indexing")
@api.doc(description="Estimate document indexing cost")
@api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@api.response(200, "Indexing estimate calculated successfully")
@api.response(404, "Document not found")
@api.response(400, "Document already finished")
@setup_required
@login_required
@account_initialization_required
@@ -593,7 +635,13 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
return data
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
class DocumentIndexingStatusApi(DocumentResource):
@api.doc("get_document_indexing_status")
@api.doc(description="Get document indexing status")
@api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@api.response(200, "Indexing status retrieved successfully")
@api.response(404, "Document not found")
@setup_required
@login_required
@account_initialization_required
@@ -635,9 +683,21 @@ class DocumentIndexingStatusApi(DocumentResource):
return marshal(document_dict, document_status_fields)
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
class DocumentApi(DocumentResource):
METADATA_CHOICES = {"all", "only", "without"}
@api.doc("get_document")
@api.doc(description="Get document details")
@api.doc(
params={
"dataset_id": "Dataset ID",
"document_id": "Document ID",
"metadata": "Metadata inclusion (all/only/without)",
}
)
@api.response(200, "Document retrieved successfully")
@api.response(404, "Document not found")
@setup_required
@login_required
@account_initialization_required
@@ -746,7 +806,16 @@ class DocumentApi(DocumentResource):
return {"result": "success"}, 204
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>")
class DocumentProcessingApi(DocumentResource):
@api.doc("update_document_processing")
@api.doc(description="Update document processing status (pause/resume)")
@api.doc(
params={"dataset_id": "Dataset ID", "document_id": "Document ID", "action": "Action to perform (pause/resume)"}
)
@api.response(200, "Processing status updated successfully")
@api.response(404, "Document not found")
@api.response(400, "Invalid action")
@setup_required
@login_required
@account_initialization_required
@@ -781,7 +850,23 @@ class DocumentProcessingApi(DocumentResource):
return {"result": "success"}, 200
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
class DocumentMetadataApi(DocumentResource):
@api.doc("update_document_metadata")
@api.doc(description="Update document metadata")
@api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@api.expect(
api.model(
"UpdateDocumentMetadataRequest",
{
"doc_type": fields.String(description="Document type"),
"doc_metadata": fields.Raw(description="Document metadata"),
},
)
)
@api.response(200, "Document metadata updated successfully")
@api.response(404, "Document not found")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@@ -1015,26 +1100,3 @@ class DocumentMetadataApi(DocumentResource):
DocumentService.sync_website_document(dataset_id, document)
return {"result": "success"}, 200
api.add_resource(GetProcessRuleApi, "/datasets/process-rule")
api.add_resource(DatasetDocumentListApi, "/datasets/<uuid:dataset_id>/documents")
api.add_resource(DatasetInitApi, "/datasets/init")
api.add_resource(
DocumentIndexingEstimateApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate"
)
api.add_resource(DocumentBatchIndexingEstimateApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate")
api.add_resource(DocumentBatchIndexingStatusApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status")
api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
api.add_resource(DocumentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
api.add_resource(
DocumentProcessingApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>"
)
api.add_resource(DocumentMetadataApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
api.add_resource(DocumentStatusApi, "/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch")
api.add_resource(DocumentPauseApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause")
api.add_resource(DocumentRecoverApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/resume")
api.add_resource(DocumentRetryApi, "/datasets/<uuid:dataset_id>/retry")
api.add_resource(DocumentRenameApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/rename")
api.add_resource(WebsiteDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/website-sync")

View File

@ -1,10 +1,10 @@
from flask import request
from flask_login import current_user
from flask_restx import Resource, marshal, reqparse
from flask_restx import Resource, fields, marshal, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.wraps import account_initialization_required, setup_required
from fields.dataset_fields import dataset_detail_fields
@ -21,7 +21,18 @@ def _validate_name(name):
return name
@console_ns.route("/datasets/external-knowledge-api")
class ExternalApiTemplateListApi(Resource):
@api.doc("get_external_api_templates")
@api.doc(description="Get external knowledge API templates")
@api.doc(
params={
"page": "Page number (default: 1)",
"limit": "Number of items per page (default: 20)",
"keyword": "Search keyword",
}
)
@api.response(200, "External API templates retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -79,7 +90,13 @@ class ExternalApiTemplateListApi(Resource):
return external_knowledge_api.to_dict(), 201
@console_ns.route("/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>")
class ExternalApiTemplateApi(Resource):
@api.doc("get_external_api_template")
@api.doc(description="Get external knowledge API template details")
@api.doc(params={"external_knowledge_api_id": "External knowledge API ID"})
@api.response(200, "External API template retrieved successfully")
@api.response(404, "Template not found")
@setup_required
@login_required
@account_initialization_required
@ -138,7 +155,12 @@ class ExternalApiTemplateApi(Resource):
return {"result": "success"}, 204
@console_ns.route("/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>/use-check")
class ExternalApiUseCheckApi(Resource):
@api.doc("check_external_api_usage")
@api.doc(description="Check if external knowledge API is being used")
@api.doc(params={"external_knowledge_api_id": "External knowledge API ID"})
@api.response(200, "Usage check completed successfully")
@setup_required
@login_required
@account_initialization_required
@ -151,7 +173,24 @@ class ExternalApiUseCheckApi(Resource):
return {"is_using": external_knowledge_api_is_using, "count": count}, 200
@console_ns.route("/datasets/external")
class ExternalDatasetCreateApi(Resource):
@api.doc("create_external_dataset")
@api.doc(description="Create external knowledge dataset")
@api.expect(
api.model(
"CreateExternalDatasetRequest",
{
"external_knowledge_api_id": fields.String(required=True, description="External knowledge API ID"),
"external_knowledge_id": fields.String(required=True, description="External knowledge ID"),
"name": fields.String(required=True, description="Dataset name"),
"description": fields.String(description="Dataset description"),
},
)
)
@api.response(201, "External dataset created successfully", dataset_detail_fields)
@api.response(400, "Invalid parameters")
@api.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@ -191,7 +230,24 @@ class ExternalDatasetCreateApi(Resource):
return marshal(dataset, dataset_detail_fields), 201
@console_ns.route("/datasets/<uuid:dataset_id>/external-hit-testing")
class ExternalKnowledgeHitTestingApi(Resource):
@api.doc("test_external_knowledge_retrieval")
@api.doc(description="Test external knowledge retrieval for dataset")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.expect(
api.model(
"ExternalHitTestingRequest",
{
"query": fields.String(required=True, description="Query text for testing"),
"retrieval_model": fields.Raw(description="Retrieval model configuration"),
"external_retrieval_model": fields.Raw(description="External retrieval model configuration"),
},
)
)
@api.response(200, "External hit testing completed successfully")
@api.response(404, "Dataset not found")
@api.response(400, "Invalid parameters")
@setup_required
@login_required
@account_initialization_required
@ -228,8 +284,22 @@ class ExternalKnowledgeHitTestingApi(Resource):
raise InternalServerError(str(e))
@console_ns.route("/test/retrieval")
class BedrockRetrievalApi(Resource):
# This API is for internal testing only.
@api.doc("bedrock_retrieval_test")
@api.doc(description="Bedrock retrieval test (internal use only)")
@api.expect(
api.model(
"BedrockRetrievalTestRequest",
{
"retrieval_setting": fields.Raw(required=True, description="Retrieval settings"),
"query": fields.String(required=True, description="Query text"),
"knowledge_id": fields.String(required=True, description="Knowledge ID"),
},
)
)
@api.response(200, "Bedrock retrieval test completed")
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json")
@ -247,12 +317,3 @@ class BedrockRetrievalApi(Resource):
args["retrieval_setting"], args["query"], args["knowledge_id"]
)
return result, 200
api.add_resource(ExternalKnowledgeHitTestingApi, "/datasets/<uuid:dataset_id>/external-hit-testing")
api.add_resource(ExternalDatasetCreateApi, "/datasets/external")
api.add_resource(ExternalApiTemplateListApi, "/datasets/external-knowledge-api")
api.add_resource(ExternalApiTemplateApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>")
api.add_resource(ExternalApiUseCheckApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>/use-check")
# This API is for internal testing only.
api.add_resource(BedrockRetrievalApi, "/test/retrieval")

View File

@ -1,6 +1,6 @@
from flask_restx import Resource
from flask_restx import Resource, fields
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
from controllers.console.wraps import (
account_initialization_required,
@ -10,7 +10,25 @@ from controllers.console.wraps import (
from libs.login import login_required
@console_ns.route("/datasets/<uuid:dataset_id>/hit-testing")
class HitTestingApi(Resource, DatasetsHitTestingBase):
@api.doc("test_dataset_retrieval")
@api.doc(description="Test dataset knowledge retrieval")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.expect(
api.model(
"HitTestingRequest",
{
"query": fields.String(required=True, description="Query text for testing"),
"retrieval_model": fields.Raw(description="Retrieval model configuration"),
"top_k": fields.Integer(description="Number of top results to return"),
"score_threshold": fields.Float(description="Score threshold for filtering results"),
},
)
)
@api.response(200, "Hit testing completed successfully")
@api.response(404, "Dataset not found")
@api.response(400, "Invalid parameters")
@setup_required
@login_required
@account_initialization_required
@ -23,6 +41,3 @@ class HitTestingApi(Resource, DatasetsHitTestingBase):
self.hit_testing_args_check(args)
return self.perform_hit_testing(dataset, args)
api.add_resource(HitTestingApi, "/datasets/<uuid:dataset_id>/hit-testing")

View File

@ -1,13 +1,32 @@
from flask_restx import Resource, reqparse
from flask_restx import Resource, fields, reqparse
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.datasets.error import WebsiteCrawlError
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.website_service import WebsiteCrawlApiRequest, WebsiteCrawlStatusApiRequest, WebsiteService
@console_ns.route("/website/crawl")
class WebsiteCrawlApi(Resource):
@api.doc("crawl_website")
@api.doc(description="Crawl website content")
@api.expect(
api.model(
"WebsiteCrawlRequest",
{
"provider": fields.String(
required=True,
description="Crawl provider (firecrawl/watercrawl/jinareader)",
enum=["firecrawl", "watercrawl", "jinareader"],
),
"url": fields.String(required=True, description="URL to crawl"),
"options": fields.Raw(required=True, description="Crawl options"),
},
)
)
@api.response(200, "Website crawl initiated successfully")
@api.response(400, "Invalid crawl parameters")
@setup_required
@login_required
@account_initialization_required
@ -39,7 +58,14 @@ class WebsiteCrawlApi(Resource):
return result, 200
@console_ns.route("/website/crawl/status/<string:job_id>")
class WebsiteCrawlStatusApi(Resource):
@api.doc("get_crawl_status")
@api.doc(description="Get website crawl status")
@api.doc(params={"job_id": "Crawl job ID", "provider": "Crawl provider (firecrawl/watercrawl/jinareader)"})
@api.response(200, "Crawl status retrieved successfully")
@api.response(404, "Crawl job not found")
@api.response(400, "Invalid provider")
@setup_required
@login_required
@account_initialization_required
@ -62,7 +88,3 @@ class WebsiteCrawlStatusApi(Resource):
except Exception as e:
raise WebsiteCrawlError(str(e))
return result, 200
api.add_resource(WebsiteCrawlApi, "/website/crawl")
api.add_resource(WebsiteCrawlStatusApi, "/website/crawl/status/<string:job_id>")

View File

@ -14,6 +14,15 @@ api = ExternalApi(
files_ns = Namespace("files", description="File operations", path="/")
from . import image_preview, tool_files, upload # pyright: ignore[reportUnusedImport]
from . import image_preview, tool_files, upload
api.add_namespace(files_ns)
__all__ = [
"api",
"bp",
"files_ns",
"image_preview",
"tool_files",
"upload",
]
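The `__init__.py` changes here and in the files that follow all use the same mechanism: the controller modules are imported purely for their route-registration side effects, and naming them in `__all__` declares those imports intentional, which is what lets the per-line `# pyright: ignore[reportUnusedImport]` comments go away. A minimal sketch of the pattern:

```python
# Importing a controller module registers its routes as a side effect.
from . import image_preview, tool_files, upload

# Listing the modules in __all__ marks the imports as deliberate
# re-exports, so static analyzers stop flagging them as unused.
__all__ = ["image_preview", "tool_files", "upload"]
```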

View File

@ -15,8 +15,17 @@ api = ExternalApi(
# Create namespace
inner_api_ns = Namespace("inner_api", description="Internal API operations", path="/")
from . import mail as _mail # pyright: ignore[reportUnusedImport]
from .plugin import plugin as _plugin # pyright: ignore[reportUnusedImport]
from .workspace import workspace as _workspace # pyright: ignore[reportUnusedImport]
from . import mail as _mail
from .plugin import plugin as _plugin
from .workspace import workspace as _workspace
api.add_namespace(inner_api_ns)
__all__ = [
"_mail",
"_plugin",
"_workspace",
"api",
"bp",
"inner_api_ns",
]

View File

@ -14,6 +14,13 @@ api = ExternalApi(
mcp_ns = Namespace("mcp", description="MCP operations", path="/")
from . import mcp # pyright: ignore[reportUnusedImport]
from . import mcp
api.add_namespace(mcp_ns)
__all__ = [
"api",
"bp",
"mcp",
"mcp_ns",
]

View File

@ -14,26 +14,46 @@ api = ExternalApi(
service_api_ns = Namespace("service_api", description="Service operations", path="/")
from . import index # pyright: ignore[reportUnusedImport]
from . import index
from .app import (
annotation, # pyright: ignore[reportUnusedImport]
app, # pyright: ignore[reportUnusedImport]
audio, # pyright: ignore[reportUnusedImport]
completion, # pyright: ignore[reportUnusedImport]
conversation, # pyright: ignore[reportUnusedImport]
file, # pyright: ignore[reportUnusedImport]
file_preview, # pyright: ignore[reportUnusedImport]
message, # pyright: ignore[reportUnusedImport]
site, # pyright: ignore[reportUnusedImport]
workflow, # pyright: ignore[reportUnusedImport]
annotation,
app,
audio,
completion,
conversation,
file,
file_preview,
message,
site,
workflow,
)
from .dataset import (
dataset, # pyright: ignore[reportUnusedImport]
document, # pyright: ignore[reportUnusedImport]
hit_testing, # pyright: ignore[reportUnusedImport]
metadata, # pyright: ignore[reportUnusedImport]
segment, # pyright: ignore[reportUnusedImport]
dataset,
document,
hit_testing,
metadata,
segment,
)
from .workspace import models # pyright: ignore[reportUnusedImport]
from .workspace import models
__all__ = [
"annotation",
"app",
"audio",
"completion",
"conversation",
"dataset",
"document",
"file",
"file_preview",
"hit_testing",
"index",
"message",
"metadata",
"models",
"segment",
"site",
"workflow",
]
api.add_namespace(service_api_ns)

View File

@ -16,20 +16,40 @@ api = ExternalApi(
web_ns = Namespace("web", description="Web application API operations", path="/")
from . import (
app, # pyright: ignore[reportUnusedImport]
audio, # pyright: ignore[reportUnusedImport]
completion, # pyright: ignore[reportUnusedImport]
conversation, # pyright: ignore[reportUnusedImport]
feature, # pyright: ignore[reportUnusedImport]
files, # pyright: ignore[reportUnusedImport]
forgot_password, # pyright: ignore[reportUnusedImport]
login, # pyright: ignore[reportUnusedImport]
message, # pyright: ignore[reportUnusedImport]
passport, # pyright: ignore[reportUnusedImport]
remote_files, # pyright: ignore[reportUnusedImport]
saved_message, # pyright: ignore[reportUnusedImport]
site, # pyright: ignore[reportUnusedImport]
workflow, # pyright: ignore[reportUnusedImport]
app,
audio,
completion,
conversation,
feature,
files,
forgot_password,
login,
message,
passport,
remote_files,
saved_message,
site,
workflow,
)
api.add_namespace(web_ns)
__all__ = [
"api",
"app",
"audio",
"bp",
"completion",
"conversation",
"feature",
"files",
"forgot_password",
"login",
"message",
"passport",
"remote_files",
"saved_message",
"site",
"web_ns",
"workflow",
]

View File

@ -23,7 +23,7 @@ class UnstructuredWordExtractor(BaseExtractor):
unstructured_version = tuple(int(x) for x in __unstructured_version__.split("."))
# check the file extension
try:
import magic # noqa: F401 # pyright: ignore[reportUnusedImport]
import magic # noqa: F401
is_doc = detect_filetype(self._file_path) == FileType.DOC
except ImportError:

View File

@ -110,9 +110,7 @@ class TextSplitter(BaseDocumentTransformer, ABC):
docs = []
current_doc: list[str] = []
total = 0
index = 0
for d in splits:
_len = lengths[index]
for d, _len in zip(splits, lengths):
if total + _len + (separator_len if len(current_doc) > 0 else 0) > self._chunk_size:
if total > self._chunk_size:
logger.warning(
@ -134,7 +132,6 @@ class TextSplitter(BaseDocumentTransformer, ABC):
current_doc = current_doc[1:]
current_doc.append(d)
total += _len + (separator_len if len(current_doc) > 1 else 0)
index += 1
doc = self._join_docs(current_doc, separator)
if doc is not None:
docs.append(doc)
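One caveat on the `zip` refactor above: plain `zip` silently truncates when `splits` and `lengths` differ in length, while the old manual index raised an `IndexError` whenever `lengths` ran short. On Python 3.10+, `strict=True` restores a loud failure; a minimal sketch (the shipped code uses plain `zip` and assumes the two sequences always match):

```python
# Raises ValueError instead of silently truncating if the two
# sequences ever fall out of sync.
for d, _len in zip(splits, lengths, strict=True):
    ...
```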

View File

@ -79,7 +79,7 @@ dependencies = [
"sqlalchemy~=2.0.29",
"starlette==0.47.2",
"tiktoken~=0.9.0",
"transformers~=4.53.0",
"transformers~=4.56.1",
"unstructured[docx,epub,md,ppt,pptx]~=0.16.1",
"weave~=0.51.0",
"yarl~=1.18.3",

View File

@ -1,3 +1,5 @@
import time
import uuid
from unittest.mock import patch
import pytest
@ -248,9 +250,15 @@ class TestWebAppAuthService:
- Proper error handling for non-existent accounts
- Correct exception type and message
"""
# Arrange: Use non-existent email
fake = Faker()
non_existent_email = fake.email()
# Arrange: Generate a guaranteed non-existent email
# Use UUID and timestamp to ensure uniqueness
unique_id = str(uuid.uuid4()).replace("-", "")
timestamp = str(int(time.time() * 1000000)) # microseconds
non_existent_email = f"nonexistent_{unique_id}_{timestamp}@test-domain-that-never-exists.invalid"
# Double-check this email doesn't exist in the database
existing_account = db_session_with_containers.query(Account).filter_by(email=non_existent_email).first()
assert existing_account is None, f"Test email {non_existent_email} already exists in database"
# Act & Assert: Verify proper error handling
with pytest.raises(AccountNotFoundError):
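The `.invalid` suffix is doing real work here: RFC 2606 reserves that TLD, so the generated address can never belong to a routable domain. A lighter sketch of the same guarantee, assuming the UUID alone is unique enough without the timestamp:

```python
import uuid

# .invalid is reserved by RFC 2606, so this address can never resolve.
non_existent_email = f"nonexistent_{uuid.uuid4().hex}@example.invalid"
```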

View File

@ -0,0 +1,727 @@
"""
TestContainers-based integration tests for disable_segments_from_index_task.
This module provides comprehensive integration testing for the disable_segments_from_index_task
using TestContainers to ensure realistic database interactions and proper isolation.
The task is responsible for removing document segments from the search index when they are disabled.
"""
import json
from unittest.mock import MagicMock, patch
from faker import Faker
from models import Account, Dataset, DocumentSegment
from models import Document as DatasetDocument
from models.dataset import DatasetProcessRule
from tasks.disable_segments_from_index_task import disable_segments_from_index_task
class TestDisableSegmentsFromIndexTask:
"""
Comprehensive integration tests for disable_segments_from_index_task using testcontainers.
This test class covers all major functionality of the disable_segments_from_index_task:
- Successful segment disabling with proper index cleanup
- Error handling for various edge cases
- Database state validation after task execution
- Redis cache cleanup verification
- Index processor integration testing
All tests use the testcontainers infrastructure to ensure proper database isolation
and realistic testing environment with actual database interactions.
"""
def _create_test_account(self, db_session_with_containers, fake=None):
"""
Helper method to create a test account with realistic data.
Args:
db_session_with_containers: Database session from testcontainers infrastructure
fake: Faker instance for generating test data
Returns:
Account: Created test account instance
"""
fake = fake or Faker()
account = Account()
account.id = fake.uuid4()
account.email = fake.email()
account.name = fake.name()
account.avatar_url = fake.url()
account.tenant_id = fake.uuid4()
account.status = "active"
account.type = "normal"
account.role = "owner"
account.interface_language = "en-US"
account.created_at = fake.date_time_this_year()
account.updated_at = account.created_at
# Create a tenant for the account
from models.account import Tenant
tenant = Tenant()
tenant.id = account.tenant_id
tenant.name = f"Test Tenant {fake.company()}"
tenant.plan = "basic"
tenant.status = "active"
tenant.created_at = fake.date_time_this_year()
tenant.updated_at = tenant.created_at
from extensions.ext_database import db
db.session.add(tenant)
db.session.add(account)
db.session.commit()
# Set the current tenant for the account
account.current_tenant = tenant
return account
def _create_test_dataset(self, db_session_with_containers, account, fake=None):
"""
Helper method to create a test dataset with realistic data.
Args:
db_session_with_containers: Database session from testcontainers infrastructure
account: The account creating the dataset
fake: Faker instance for generating test data
Returns:
Dataset: Created test dataset instance
"""
fake = fake or Faker()
dataset = Dataset()
dataset.id = fake.uuid4()
dataset.tenant_id = account.tenant_id
dataset.name = f"Test Dataset {fake.word()}"
dataset.description = fake.text(max_nb_chars=200)
dataset.provider = "vendor"
dataset.permission = "only_me"
dataset.data_source_type = "upload_file"
dataset.indexing_technique = "high_quality"
dataset.created_by = account.id
dataset.updated_by = account.id
dataset.embedding_model = "text-embedding-ada-002"
dataset.embedding_model_provider = "openai"
dataset.built_in_field_enabled = False
from extensions.ext_database import db
db.session.add(dataset)
db.session.commit()
return dataset
def _create_test_document(self, db_session_with_containers, dataset, account, fake=None):
"""
Helper method to create a test document with realistic data.
Args:
db_session_with_containers: Database session from testcontainers infrastructure
dataset: The dataset containing the document
account: The account creating the document
fake: Faker instance for generating test data
Returns:
DatasetDocument: Created test document instance
"""
fake = fake or Faker()
document = DatasetDocument()
document.id = fake.uuid4()
document.tenant_id = dataset.tenant_id
document.dataset_id = dataset.id
document.position = 1
document.data_source_type = "upload_file"
document.data_source_info = '{"upload_file_id": "test_file_id"}'
document.batch = fake.uuid4()
document.name = f"Test Document {fake.word()}.txt"
document.created_from = "upload_file"
document.created_by = account.id
document.created_api_request_id = fake.uuid4()
document.processing_started_at = fake.date_time_this_year()
document.file_id = fake.uuid4()
document.word_count = fake.random_int(min=100, max=1000)
document.parsing_completed_at = fake.date_time_this_year()
document.cleaning_completed_at = fake.date_time_this_year()
document.splitting_completed_at = fake.date_time_this_year()
document.tokens = fake.random_int(min=50, max=500)
document.indexing_started_at = fake.date_time_this_year()
document.indexing_completed_at = fake.date_time_this_year()
document.indexing_status = "completed"
document.enabled = True
document.archived = False
document.doc_form = "text_model" # Use text_model form for testing
document.doc_language = "en"
from extensions.ext_database import db
db.session.add(document)
db.session.commit()
return document
def _create_test_segments(self, db_session_with_containers, document, dataset, account, count=3, fake=None):
"""
Helper method to create test document segments with realistic data.
Args:
db_session_with_containers: Database session from testcontainers infrastructure
document: The document containing the segments
dataset: The dataset containing the document
account: The account creating the segments
count: Number of segments to create
fake: Faker instance for generating test data
Returns:
List[DocumentSegment]: Created test segment instances
"""
fake = fake or Faker()
segments = []
for i in range(count):
segment = DocumentSegment()
segment.id = fake.uuid4()
segment.tenant_id = dataset.tenant_id
segment.dataset_id = dataset.id
segment.document_id = document.id
segment.position = i + 1
segment.content = f"Test segment content {i + 1}: {fake.text(max_nb_chars=200)}"
segment.answer = f"Test answer {i + 1}" if i % 2 == 0 else None
segment.word_count = fake.random_int(min=10, max=100)
segment.tokens = fake.random_int(min=5, max=50)
segment.keywords = [fake.word() for _ in range(3)]
segment.index_node_id = f"node_{segment.id}"
segment.index_node_hash = fake.sha256()
segment.hit_count = 0
segment.enabled = True
segment.disabled_at = None
segment.disabled_by = None
segment.status = "completed"
segment.created_by = account.id
segment.updated_by = account.id
segment.indexing_at = fake.date_time_this_year()
segment.completed_at = fake.date_time_this_year()
segment.error = None
segment.stopped_at = None
segments.append(segment)
from extensions.ext_database import db
for segment in segments:
db.session.add(segment)
db.session.commit()
return segments
def _create_dataset_process_rule(self, db_session_with_containers, dataset, fake=None):
"""
Helper method to create a dataset process rule.
Args:
db_session_with_containers: Database session from testcontainers infrastructure
dataset: The dataset for the process rule
fake: Faker instance for generating test data
Returns:
DatasetProcessRule: Created process rule instance
"""
fake = fake or Faker()
process_rule = DatasetProcessRule()
process_rule.id = fake.uuid4()
process_rule.tenant_id = dataset.tenant_id
process_rule.dataset_id = dataset.id
process_rule.mode = "automatic"
process_rule.rules = json.dumps(
    {
        "mode": "automatic",
        "rules": {
            "pre_processing_rules": [],
            "segmentation": {"separator": "\n\n", "max_tokens": 1000, "chunk_overlap": 50},
        },
    }
)
process_rule.created_by = dataset.created_by
process_rule.updated_by = dataset.updated_by
from extensions.ext_database import db
db.session.add(process_rule)
db.session.commit()
return process_rule
def test_disable_segments_success(self, db_session_with_containers):
"""
Test successful disabling of segments from index.
This test verifies that the task can correctly disable segments from the index
when all conditions are met, including proper index cleanup and database state updates.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
document = self._create_test_document(db_session_with_containers, dataset, account, fake)
segments = self._create_test_segments(db_session_with_containers, document, dataset, account, 3, fake)
self._create_dataset_process_rule(db_session_with_containers, dataset, fake)
segment_ids = [segment.id for segment in segments]
# Mock the index processor to avoid external dependencies
with patch("tasks.disable_segments_from_index_task.IndexProcessorFactory") as mock_factory:
mock_processor = MagicMock()
mock_factory.return_value.init_index_processor.return_value = mock_processor
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
mock_redis.delete.return_value = True
# Act
result = disable_segments_from_index_task(segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
# Verify index processor was called correctly
mock_factory.assert_called_once_with(document.doc_form)
mock_processor.clean.assert_called_once()
# Verify the call arguments (checking by attributes rather than object identity)
call_args = mock_processor.clean.call_args
assert call_args[0][0].id == dataset.id # First argument should be the dataset
assert call_args[0][1] == [
segment.index_node_id for segment in segments
] # Second argument should be node IDs
assert call_args[1]["with_keywords"] is True
assert call_args[1]["delete_child_chunks"] is False
# Verify Redis cache cleanup was called for each segment
assert mock_redis.delete.call_count == len(segments)
for segment in segments:
expected_key = f"segment_{segment.id}_indexing"
mock_redis.delete.assert_any_call(expected_key)
def test_disable_segments_dataset_not_found(self, db_session_with_containers):
"""
Test handling when dataset is not found.
This test ensures that the task correctly handles cases where the specified
dataset doesn't exist, logging appropriate messages and returning early.
"""
# Arrange
fake = Faker()
non_existent_dataset_id = fake.uuid4()
non_existent_document_id = fake.uuid4()
segment_ids = [fake.uuid4()]
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
# Act
result = disable_segments_from_index_task(segment_ids, non_existent_dataset_id, non_existent_document_id)
# Assert
assert result is None # Task should complete without returning a value
# Redis should not be called when dataset is not found
mock_redis.delete.assert_not_called()
def test_disable_segments_document_not_found(self, db_session_with_containers):
"""
Test handling when document is not found.
This test ensures that the task correctly handles cases where the specified
document doesn't exist, logging appropriate messages and returning early.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
non_existent_document_id = fake.uuid4()
segment_ids = [fake.uuid4()]
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
# Act
result = disable_segments_from_index_task(segment_ids, dataset.id, non_existent_document_id)
# Assert
assert result is None # Task should complete without returning a value
# Redis should not be called when document is not found
mock_redis.delete.assert_not_called()
def test_disable_segments_document_invalid_status(self, db_session_with_containers):
"""
Test handling when document has invalid status for disabling.
This test ensures that the task correctly handles cases where the document
is disabled, archived, or has not finished indexing, preventing invalid operations.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
document = self._create_test_document(db_session_with_containers, dataset, account, fake)
segments = self._create_test_segments(db_session_with_containers, document, dataset, account, 2, fake)
# Test case 1: Document not enabled
document.enabled = False
from extensions.ext_database import db
db.session.commit()
segment_ids = [segment.id for segment in segments]
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
# Act
result = disable_segments_from_index_task(segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
# Redis should not be called when document status is invalid
mock_redis.delete.assert_not_called()
# Test case 2: Document archived
document.enabled = True
document.archived = True
db.session.commit()
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
# Act
result = disable_segments_from_index_task(segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
mock_redis.delete.assert_not_called()
# Test case 3: Document indexing not completed
document.enabled = True
document.archived = False
document.indexing_status = "indexing"
db.session.commit()
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
# Act
result = disable_segments_from_index_task(segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
mock_redis.delete.assert_not_called()
def test_disable_segments_no_segments_found(self, db_session_with_containers):
"""
Test handling when no segments are found for the given IDs.
This test ensures that the task correctly handles cases where the specified
segment IDs don't exist or don't match the dataset/document criteria.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
document = self._create_test_document(db_session_with_containers, dataset, account, fake)
self._create_dataset_process_rule(db_session_with_containers, dataset, fake)
# Use non-existent segment IDs
non_existent_segment_ids = [fake.uuid4() for _ in range(3)]
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
# Act
result = disable_segments_from_index_task(non_existent_segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
# Redis should not be called when no segments are found
mock_redis.delete.assert_not_called()
def test_disable_segments_index_processor_error(self, db_session_with_containers):
"""
Test handling when index processor encounters an error.
This test verifies that the task correctly handles index processor errors
by rolling back segment states and ensuring proper cleanup.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
document = self._create_test_document(db_session_with_containers, dataset, account, fake)
segments = self._create_test_segments(db_session_with_containers, document, dataset, account, 2, fake)
self._create_dataset_process_rule(db_session_with_containers, dataset, fake)
segment_ids = [segment.id for segment in segments]
# Mock the index processor to raise an exception
with patch("tasks.disable_segments_from_index_task.IndexProcessorFactory") as mock_factory:
mock_processor = MagicMock()
mock_processor.clean.side_effect = Exception("Index processor error")
mock_factory.return_value.init_index_processor.return_value = mock_processor
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
mock_redis.delete.return_value = True
# Act
result = disable_segments_from_index_task(segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
# Verify segments were rolled back to enabled state
from extensions.ext_database import db
db.session.refresh(segments[0])
db.session.refresh(segments[1])
# Check that segments are re-enabled after error
updated_segments = db.session.query(DocumentSegment).where(DocumentSegment.id.in_(segment_ids)).all()
for segment in updated_segments:
assert segment.enabled is True
assert segment.disabled_at is None
assert segment.disabled_by is None
# Verify Redis cache cleanup was still called
assert mock_redis.delete.call_count == len(segments)
def test_disable_segments_with_different_doc_forms(self, db_session_with_containers):
"""
Test disabling segments with different document forms.
This test verifies that the task correctly handles different document forms
(paragraph, qa, parent_child) and initializes the appropriate index processor.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
document = self._create_test_document(db_session_with_containers, dataset, account, fake)
segments = self._create_test_segments(db_session_with_containers, document, dataset, account, 2, fake)
self._create_dataset_process_rule(db_session_with_containers, dataset, fake)
segment_ids = [segment.id for segment in segments]
# Test different document forms
doc_forms = ["text_model", "qa_model", "hierarchical_model"]
for doc_form in doc_forms:
# Update document form
document.doc_form = doc_form
from extensions.ext_database import db
db.session.commit()
# Mock the index processor factory
with patch("tasks.disable_segments_from_index_task.IndexProcessorFactory") as mock_factory:
mock_processor = MagicMock()
mock_factory.return_value.init_index_processor.return_value = mock_processor
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
mock_redis.delete.return_value = True
# Act
result = disable_segments_from_index_task(segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
mock_factory.assert_called_with(doc_form)
def test_disable_segments_performance_timing(self, db_session_with_containers):
"""
Test that the task properly measures and logs performance timing.
This test verifies that the task correctly measures execution time
and logs performance metrics for monitoring purposes.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
document = self._create_test_document(db_session_with_containers, dataset, account, fake)
segments = self._create_test_segments(db_session_with_containers, document, dataset, account, 3, fake)
self._create_dataset_process_rule(db_session_with_containers, dataset, fake)
segment_ids = [segment.id for segment in segments]
# Mock the index processor
with patch("tasks.disable_segments_from_index_task.IndexProcessorFactory") as mock_factory:
mock_processor = MagicMock()
mock_factory.return_value.init_index_processor.return_value = mock_processor
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
mock_redis.delete.return_value = True
# Mock time.perf_counter to control timing
with patch("tasks.disable_segments_from_index_task.time.perf_counter") as mock_perf_counter:
mock_perf_counter.side_effect = [1000.0, 1000.5] # 0.5 seconds execution time
# Mock logger to capture log messages
with patch("tasks.disable_segments_from_index_task.logger") as mock_logger:
# Act
result = disable_segments_from_index_task(segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
# Verify performance logging
mock_logger.info.assert_called()
log_calls = [call[0][0] for call in mock_logger.info.call_args_list]
performance_log = next((call for call in log_calls if "latency" in call), None)
assert performance_log is not None
assert "0.5" in performance_log # Should log the execution time
def test_disable_segments_redis_cache_cleanup(self, db_session_with_containers):
"""
Test that Redis cache is properly cleaned up for all segments.
This test verifies that the task correctly removes indexing cache entries
from Redis for all processed segments, preventing stale cache issues.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
document = self._create_test_document(db_session_with_containers, dataset, account, fake)
segments = self._create_test_segments(db_session_with_containers, document, dataset, account, 5, fake)
self._create_dataset_process_rule(db_session_with_containers, dataset, fake)
segment_ids = [segment.id for segment in segments]
# Mock the index processor
with patch("tasks.disable_segments_from_index_task.IndexProcessorFactory") as mock_factory:
mock_processor = MagicMock()
mock_factory.return_value.init_index_processor.return_value = mock_processor
# Mock Redis client to track delete calls
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
mock_redis.delete.return_value = True
# Act
result = disable_segments_from_index_task(segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
# Verify Redis delete was called for each segment
assert mock_redis.delete.call_count == len(segments)
# Verify correct cache keys were used
expected_keys = [f"segment_{segment.id}_indexing" for segment in segments]
actual_calls = [call[0][0] for call in mock_redis.delete.call_args_list]
for expected_key in expected_keys:
assert expected_key in actual_calls
def test_disable_segments_database_session_cleanup(self, db_session_with_containers):
"""
Test that database session is properly closed after task execution.
This test verifies that the task correctly manages database sessions
and ensures proper cleanup to prevent connection leaks.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
document = self._create_test_document(db_session_with_containers, dataset, account, fake)
segments = self._create_test_segments(db_session_with_containers, document, dataset, account, 2, fake)
self._create_dataset_process_rule(db_session_with_containers, dataset, fake)
segment_ids = [segment.id for segment in segments]
# Mock the index processor
with patch("tasks.disable_segments_from_index_task.IndexProcessorFactory") as mock_factory:
mock_processor = MagicMock()
mock_factory.return_value.init_index_processor.return_value = mock_processor
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
mock_redis.delete.return_value = True
# Mock db.session.close to verify it's called
with patch("tasks.disable_segments_from_index_task.db.session.close") as mock_close:
# Act
result = disable_segments_from_index_task(segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
# Verify session was closed
mock_close.assert_called()
def test_disable_segments_empty_segment_ids(self, db_session_with_containers):
"""
Test handling when empty segment IDs list is provided.
This test ensures that the task correctly handles edge cases where
an empty list of segment IDs is provided.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
document = self._create_test_document(db_session_with_containers, dataset, account, fake)
self._create_dataset_process_rule(db_session_with_containers, dataset, fake)
empty_segment_ids = []
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
# Act
result = disable_segments_from_index_task(empty_segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
# Redis should not be called when no segments are provided
mock_redis.delete.assert_not_called()
def test_disable_segments_mixed_valid_invalid_ids(self, db_session_with_containers):
"""
Test handling when some segment IDs are valid and others are invalid.
This test verifies that the task correctly processes only the valid
segment IDs and ignores invalid ones.
"""
# Arrange
fake = Faker()
account = self._create_test_account(db_session_with_containers, fake)
dataset = self._create_test_dataset(db_session_with_containers, account, fake)
document = self._create_test_document(db_session_with_containers, dataset, account, fake)
segments = self._create_test_segments(db_session_with_containers, document, dataset, account, 2, fake)
self._create_dataset_process_rule(db_session_with_containers, dataset, fake)
# Mix valid and invalid segment IDs
valid_segment_ids = [segment.id for segment in segments]
invalid_segment_ids = [fake.uuid4() for _ in range(2)]
mixed_segment_ids = valid_segment_ids + invalid_segment_ids
# Mock the index processor
with patch("tasks.disable_segments_from_index_task.IndexProcessorFactory") as mock_factory:
mock_processor = MagicMock()
mock_factory.return_value.init_index_processor.return_value = mock_processor
# Mock Redis client
with patch("tasks.disable_segments_from_index_task.redis_client") as mock_redis:
mock_redis.delete.return_value = True
# Act
result = disable_segments_from_index_task(mixed_segment_ids, dataset.id, document.id)
# Assert
assert result is None # Task should complete without returning a value
# Verify index processor was called with only valid segment node IDs
expected_node_ids = [segment.index_node_id for segment in segments]
mock_processor.clean.assert_called_once()
# Verify the call arguments
call_args = mock_processor.clean.call_args
assert call_args[0][0].id == dataset.id # First argument should be the dataset
assert call_args[0][1] == expected_node_ids # Second argument should be node IDs
assert call_args[1]["with_keywords"] is True
assert call_args[1]["delete_child_chunks"] is False
# Verify Redis cleanup was called only for valid segments
assert mock_redis.delete.call_count == len(segments)
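For orientation, every test above drives the same entry point: `disable_segments_from_index_task(segment_ids, dataset_id, document_id)`. A minimal dispatch sketch (the IDs are placeholders, and the `.delay` call assumes the function is a Celery task, as the `tasks` package layout suggests):

```python
from tasks.disable_segments_from_index_task import disable_segments_from_index_task

segment_ids = ["segment-uuid-1", "segment-uuid-2"]  # placeholder IDs
dataset_id = "dataset-uuid"
document_id = "document-uuid"

# Synchronous invocation, exactly as the tests call it:
disable_segments_from_index_task(segment_ids, dataset_id, document_id)

# Asynchronous dispatch via the Celery broker (assumed Celery task):
disable_segments_from_index_task.delay(segment_ids, dataset_id, document_id)
```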

View File

@ -1567,7 +1567,7 @@ requires-dist = [
{ name = "sseclient-py", specifier = "~=1.8.0" },
{ name = "starlette", specifier = "==0.47.2" },
{ name = "tiktoken", specifier = "~=0.9.0" },
{ name = "transformers", specifier = "~=4.53.0" },
{ name = "transformers", specifier = "~=4.56.1" },
{ name = "unstructured", extras = ["docx", "epub", "md", "ppt", "pptx"], specifier = "~=0.16.1" },
{ name = "weave", specifier = "~=0.51.0" },
{ name = "webvtt-py", specifier = "~=0.5.1" },
@ -6286,7 +6286,7 @@ wheels = [
[[package]]
name = "transformers"
version = "4.53.3"
version = "4.56.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "filelock" },
@ -6300,9 +6300,9 @@ dependencies = [
{ name = "tokenizers" },
{ name = "tqdm" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f1/5c/49182918b58eaa0b4c954fd0e37c79fc299e5643e69d70089d0b0eb0cd9b/transformers-4.53.3.tar.gz", hash = "sha256:b2eda1a261de79b78b97f7888fe2005fc0c3fabf5dad33d52cc02983f9f675d8", size = 9197478, upload-time = "2025-07-22T07:30:51.51Z" }
sdist = { url = "https://files.pythonhosted.org/packages/89/21/dc88ef3da1e49af07ed69386a11047a31dcf1aaf4ded3bc4b173fbf94116/transformers-4.56.1.tar.gz", hash = "sha256:0d88b1089a563996fc5f2c34502f10516cad3ea1aa89f179f522b54c8311fe74", size = 9855473, upload-time = "2025-09-04T20:47:13.14Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/41/b1/d7520cc5cb69c825599042eb3a7c986fa9baa8a8d2dea9acd78e152c81e2/transformers-4.53.3-py3-none-any.whl", hash = "sha256:5aba81c92095806b6baf12df35d756cf23b66c356975fb2a7fa9e536138d7c75", size = 10826382, upload-time = "2025-07-22T07:30:48.458Z" },
{ url = "https://files.pythonhosted.org/packages/71/7c/283c3dd35e00e22a7803a0b2a65251347b745474a82399be058bde1c9f15/transformers-4.56.1-py3-none-any.whl", hash = "sha256:1697af6addfb6ddbce9618b763f4b52d5a756f6da4899ffd1b4febf58b779248", size = 11608197, upload-time = "2025-09-04T20:47:04.895Z" },
]
[[package]]

View File

@ -1,5 +1,5 @@
import json
from typing import Literal
import requests
@ -8,7 +8,7 @@ class DifyClient:
self.api_key = api_key
self.base_url = base_url
def _send_request(self, method, endpoint, json=None, params=None, stream=False):
def _send_request(self, method: str, endpoint: str, json: dict | None = None, params: dict | None = None, stream: bool = False):
headers = {
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json",
@ -31,15 +31,15 @@ class DifyClient:
return response
def message_feedback(self, message_id, rating, user):
def message_feedback(self, message_id: str, rating: Literal["like", "dislike"], user: str):
data = {"rating": rating, "user": user}
return self._send_request("POST", f"/messages/{message_id}/feedbacks", data)
def get_application_parameters(self, user):
def get_application_parameters(self, user: str):
params = {"user": user}
return self._send_request("GET", "/parameters", params=params)
def file_upload(self, user, files):
def file_upload(self, user: str, files: dict):
data = {"user": user}
return self._send_request_with_files(
"POST", "/files/upload", data=data, files=files
@ -49,13 +49,13 @@ class DifyClient:
data = {"text": text, "user": user, "streaming": streaming}
return self._send_request("POST", "/text-to-audio", json=data)
def get_meta(self, user):
def get_meta(self, user: str):
params = {"user": user}
return self._send_request("GET", "/meta", params=params)
class CompletionClient(DifyClient):
def create_completion_message(self, inputs, response_mode, user, files=None):
def create_completion_message(self, inputs: dict, response_mode: Literal["blocking", "streaming"], user: str, files: dict | None = None):
data = {
"inputs": inputs,
"response_mode": response_mode,
@ -76,7 +76,7 @@ class ChatClient(DifyClient):
inputs: dict,
query: str,
user: str,
response_mode: str = "blocking",
response_mode: Literal["blocking", "streaming"] = "blocking",
conversation_id: str | None = None,
files: dict | None = None,
):
@ -156,7 +156,7 @@ class ChatClient(DifyClient):
class WorkflowClient(DifyClient):
def run(
self, inputs: dict, response_mode: str = "streaming", user: str = "abc-123"
self, inputs: dict, response_mode: Literal["blocking", "streaming"] = "streaming", user: str = "abc-123"
):
data = {"inputs": inputs, "response_mode": response_mode, "user": user}
return self._send_request("POST", "/workflows/run", data)
@ -172,7 +172,7 @@ class WorkflowClient(DifyClient):
class KnowledgeBaseClient(DifyClient):
def __init__(
self,
api_key,
api_key: str,
base_url: str = "https://api.dify.ai/v1",
dataset_id: str | None = None,
):
@ -241,7 +241,7 @@ class KnowledgeBaseClient(DifyClient):
return self._send_request("POST", url, json=data, **kwargs)
def update_document_by_text(
self, document_id, name, text, extra_params: dict | None = None, **kwargs
self, document_id: str, name: str, text: str, extra_params: dict | None = None, **kwargs
):
"""
Update a document by text.
@ -278,7 +278,7 @@ class KnowledgeBaseClient(DifyClient):
return self._send_request("POST", url, json=data, **kwargs)
def create_document_by_file(
self, file_path, original_document_id=None, extra_params: dict | None = None
self, file_path: str, original_document_id: str | None = None, extra_params: dict | None = None
):
"""
Create a document by file.
@ -320,7 +320,7 @@ class KnowledgeBaseClient(DifyClient):
)
def update_document_by_file(
self, document_id, file_path, extra_params: dict | None = None
self, document_id: str, file_path: str, extra_params: dict | None = None
):
"""
Update a document by file.
@ -377,7 +377,7 @@ class KnowledgeBaseClient(DifyClient):
url = f"/datasets/{self._get_dataset_id()}"
return self._send_request("DELETE", url)
def delete_document(self, document_id):
def delete_document(self, document_id: str):
"""
Delete a document.
@ -409,7 +409,7 @@ class KnowledgeBaseClient(DifyClient):
url = f"/datasets/{self._get_dataset_id()}/documents"
return self._send_request("GET", url, params=params, **kwargs)
def add_segments(self, document_id, segments, **kwargs):
def add_segments(self, document_id: str, segments: list[dict], **kwargs):
"""
Add segments to a document.
@ -423,7 +423,7 @@ class KnowledgeBaseClient(DifyClient):
def query_segments(
self,
document_id,
document_id: str,
keyword: str | None = None,
status: str | None = None,
**kwargs,
@ -445,7 +445,7 @@ class KnowledgeBaseClient(DifyClient):
params.update(kwargs["params"])
return self._send_request("GET", url, params=params, **kwargs)
def delete_document_segment(self, document_id, segment_id):
def delete_document_segment(self, document_id: str, segment_id: str):
"""
Delete a segment from a document.
@ -456,7 +456,7 @@ class KnowledgeBaseClient(DifyClient):
url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments/{segment_id}"
return self._send_request("DELETE", url)
def update_document_segment(self, document_id, segment_id, segment_data, **kwargs):
def update_document_segment(self, document_id: str, segment_id: str, segment_data: dict, **kwargs):
"""
Update a segment in a document.
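To see the tightened signatures in use, a minimal sketch (the import path is assumed from the SDK layout, the key and payload are placeholders, and both `Literal` parameters now reject anything outside their two allowed values under a type checker):

```python
from dify_client import CompletionClient  # assumed package/module name

client = CompletionClient(api_key="your-api-key")

# response_mode is now Literal["blocking", "streaming"].
response = client.create_completion_message(
    inputs={"query": "Hello"},
    response_mode="blocking",
    user="user-123",
)
print(response.json())

# rating is now Literal["like", "dislike"].
client.message_feedback(message_id="message-uuid", rating="like", user="user-123")
```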

View File

@ -50,6 +50,7 @@ export type IGetAutomaticResProps = {
onFinished: (res: GenRes) => void
flowId?: string
nodeId?: string
editorId?: string
currentPrompt?: string
isBasicMode?: boolean
}
@ -76,6 +77,7 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
onClose,
flowId,
nodeId,
editorId,
currentPrompt,
isBasicMode,
onFinished,
@ -132,7 +134,8 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
},
]
const [instructionFromSessionStorage, setInstruction] = useSessionStorageState<string>(`improve-instruction-${flowId}${isBasicMode ? '' : `-${nodeId}`}`)
// eslint-disable-next-line sonarjs/no-nested-template-literals, sonarjs/no-nested-conditional
const [instructionFromSessionStorage, setInstruction] = useSessionStorageState<string>(`improve-instruction-${flowId}${isBasicMode ? '' : `-${nodeId}${editorId ? `-${editorId}` : ''}`}`)
const instruction = instructionFromSessionStorage || ''
const [ideaOutput, setIdeaOutput] = useState<string>('')
@ -166,7 +169,7 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
return true
}
const [isLoading, { setTrue: setLoadingTrue, setFalse: setLoadingFalse }] = useBoolean(false)
const storageKey = `${flowId}${isBasicMode ? '' : `-${nodeId}`}`
const storageKey = `${flowId}${isBasicMode ? '' : `-${nodeId}${editorId ? `-${editorId}` : ''}`}`
const { addVersion, current, currentVersionIndex, setCurrentVersionIndex, versions } = useGenData({
storageKey,
})

View File

@ -284,6 +284,7 @@ const GotoAnything: FC<Props> = ({
value={cmdVal}
onValueChange={setCmdVal}
disablePointerSelection
loop
>
<div className='flex items-center gap-3 border-b border-divider-subtle bg-components-panel-bg-blur px-4 py-3'>
<RiSearchLine className='h-4 w-4 text-text-quaternary' />

View File

@ -42,6 +42,7 @@ type Props = {
headerClassName?: string
instanceId?: string
nodeId?: string
editorId?: string
title: string | React.JSX.Element
value: string
onChange: (value: string) => void
@ -85,6 +86,7 @@ const Editor: FC<Props> = ({
headerClassName,
instanceId,
nodeId,
editorId,
title,
value,
onChange,
@ -163,6 +165,7 @@ const Editor: FC<Props> = ({
{isSupportPromptGenerator && (
<PromptGeneratorBtn
nodeId={nodeId!}
editorId={editorId}
className='ml-[5px]'
onGenerated={onGenerated}
modelConfig={modelConfig}

View File

@ -136,7 +136,8 @@ const ConfigPromptItem: FC<Props> = ({
nodesOutputVars={availableVars}
availableNodes={availableNodes}
nodeId={nodeId}
isSupportPromptGenerator={payload.role === PromptRole.system}
editorId={id}
isSupportPromptGenerator
onGenerated={handleGenerated}
modelConfig={modelConfig}
isSupportJinja

View File

@ -16,6 +16,7 @@ type Props = {
onGenerated?: (prompt: string) => void
modelConfig?: ModelConfig
nodeId: string
editorId?: string
currentPrompt?: string
}
@ -23,6 +24,7 @@ const PromptGeneratorBtn: FC<Props> = ({
className,
onGenerated,
nodeId,
editorId,
currentPrompt,
}) => {
const [showAutomatic, { setTrue: showAutomaticTrue, setFalse: showAutomaticFalse }] = useBoolean(false)
@ -46,6 +48,7 @@ const PromptGeneratorBtn: FC<Props> = ({
onFinished={handleAutomaticRes}
flowId={configsMap?.flowId || ''}
nodeId={nodeId}
editorId={editorId}
currentPrompt={currentPrompt}
/>
)}

View File

@ -71,7 +71,7 @@ const ValueContent = ({
setValue(currentVar.value)
}
if (showJSONEditor)
setJson(currentVar.value ? JSON.stringify(currentVar.value, null, 2) : '')
setJson(currentVar.value != null ? JSON.stringify(currentVar.value, null, 2) : '')
if (showFileEditor)
setFileValue(formatFileValue(currentVar))

View File

@ -318,6 +318,10 @@ const translation = {
noMatchingCommands: 'Keine übereinstimmenden Befehle gefunden',
tryDifferentSearch: 'Versuchen Sie es mit einem anderen Suchbegriff',
slashHint: 'Geben Sie / ein, um alle verfügbaren Befehle anzuzeigen.',
tips: 'Drücken Sie ↑↓, um zu navigieren',
pressEscToClose: 'Drücken Sie ESC, um zu schließen',
startTyping: 'Beginnen Sie mit der Eingabe, um zu suchen',
selectToNavigate: 'Auswählen, um zu navigieren',
},
}

View File

@ -316,6 +316,10 @@ const translation = {
tryDifferentSearch: 'Prueba con un término de búsqueda diferente',
noMatchingCommands: 'No se encontraron comandos coincidentes',
slashHint: 'Escribe / para ver todos los comandos disponibles',
selectToNavigate: 'Seleccionar para navegar',
pressEscToClose: 'Presiona ESC para cerrar',
startTyping: 'Empieza a escribir para buscar',
tips: 'Presiona ↑↓ para navegar',
},
}

View File

@ -316,6 +316,10 @@ const translation = {
noMatchingCommands: 'هیچ دستوری منطبق یافت نشد',
tryDifferentSearch: 'عبارت جستجوی دیگری را امتحان کنید',
slashHint: 'برای مشاهده تمام دستورات موجود / را تایپ کنید',
startTyping: 'برای جستجو شروع به تایپ کنید',
selectToNavigate: 'برای پیمایش انتخاب کنید',
pressEscToClose: 'برای بستن ESC را فشار دهید',
tips: 'برای حرکت به بالا و پایین کلیدهای ↑ و ↓ را فشار دهید',
},
}

View File

@ -316,6 +316,10 @@ const translation = {
noMatchingCommands: 'Aucune commande correspondante n\'a été trouvée',
tryDifferentSearch: 'Essayez un autre terme de recherche',
slashHint: 'Tapez / pour voir toutes les commandes disponibles',
pressEscToClose: 'Appuyez sur Échap pour fermer',
tips: 'Appuyez sur ↑↓ pour naviguer',
startTyping: 'Commencez à taper pour rechercher',
selectToNavigate: 'Sélectionnez pour naviguer',
},
}

View File

@ -316,6 +316,10 @@ const translation = {
tryDifferentSearch: 'एक अलग खोज शब्द आजमाएँ',
noMatchingCommands: 'कोई मिलती-जुलती कमांड्स नहीं मिलीं',
slashHint: 'सभी उपलब्ध कमांड देखने के लिए टाइप करें /',
pressEscToClose: 'बंद करने के लिए ESC दबाएं',
startTyping: 'खोजने के लिए टाइप करना शुरू करें',
selectToNavigate: 'नेविगेट करने के लिए चुनें',
tips: 'नेविगेट करने के लिए ↑↓ दबाएँ',
},
}

View File

@ -263,6 +263,10 @@ const translation = {
tryDifferentSearch: 'Coba istilah penelusuran lain',
noMatchingCommands: 'Tidak ada perintah yang cocok ditemukan',
searchFailed: 'Pencarian gagal',
tips: 'Tekan ↑↓ untuk menavigasi',
startTyping: 'Mulai mengetik untuk mencari',
selectToNavigate: 'Pilih untuk menavigasi',
pressEscToClose: 'Tekan ESC untuk menutup',
},
createApp: 'BUAT APLIKASI',
accessControl: 'Kontrol Akses Aplikasi Web',

View File

@ -322,6 +322,10 @@ const translation = {
tryDifferentSearch: 'Prova un termine di ricerca diverso',
noMatchingCommands: 'Nessun comando corrispondente trovato',
slashHint: 'Digita / per vedere tutti i comandi disponibili',
selectToNavigate: 'Seleziona per navigare',
startTyping: 'Inizia a digitare per cercare',
tips: 'Premi ↑↓ per navigare',
pressEscToClose: 'Premi ESC per chiudere',
},
}

View File

@ -336,6 +336,10 @@ const translation = {
tryDifferentSearch: '다른 검색어 사용해 보기',
noMatchingCommands: '일치하는 명령을 찾을 수 없습니다.',
slashHint: '모든 사용 가능한 명령을 보려면 /를 입력하세요.',
tips: '↑↓ 키를 눌러 탐색하세요',
pressEscToClose: 'ESC를 눌러 닫기',
selectToNavigate: '선택하여 탐색하기',
startTyping: '검색하려면 타이핑을 시작하세요',
},
}

View File

@ -317,6 +317,10 @@ const translation = {
noMatchingCommands: 'Nie znaleziono pasujących poleceń',
tryDifferentSearch: 'Spróbuj użyć innego hasła',
slashHint: 'Wpisz / aby zobaczyć wszystkie dostępne polecenia',
selectToNavigate: 'Wybierz, aby nawigować',
tips: 'Naciśnij ↑↓, aby nawigować',
startTyping: 'Zacznij pisać, aby wyszukać',
pressEscToClose: 'Naciśnij ESC, aby zamknąć',
},
}

View File

@ -316,6 +316,10 @@ const translation = {
noMatchingCommands: 'Nenhum comando correspondente encontrado',
tryDifferentSearch: 'Tente um termo de pesquisa diferente',
slashHint: 'Digite / para ver todos os comandos disponíveis',
tips: 'Pressione ↑↓ para navegar',
selectToNavigate: 'Selecione para navegar',
pressEscToClose: 'Pressione ESC para fechar',
startTyping: 'Comece a digitar para pesquisar',
},
}

View File

@ -316,6 +316,10 @@ const translation = {
noMatchingCommands: 'Nu s-au găsit comenzi potrivite',
tryDifferentSearch: 'Încercați un alt termen de căutare',
slashHint: 'Tastați / pentru a vedea toate comenzile disponibile',
selectToNavigate: 'Selectați pentru a naviga',
startTyping: 'Începeți să tastați pentru a căuta',
tips: 'Apăsați ↑↓ pentru a naviga',
pressEscToClose: 'Apăsați ESC pentru a închide',
},
}

View File

@ -316,6 +316,10 @@ const translation = {
noMatchingCommands: 'Соответствующие команды не найдены',
tryDifferentSearch: 'Попробуйте использовать другой поисковый запрос',
slashHint: 'Введите / чтобы увидеть все доступные команды',
startTyping: 'Начните вводить для поиска',
tips: 'Нажмите ↑↓ для навигации',
selectToNavigate: 'Выберите для навигации',
pressEscToClose: 'Нажмите ESC для закрытия',
},
}

View File

@ -316,6 +316,10 @@ const translation = {
tryDifferentSearch: 'Poskusite uporabiti drug iskalni izraz',
noMatchingCommands: 'Ujemajoči se ukazi niso našli',
slashHint: 'Vnesite / za ogled vseh razpoložljivih ukazov',
startTyping: 'Začnite vnašati za iskanje',
pressEscToClose: 'Pritisnite ESC za zapiranje',
selectToNavigate: 'Izberite za navigacijo',
tips: 'Pritisnite ↑↓ za navigacijo',
},
}

View File

@ -312,6 +312,10 @@ const translation = {
noMatchingCommands: 'ไม่พบคำสั่งที่ตรงกัน',
tryDifferentSearch: 'ลองใช้ข้อความค้นหาอื่น',
slashHint: 'พิมพ์ / เพื่อดูคำสั่งที่มีให้ทั้งหมด',
pressEscToClose: 'กด ESC เพื่อปิด',
selectToNavigate: 'เลือกเพื่อนำทาง',
startTyping: 'เริ่มพิมพ์เพื่อค้นหา',
tips: 'กด ↑↓ เพื่อเลื่อนดู',
},
}

View File

@ -312,6 +312,10 @@ const translation = {
tryDifferentSearch: 'Farklı bir arama terimi deneyin',
noMatchingCommands: 'Eşleşen komut bulunamadı',
slashHint: 'Tüm mevcut komutları görmek için / yazın',
tips: 'Navigasyon için ↑↓ tuşlarına basın',
selectToNavigate: 'Gezinmek için seçin',
pressEscToClose: 'Kapatmak için ESC tuşuna basın',
startTyping: 'Arama yapmak için yazmaya başlayın',
},
}

View File

@ -316,6 +316,10 @@ const translation = {
noMatchingCommands: 'Відповідних команд не знайдено',
tryDifferentSearch: 'Спробуйте інший пошуковий термін',
slashHint: 'Наберіть / , щоб побачити всі доступні команди',
selectToNavigate: 'Виберіть, щоб перейти',
tips: 'Натисніть ↑↓ для навігації',
startTyping: 'Почніть вводити для пошуку',
pressEscToClose: 'Натисніть ESC, щоб закрити',
},
}

View File

@ -316,6 +316,10 @@ const translation = {
tryDifferentSearch: 'Thử một cụm từ tìm kiếm khác',
noMatchingCommands: 'Không tìm thấy lệnh phù hợp',
slashHint: 'Gõ / để xem tất cả các lệnh có sẵn',
selectToNavigate: 'Chọn để điều hướng',
startTyping: 'Bắt đầu gõ để tìm kiếm',
pressEscToClose: 'Nhấn ESC để đóng',
tips: 'Nhấn ↑↓ để duyệt',
},
}

View File

@ -315,6 +315,10 @@ const translation = {
noMatchingCommands: '未找到匹配的命令',
tryDifferentSearch: '嘗試其他搜尋字詞',
slashHint: '輸入 / 以查看所有可用的指令',
tips: '按 ↑ ↓ 鍵進行導航',
startTyping: '開始輸入以進行搜尋',
pressEscToClose: '按 ESC 鍵關閉',
selectToNavigate: '選擇以進行導航',
},
}

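Every locale hunk above lands the same change: the four keys tips, pressEscToClose, startTyping, selectToNavigate join the existing command-palette strings (noMatchingCommands, tryDifferentSearch, slashHint). A minimal TypeScript sketch of the shared module shape follows; the hunks only show the innermost object, so the enclosing namespace name and the default export are assumptions, and English values stand in for the per-language strings:

const translation = {
  // hypothetical namespace; the hunks never show the enclosing key
  commandPalette: {
    noMatchingCommands: 'No matching commands found',
    tryDifferentSearch: 'Try a different search term',
    slashHint: 'Type / to see all available commands',
    // the four keys added per language in this commit
    tips: 'Press ↑↓ to navigate',
    selectToNavigate: 'Select to navigate',
    startTyping: 'Start typing to search',
    pressEscToClose: 'Press ESC to close',
  },
}

export default translation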
View File

@ -2,7 +2,7 @@
"name": "dify-web",
"version": "1.8.1",
"private": true,
"packageManager": "pnpm@10.15.1",
"packageManager": "pnpm@10.16.0",
"engines": {
"node": ">=v22.11.0"
},
@ -59,7 +59,7 @@
"@lexical/list": "^0.30.0",
"@lexical/react": "^0.30.0",
"@lexical/selection": "^0.30.0",
"@lexical/text": "^0.30.0",
"@lexical/text": "^0.35.0",
"@lexical/utils": "^0.30.0",
"@monaco-editor/react": "^4.6.0",
"@octokit/core": "^6.1.2",
@ -139,7 +139,7 @@
"remark-breaks": "^4.0.0",
"remark-gfm": "^4.0.0",
"remark-math": "^6.0.0",
"scheduler": "^0.23.0",
"scheduler": "^0.26.0",
"semver": "^7.6.3",
"server-only": "^0.0.1",
"sharp": "^0.33.2",
@ -166,7 +166,7 @@
"@happy-dom/jest-environment": "^17.4.4",
"@mdx-js/loader": "^3.1.0",
"@mdx-js/react": "^3.1.0",
"@next/bundle-analyzer": "15.5.0",
"@next/bundle-analyzer": "15.5.3",
"@next/eslint-plugin-next": "15.5.0",
"@next/mdx": "15.5.0",
"@rgrove/parse-xml": "^4.1.0",
@ -220,7 +220,7 @@
"lodash": "^4.17.21",
"magicast": "^0.3.4",
"postcss": "^8.4.47",
"sass": "^1.80.3",
"sass": "^1.92.1",
"storybook": "8.5.0",
"tailwindcss": "^3.4.14",
"ts-node": "^10.9.2",

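The package.json hunks bump five pins: packageManager pnpm 10.15.1 to 10.16.0, @lexical/text ^0.30.0 to ^0.35.0, scheduler ^0.23.0 to ^0.26.0, @next/bundle-analyzer 15.5.0 to 15.5.3, and sass ^1.80.3 to ^1.92.1. A quick, hypothetical check (not part of this commit) that each new range accepts the version the lockfile resolves to is semver.satisfies; semver ^7.6.3 is already a dependency above:

import semver from 'semver'

// range from package.json vs. resolution recorded in pnpm-lock.yaml below
console.log(semver.satisfies('0.35.0', '^0.35.0'))  // @lexical/text -> true
console.log(semver.satisfies('0.26.0', '^0.26.0'))  // scheduler -> true
console.log(semver.satisfies('1.92.1', '^1.92.1'))  // sass -> true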
View File

@ -91,8 +91,8 @@ importers:
specifier: ^0.30.0
version: 0.30.0
'@lexical/text':
specifier: ^0.30.0
version: 0.30.0
specifier: ^0.35.0
version: 0.35.0
'@lexical/utils':
specifier: ^0.30.0
version: 0.30.0
@ -230,10 +230,10 @@ importers:
version: 0.6.4
next:
specifier: 15.5.0
version: 15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.89.2)
version: 15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1)
next-pwa:
specifier: ^5.6.0
version: 5.6.0(@babel/core@7.28.3)(@types/babel__core@7.20.5)(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.89.2))(uglify-js@3.19.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
version: 5.6.0(@babel/core@7.28.3)(@types/babel__core@7.20.5)(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(uglify-js@3.19.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
next-themes:
specifier: ^0.4.3
version: 0.4.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1)
@ -331,8 +331,8 @@ importers:
specifier: ^6.0.0
version: 6.0.0
scheduler:
specifier: ^0.23.0
version: 0.23.2
specifier: ^0.26.0
version: 0.26.0
semver:
specifier: ^7.6.3
version: 7.7.2
@ -359,7 +359,7 @@ importers:
version: 7.0.10
use-context-selector:
specifier: ^2.0.0
version: 2.0.0(react@19.1.1)(scheduler@0.23.2)
version: 2.0.0(react@19.1.1)(scheduler@0.26.0)
uuid:
specifier: ^10.0.0
version: 10.0.0
@ -407,8 +407,8 @@ importers:
specifier: ^3.1.0
version: 3.1.0(@types/react@19.1.11)(react@19.1.1)
'@next/bundle-analyzer':
specifier: 15.5.0
version: 15.5.0
specifier: 15.5.3
version: 15.5.3
'@next/eslint-plugin-next':
specifier: 15.5.0
version: 15.5.0
@ -438,7 +438,7 @@ importers:
version: 8.5.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@8.5.0)
'@storybook/nextjs':
specifier: 8.5.0
version: 8.5.0(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.89.2))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.89.2)(storybook@8.5.0)(type-fest@2.19.0)(typescript@5.8.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
version: 8.5.0(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1)(storybook@8.5.0)(type-fest@2.19.0)(typescript@5.8.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
'@storybook/react':
specifier: 8.5.0
version: 8.5.0(@storybook/test@8.5.0(storybook@8.5.0))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@8.5.0)(typescript@5.8.3)
@ -569,8 +569,8 @@ importers:
specifier: ^8.4.47
version: 8.5.6
sass:
specifier: ^1.80.3
version: 1.89.2
specifier: ^1.92.1
version: 1.92.1
storybook:
specifier: 8.5.0
version: 8.5.0
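
The hunks above touch the importers section of pnpm-lock.yaml, where each dependency pairs the range requested by package.json (specifier) with the concrete resolution (version). The hunks that follow update the other two sections: packages, which stores one integrity hash per name@version, and snapshots, whose keys encode the resolved peer set, which is why a single sass bump rewrites every key that mentions it. A rough TypeScript model of the three sections, simplified from the real schema:

type DepEntry = { specifier: string; version: string }

type PnpmLock = {
  // per-workspace tables: importer path -> dependency name -> { range, resolution }
  importers: Record<string, {
    dependencies?: Record<string, DepEntry>
    devDependencies?: Record<string, DepEntry>
  }>
  // keyed by 'name@version'; carries the content hash plus engines, hasBin, etc.
  packages: Record<string, { resolution: { integrity: string } }>
  // keys encode resolved peers, e.g. 'sass-loader@14.2.1(sass@1.92.1)(webpack@...)'
  snapshots: Record<string, {
    dependencies?: Record<string, string>
    optionalDependencies?: Record<string, string>
  }>
}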
@ -2119,6 +2119,9 @@ packages:
'@lexical/text@0.30.0':
resolution: {integrity: sha512-P0ptriFwwP/hoDpz/MoBbzHxrFHqh0kCGzASWUdRZ1zrU0yPvJ9vV/UNMhyolH7xx+eAGI1Yl+m74NlpGmXqTg==}
'@lexical/text@0.35.0':
resolution: {integrity: sha512-uaMh46BkysV8hK8wQwp5g/ByZW+2hPDt8ahAErxtf8NuzQem1FHG/f5RTchmFqqUDVHO3qLNTv4AehEGmXv8MA==}
'@lexical/utils@0.30.0':
resolution: {integrity: sha512-VJlAUhupCZmnbYYX3zMWovd4viu2guR01sAqKGbbOMbP+4rlaymixFbinvNPaRKDBloOARi+fpiveQFxnyr/Ew==}
@ -2164,8 +2167,8 @@ packages:
'@napi-rs/wasm-runtime@0.2.12':
resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==}
'@next/bundle-analyzer@15.5.0':
resolution: {integrity: sha512-6pSHKzl4lcxqgmdNGXeVShYBBJuJkgXIx8cP15XtdzPtFn/ZllDI8b/2OqtU0HDPcsq53yMq1hjUBze1GoSKXQ==}
'@next/bundle-analyzer@15.5.3':
resolution: {integrity: sha512-l2NxnWHP2gWHbomAlz/wFnN2jNCx/dpr7P/XWeOLhULiyKkXSac8O8SjxRO/8FNhr2l4JNtWVKk82Uya4cZYTw==}
'@next/env@15.5.0':
resolution: {integrity: sha512-sDaprBAfzCQiOgo2pO+LhnV0Wt2wBgartjrr+dpcTORYVnnXD0gwhHhiiyIih9hQbq+JnbqH4odgcFWhqCGidw==}
@ -4811,6 +4814,10 @@ packages:
resolution: {integrity: sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==}
engines: {node: '>=8'}
detect-libc@2.1.0:
resolution: {integrity: sha512-vEtk+OcP7VBRtQZ1EJ3bdgzSfBjgnEalLTp5zjJrS+2Z1w2KZly4SBdac/WDU3hhsNAZ9E8SC96ME4Ey8MZ7cg==}
engines: {node: '>=8'}
detect-newline@3.1.0:
resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==}
engines: {node: '>=8'}
@ -6354,6 +6361,9 @@ packages:
lexical@0.30.0:
resolution: {integrity: sha512-6gxYeXaJiAcreJD0whCofvO0MuJmnWoIgIl1w7L5FTigfhnEohuCx2SoI/oywzfzXE9gzZnyr3rVvZrMItPL8A==}
lexical@0.35.0:
resolution: {integrity: sha512-3VuV8xXhh5xJA6tzvfDvE0YBCMkIZUmxtRilJQDDdCgJCc+eut6qAv2qbN+pbqvarqcQqPN1UF+8YvsjmyOZpw==}
lib0@0.2.114:
resolution: {integrity: sha512-gcxmNFzA4hv8UYi8j43uPlQ7CGcyMJ2KQb5kZASw6SnAKAf10hK12i2fjrS3Cl/ugZa5Ui6WwIu1/6MIXiHttQ==}
engines: {node: '>=16'}
@ -7858,8 +7868,8 @@ packages:
webpack:
optional: true
sass@1.89.2:
resolution: {integrity: sha512-xCmtksBKd/jdJ9Bt9p7nPKiuqrlBMBuuGkQlkhZjjQk3Ty48lv93k5Dq6OPkKt4XwxDJ7tvlfrTa1MPA9bf+QA==}
sass@1.92.1:
resolution: {integrity: sha512-ffmsdbwqb3XeyR8jJR6KelIXARM9bFQe8A6Q3W4Klmwy5Ckd5gz7jgUNHo4UOqutU5Sk1DtKLbpDP0nLCg1xqQ==}
engines: {node: '>=14.0.0'}
hasBin: true
@ -10777,6 +10787,10 @@ snapshots:
dependencies:
lexical: 0.30.0
'@lexical/text@0.35.0':
dependencies:
lexical: 0.35.0
'@lexical/utils@0.30.0':
dependencies:
'@lexical/list': 0.30.0
@ -10793,7 +10807,7 @@ snapshots:
'@mapbox/node-pre-gyp@1.0.11':
dependencies:
detect-libc: 2.0.4
detect-libc: 2.1.0
https-proxy-agent: 5.0.1
make-dir: 3.1.0
node-fetch: 2.7.0
@ -10881,7 +10895,7 @@ snapshots:
'@tybys/wasm-util': 0.10.0
optional: true
'@next/bundle-analyzer@15.5.0':
'@next/bundle-analyzer@15.5.3':
dependencies:
webpack-bundle-analyzer: 4.10.1
transitivePeerDependencies:
@ -11763,7 +11777,7 @@ snapshots:
dependencies:
storybook: 8.5.0
'@storybook/nextjs@8.5.0(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.89.2))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.89.2)(storybook@8.5.0)(type-fest@2.19.0)(typescript@5.8.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))':
'@storybook/nextjs@8.5.0(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1)(storybook@8.5.0)(type-fest@2.19.0)(typescript@5.8.3)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))':
dependencies:
'@babel/core': 7.28.3
'@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.28.3)
@ -11789,7 +11803,7 @@ snapshots:
find-up: 5.0.0
image-size: 1.2.1
loader-utils: 3.3.1
next: 15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.89.2)
next: 15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1)
node-polyfill-webpack-plugin: 2.0.1(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
pnp-webpack-plugin: 1.7.0(typescript@5.8.3)
postcss: 8.5.6
@ -11798,7 +11812,7 @@ snapshots:
react-dom: 19.1.1(react@19.1.1)
react-refresh: 0.14.2
resolve-url-loader: 5.0.0
sass-loader: 14.2.1(sass@1.89.2)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
sass-loader: 14.2.1(sass@1.92.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
semver: 7.7.2
storybook: 8.5.0
style-loader: 3.3.4(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
@ -13821,6 +13835,9 @@ snapshots:
detect-libc@2.0.4: {}
detect-libc@2.1.0:
optional: true
detect-newline@3.1.0: {}
detect-node-es@1.1.0: {}
@ -15865,6 +15882,8 @@ snapshots:
lexical@0.30.0: {}
lexical@0.35.0: {}
lib0@0.2.114:
dependencies:
isomorphic.js: 0.2.5
@ -16652,12 +16671,12 @@ snapshots:
neo-async@2.6.2: {}
next-pwa@5.6.0(@babel/core@7.28.3)(@types/babel__core@7.20.5)(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.89.2))(uglify-js@3.19.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)):
next-pwa@5.6.0(@babel/core@7.28.3)(@types/babel__core@7.20.5)(esbuild@0.25.0)(next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1))(uglify-js@3.19.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)):
dependencies:
babel-loader: 8.4.1(@babel/core@7.28.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
clean-webpack-plugin: 4.0.0(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
globby: 11.1.0
next: 15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.89.2)
next: 15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1)
terser-webpack-plugin: 5.3.14(esbuild@0.25.0)(uglify-js@3.19.3)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
workbox-webpack-plugin: 6.6.0(@types/babel__core@7.20.5)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3))
workbox-window: 6.6.0
@ -16675,7 +16694,7 @@ snapshots:
react: 19.1.1
react-dom: 19.1.1(react@19.1.1)
next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.89.2):
next@15.5.0(@babel/core@7.28.3)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(sass@1.92.1):
dependencies:
'@next/env': 15.5.0
'@swc/helpers': 0.5.15
@ -16693,7 +16712,7 @@ snapshots:
'@next/swc-linux-x64-musl': 15.5.0
'@next/swc-win32-arm64-msvc': 15.5.0
'@next/swc-win32-x64-msvc': 15.5.0
sass: 1.89.2
sass: 1.92.1
sharp: 0.34.3
transitivePeerDependencies:
- '@babel/core'
@ -17793,14 +17812,14 @@ snapshots:
rw@1.3.3: {}
sass-loader@14.2.1(sass@1.89.2)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)):
sass-loader@14.2.1(sass@1.92.1)(webpack@5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)):
dependencies:
neo-async: 2.6.2
optionalDependencies:
sass: 1.89.2
sass: 1.92.1
webpack: 5.100.2(esbuild@0.25.0)(uglify-js@3.19.3)
sass@1.89.2:
sass@1.92.1:
dependencies:
chokidar: 4.0.3
immutable: 5.1.3
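
The final hunk below moves the use-context-selector snapshot from scheduler 0.23.2 to 0.26.0; the snapshot key itself changes because the resolved peer is part of it. For context, a hedged usage sketch of the library (the component and store shape are invented for illustration and do not appear in this repo):

import { createContext, useContextSelector } from 'use-context-selector'

// hypothetical store shape, purely illustrative
const StoreContext = createContext({ count: 0, label: '' })

function CountBadge() {
  // subscribes to `count` only; updates that change just `label` skip this component
  const count = useContextSelector(StoreContext, state => state.count)
  return count
}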
@ -18527,10 +18546,10 @@ snapshots:
optionalDependencies:
'@types/react': 19.1.11
use-context-selector@2.0.0(react@19.1.1)(scheduler@0.23.2):
use-context-selector@2.0.0(react@19.1.1)(scheduler@0.26.0):
dependencies:
react: 19.1.1
scheduler: 0.23.2
scheduler: 0.26.0
use-isomorphic-layout-effect@1.2.1(@types/react@19.1.11)(react@19.1.1):
dependencies: