mirror of
https://github.com/langgenius/dify.git
synced 2026-05-10 22:28:55 +08:00
Merge branch 'main' into jzh
This commit is contained in:
commit
0d47750b15
@ -7,7 +7,7 @@ cd web && pnpm install
|
||||
pipx install uv
|
||||
|
||||
echo "alias start-api=\"cd $WORKSPACE_ROOT/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug\"" >> ~/.bashrc
|
||||
echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,dataset_summary,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention\"" >> ~/.bashrc
|
||||
echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,dataset_summary,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_publisher,trigger_refresh_executor,retention\"" >> ~/.bashrc
|
||||
echo "alias start-web=\"cd $WORKSPACE_ROOT/web && pnpm dev:inspect\"" >> ~/.bashrc
|
||||
echo "alias start-web-prod=\"cd $WORKSPACE_ROOT/web && pnpm build && pnpm start\"" >> ~/.bashrc
|
||||
echo "alias start-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d\"" >> ~/.bashrc
|
||||
|
||||
15
.vscode/launch.json.template
vendored
15
.vscode/launch.json.template
vendored
@ -2,21 +2,10 @@
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python: Flask API",
|
||||
"name": "Python: API (gevent)",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "flask",
|
||||
"env": {
|
||||
"FLASK_APP": "app.py",
|
||||
"FLASK_ENV": "development"
|
||||
},
|
||||
"args": [
|
||||
"run",
|
||||
"--host=0.0.0.0",
|
||||
"--port=5001",
|
||||
"--no-debugger",
|
||||
"--no-reload"
|
||||
],
|
||||
"program": "${workspaceFolder}/api/app.py",
|
||||
"jinja": true,
|
||||
"justMyCode": true,
|
||||
"cwd": "${workspaceFolder}/api",
|
||||
|
||||
@ -33,6 +33,9 @@ TRIGGER_URL=http://localhost:5001
|
||||
# The time in seconds after the signature is rejected
|
||||
FILES_ACCESS_TIMEOUT=300
|
||||
|
||||
# Collaboration mode toggle
|
||||
ENABLE_COLLABORATION_MODE=false
|
||||
|
||||
# Access token expiration time in minutes
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES=60
|
||||
|
||||
|
||||
18
api/.vscode/launch.json.example
vendored
18
api/.vscode/launch.json.example
vendored
@ -3,29 +3,21 @@
|
||||
"compounds": [
|
||||
{
|
||||
"name": "Launch Flask and Celery",
|
||||
"configurations": ["Python: Flask", "Python: Celery"]
|
||||
"configurations": ["Python: API (gevent)", "Python: Celery"]
|
||||
}
|
||||
],
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python: Flask",
|
||||
"consoleName": "Flask",
|
||||
"name": "Python: API (gevent)",
|
||||
"consoleName": "API",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"python": "${workspaceFolder}/.venv/bin/python",
|
||||
"cwd": "${workspaceFolder}",
|
||||
"envFile": ".env",
|
||||
"module": "flask",
|
||||
"program": "${workspaceFolder}/app.py",
|
||||
"justMyCode": true,
|
||||
"jinja": true,
|
||||
"env": {
|
||||
"FLASK_APP": "app.py",
|
||||
"GEVENT_SUPPORT": "True"
|
||||
},
|
||||
"args": [
|
||||
"run",
|
||||
"--port=5001"
|
||||
]
|
||||
"jinja": true
|
||||
},
|
||||
{
|
||||
"name": "Python: Celery",
|
||||
|
||||
29
api/app.py
29
api/app.py
@ -1,5 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
@ -9,17 +10,35 @@ if TYPE_CHECKING:
|
||||
celery: Celery
|
||||
|
||||
|
||||
HOST = "0.0.0.0"
|
||||
PORT = 5001
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def is_db_command() -> bool:
|
||||
if len(sys.argv) > 1 and sys.argv[0].endswith("flask") and sys.argv[1] == "db":
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def log_startup_banner(host: str, port: int) -> None:
|
||||
debugger_attached = sys.gettrace() is not None
|
||||
logger.info("Serving Dify API via gevent WebSocket server")
|
||||
logger.info("Bound to http://%s:%s", host, port)
|
||||
logger.info("Debugger attached: %s", "on" if debugger_attached else "off")
|
||||
logger.info("Press CTRL+C to quit")
|
||||
|
||||
|
||||
# create app
|
||||
flask_app = None
|
||||
socketio_app = None
|
||||
|
||||
if is_db_command():
|
||||
from app_factory import create_migrations_app
|
||||
|
||||
app = create_migrations_app()
|
||||
socketio_app = app
|
||||
flask_app = app
|
||||
else:
|
||||
# Gunicorn and Celery handle monkey patching automatically in production by
|
||||
# specifying the `gevent` worker class. Manual monkey patching is not required here.
|
||||
@ -30,8 +49,14 @@ else:
|
||||
|
||||
from app_factory import create_app
|
||||
|
||||
app = create_app()
|
||||
socketio_app, flask_app = create_app()
|
||||
app = flask_app
|
||||
celery = cast("Celery", app.extensions["celery"])
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(host="0.0.0.0", port=5001)
|
||||
from gevent import pywsgi
|
||||
from geventwebsocket.handler import WebSocketHandler # type: ignore[reportMissingTypeStubs]
|
||||
|
||||
log_startup_banner(HOST, PORT)
|
||||
server = pywsgi.WSGIServer((HOST, PORT), socketio_app, handler_class=WebSocketHandler)
|
||||
server.serve_forever()
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
import logging
|
||||
import time
|
||||
|
||||
import socketio # type: ignore[reportMissingTypeStubs]
|
||||
from flask import request
|
||||
from opentelemetry.trace import get_current_span
|
||||
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID
|
||||
@ -10,6 +11,7 @@ from contexts.wrapper import RecyclableContextVar
|
||||
from controllers.console.error import UnauthorizedAndForceLogout
|
||||
from core.logging.context import init_request_context
|
||||
from dify_app import DifyApp
|
||||
from extensions.ext_socketio import sio
|
||||
from services.enterprise.enterprise_service import EnterpriseService
|
||||
from services.feature_service import LicenseStatus
|
||||
|
||||
@ -122,14 +124,18 @@ def create_flask_app_with_configs() -> DifyApp:
|
||||
return dify_app
|
||||
|
||||
|
||||
def create_app() -> DifyApp:
|
||||
def create_app() -> tuple[socketio.WSGIApp, DifyApp]:
|
||||
start_time = time.perf_counter()
|
||||
app = create_flask_app_with_configs()
|
||||
initialize_extensions(app)
|
||||
|
||||
sio.app = app
|
||||
socketio_app = socketio.WSGIApp(sio, app)
|
||||
|
||||
end_time = time.perf_counter()
|
||||
if dify_config.DEBUG:
|
||||
logger.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
|
||||
return app
|
||||
return socketio_app, app
|
||||
|
||||
|
||||
def initialize_extensions(app: DifyApp):
|
||||
|
||||
@ -1274,6 +1274,13 @@ class PositionConfig(BaseSettings):
|
||||
return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}
|
||||
|
||||
|
||||
class CollaborationConfig(BaseSettings):
|
||||
ENABLE_COLLABORATION_MODE: bool = Field(
|
||||
description="Whether to enable collaboration mode features across the workspace",
|
||||
default=False,
|
||||
)
|
||||
|
||||
|
||||
class LoginConfig(BaseSettings):
|
||||
ENABLE_EMAIL_CODE_LOGIN: bool = Field(
|
||||
description="whether to enable email code login",
|
||||
@ -1399,6 +1406,7 @@ class FeatureConfig(
|
||||
WorkflowConfig,
|
||||
WorkflowNodeExecutionConfig,
|
||||
WorkspaceConfig,
|
||||
CollaborationConfig,
|
||||
LoginConfig,
|
||||
AccountConfig,
|
||||
SwaggerUIConfig,
|
||||
|
||||
@ -65,6 +65,7 @@ from .app import (
|
||||
statistic,
|
||||
workflow,
|
||||
workflow_app_log,
|
||||
workflow_comment,
|
||||
workflow_draft_variable,
|
||||
workflow_run,
|
||||
workflow_statistic,
|
||||
@ -116,6 +117,7 @@ from .explore import (
|
||||
saved_message,
|
||||
trial,
|
||||
)
|
||||
from .socketio import workflow as socketio_workflow # pyright: ignore[reportUnusedImport]
|
||||
|
||||
# Import tag controllers
|
||||
from .tag import tags
|
||||
@ -201,6 +203,7 @@ __all__ = [
|
||||
"saved_message",
|
||||
"setup",
|
||||
"site",
|
||||
"socketio_workflow",
|
||||
"spec",
|
||||
"statistic",
|
||||
"tags",
|
||||
@ -211,6 +214,7 @@ __all__ = [
|
||||
"website",
|
||||
"workflow",
|
||||
"workflow_app_log",
|
||||
"workflow_comment",
|
||||
"workflow_draft_variable",
|
||||
"workflow_run",
|
||||
"workflow_statistic",
|
||||
|
||||
@ -7,6 +7,7 @@ from flask import abort, request
|
||||
from flask_restx import Resource, fields, marshal, marshal_with
|
||||
from graphon.enums import NodeType
|
||||
from graphon.file import File
|
||||
from graphon.file import helpers as file_helpers
|
||||
from graphon.graph_engine.manager import GraphEngineManager
|
||||
from graphon.model_runtime.utils.encoders import jsonable_encoder
|
||||
from pydantic import BaseModel, Field, ValidationError, field_validator
|
||||
@ -39,6 +40,7 @@ from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
from factories import file_factory, variable_factory
|
||||
from fields.member_fields import simple_account_fields
|
||||
from fields.online_user_fields import online_user_list_fields
|
||||
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
|
||||
from libs import helper
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
@ -47,6 +49,7 @@ from libs.login import current_account_with_tenant, login_required
|
||||
from models import App
|
||||
from models.model import AppMode
|
||||
from models.workflow import Workflow
|
||||
from repositories.workflow_collaboration_repository import WORKFLOW_ONLINE_USERS_PREFIX
|
||||
from services.app_generate_service import AppGenerateService
|
||||
from services.errors.app import IsDraftWorkflowError, WorkflowHashNotEqualError, WorkflowNotFoundError
|
||||
from services.errors.llm import InvokeRateLimitError
|
||||
@ -57,6 +60,7 @@ _file_access_controller = DatabaseFileAccessController()
|
||||
LISTENING_RETRY_IN = 2000
|
||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
||||
RESTORE_SOURCE_WORKFLOW_MUST_BE_PUBLISHED_MESSAGE = "source workflow must be published"
|
||||
MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS = 50
|
||||
|
||||
# Register models for flask_restx to avoid dict type issues in Swagger
|
||||
# Register in dependency order: base models first, then dependent models
|
||||
@ -150,6 +154,14 @@ class ConvertToWorkflowPayload(BaseModel):
|
||||
icon_background: str | None = None
|
||||
|
||||
|
||||
class WorkflowFeaturesPayload(BaseModel):
|
||||
features: dict[str, Any] = Field(..., description="Workflow feature configuration")
|
||||
|
||||
|
||||
class WorkflowOnlineUsersQuery(BaseModel):
|
||||
app_ids: str = Field(..., description="Comma-separated app IDs")
|
||||
|
||||
|
||||
class DraftWorkflowTriggerRunPayload(BaseModel):
|
||||
node_id: str
|
||||
|
||||
@ -173,6 +185,8 @@ reg(DefaultBlockConfigQuery)
|
||||
reg(ConvertToWorkflowPayload)
|
||||
reg(WorkflowListQuery)
|
||||
reg(WorkflowUpdatePayload)
|
||||
reg(WorkflowFeaturesPayload)
|
||||
reg(WorkflowOnlineUsersQuery)
|
||||
reg(DraftWorkflowTriggerRunPayload)
|
||||
reg(DraftWorkflowTriggerRunAllPayload)
|
||||
|
||||
@ -931,6 +945,32 @@ class ConvertToWorkflowApi(Resource):
|
||||
}
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/features")
|
||||
class WorkflowFeaturesApi(Resource):
|
||||
"""Update draft workflow features."""
|
||||
|
||||
@console_ns.expect(console_ns.models[WorkflowFeaturesPayload.__name__])
|
||||
@console_ns.doc("update_workflow_features")
|
||||
@console_ns.doc(description="Update draft workflow features")
|
||||
@console_ns.doc(params={"app_id": "Application ID"})
|
||||
@console_ns.response(200, "Workflow features updated successfully")
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
@edit_permission_required
|
||||
def post(self, app_model: App):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
|
||||
args = WorkflowFeaturesPayload.model_validate(console_ns.payload or {})
|
||||
features = args.features
|
||||
|
||||
workflow_service = WorkflowService()
|
||||
workflow_service.update_draft_workflow_features(app_model=app_model, features=features, account=current_user)
|
||||
|
||||
return {"result": "success"}
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflows")
|
||||
class PublishedAllWorkflowApi(Resource):
|
||||
@console_ns.expect(console_ns.models[WorkflowListQuery.__name__])
|
||||
@ -1340,3 +1380,62 @@ class DraftWorkflowTriggerRunAllApi(Resource):
|
||||
"status": "error",
|
||||
}
|
||||
), 400
|
||||
|
||||
|
||||
@console_ns.route("/apps/workflows/online-users")
|
||||
class WorkflowOnlineUsersApi(Resource):
|
||||
@console_ns.expect(console_ns.models[WorkflowOnlineUsersQuery.__name__])
|
||||
@console_ns.doc("get_workflow_online_users")
|
||||
@console_ns.doc(description="Get workflow online users")
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@marshal_with(online_user_list_fields)
|
||||
def get(self):
|
||||
args = WorkflowOnlineUsersQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
||||
|
||||
app_ids = list(dict.fromkeys(app_id.strip() for app_id in args.app_ids.split(",") if app_id.strip()))
|
||||
if len(app_ids) > MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS:
|
||||
raise BadRequest(f"Maximum {MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS} app_ids are allowed per request.")
|
||||
|
||||
if not app_ids:
|
||||
return {"data": []}
|
||||
|
||||
_, current_tenant_id = current_account_with_tenant()
|
||||
workflow_service = WorkflowService()
|
||||
accessible_app_ids = workflow_service.get_accessible_app_ids(app_ids, current_tenant_id)
|
||||
|
||||
results = []
|
||||
for app_id in app_ids:
|
||||
if app_id not in accessible_app_ids:
|
||||
continue
|
||||
|
||||
users_json = redis_client.hgetall(f"{WORKFLOW_ONLINE_USERS_PREFIX}{app_id}")
|
||||
|
||||
users = []
|
||||
for _, user_info_json in users_json.items():
|
||||
try:
|
||||
user_info = json.loads(user_info_json)
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
if not isinstance(user_info, dict):
|
||||
continue
|
||||
|
||||
avatar = user_info.get("avatar")
|
||||
if isinstance(avatar, str) and avatar and not avatar.startswith(("http://", "https://")):
|
||||
try:
|
||||
user_info["avatar"] = file_helpers.get_signed_file_url(avatar)
|
||||
except Exception as exc:
|
||||
logger.warning(
|
||||
"Failed to sign workflow online user avatar; using original value. "
|
||||
"app_id=%s avatar=%s error=%s",
|
||||
app_id,
|
||||
avatar,
|
||||
exc,
|
||||
)
|
||||
|
||||
users.append(user_info)
|
||||
results.append({"app_id": app_id, "users": users})
|
||||
|
||||
return {"data": results}
|
||||
|
||||
335
api/controllers/console/app/workflow_comment.py
Normal file
335
api/controllers/console/app/workflow_comment.py
Normal file
@ -0,0 +1,335 @@
|
||||
import logging
|
||||
|
||||
from flask_restx import Resource, marshal_with
|
||||
from pydantic import BaseModel, Field, TypeAdapter
|
||||
|
||||
from controllers.common.schema import register_schema_models
|
||||
from controllers.console import console_ns
|
||||
from controllers.console.app.wraps import get_app_model
|
||||
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
|
||||
from fields.member_fields import AccountWithRole
|
||||
from fields.workflow_comment_fields import (
|
||||
workflow_comment_basic_fields,
|
||||
workflow_comment_create_fields,
|
||||
workflow_comment_detail_fields,
|
||||
workflow_comment_reply_create_fields,
|
||||
workflow_comment_reply_update_fields,
|
||||
workflow_comment_resolve_fields,
|
||||
workflow_comment_update_fields,
|
||||
)
|
||||
from libs.login import current_user, login_required
|
||||
from models import App
|
||||
from services.account_service import TenantService
|
||||
from services.workflow_comment_service import WorkflowCommentService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
||||
|
||||
|
||||
class WorkflowCommentCreatePayload(BaseModel):
|
||||
content: str = Field(..., description="Comment content")
|
||||
position_x: float = Field(..., description="Comment X position")
|
||||
position_y: float = Field(..., description="Comment Y position")
|
||||
mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")
|
||||
|
||||
|
||||
class WorkflowCommentUpdatePayload(BaseModel):
|
||||
content: str = Field(..., description="Comment content")
|
||||
position_x: float | None = Field(default=None, description="Comment X position")
|
||||
position_y: float | None = Field(default=None, description="Comment Y position")
|
||||
mentioned_user_ids: list[str] | None = Field(
|
||||
default=None,
|
||||
description="Mentioned user IDs. Omit to keep existing mentions.",
|
||||
)
|
||||
|
||||
|
||||
class WorkflowCommentReplyPayload(BaseModel):
|
||||
content: str = Field(..., description="Reply content")
|
||||
mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")
|
||||
|
||||
|
||||
class WorkflowCommentMentionUsersPayload(BaseModel):
|
||||
users: list[AccountWithRole]
|
||||
|
||||
|
||||
for model in (
|
||||
WorkflowCommentCreatePayload,
|
||||
WorkflowCommentUpdatePayload,
|
||||
WorkflowCommentReplyPayload,
|
||||
):
|
||||
console_ns.schema_model(model.__name__, model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
||||
register_schema_models(console_ns, AccountWithRole, WorkflowCommentMentionUsersPayload)
|
||||
|
||||
workflow_comment_basic_model = console_ns.model("WorkflowCommentBasic", workflow_comment_basic_fields)
|
||||
workflow_comment_detail_model = console_ns.model("WorkflowCommentDetail", workflow_comment_detail_fields)
|
||||
workflow_comment_create_model = console_ns.model("WorkflowCommentCreate", workflow_comment_create_fields)
|
||||
workflow_comment_update_model = console_ns.model("WorkflowCommentUpdate", workflow_comment_update_fields)
|
||||
workflow_comment_resolve_model = console_ns.model("WorkflowCommentResolve", workflow_comment_resolve_fields)
|
||||
workflow_comment_reply_create_model = console_ns.model(
|
||||
"WorkflowCommentReplyCreate", workflow_comment_reply_create_fields
|
||||
)
|
||||
workflow_comment_reply_update_model = console_ns.model(
|
||||
"WorkflowCommentReplyUpdate", workflow_comment_reply_update_fields
|
||||
)
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments")
|
||||
class WorkflowCommentListApi(Resource):
|
||||
"""API for listing and creating workflow comments."""
|
||||
|
||||
@console_ns.doc("list_workflow_comments")
|
||||
@console_ns.doc(description="Get all comments for a workflow")
|
||||
@console_ns.doc(params={"app_id": "Application ID"})
|
||||
@console_ns.response(200, "Comments retrieved successfully", workflow_comment_basic_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_basic_model, envelope="data")
|
||||
def get(self, app_model: App):
|
||||
"""Get all comments for a workflow."""
|
||||
comments = WorkflowCommentService.get_comments(tenant_id=current_user.current_tenant_id, app_id=app_model.id)
|
||||
|
||||
return comments
|
||||
|
||||
@console_ns.doc("create_workflow_comment")
|
||||
@console_ns.doc(description="Create a new workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID"})
|
||||
@console_ns.expect(console_ns.models[WorkflowCommentCreatePayload.__name__])
|
||||
@console_ns.response(201, "Comment created successfully", workflow_comment_create_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_create_model)
|
||||
@edit_permission_required
|
||||
def post(self, app_model: App):
|
||||
"""Create a new workflow comment."""
|
||||
payload = WorkflowCommentCreatePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
result = WorkflowCommentService.create_comment(
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
app_id=app_model.id,
|
||||
created_by=current_user.id,
|
||||
content=payload.content,
|
||||
position_x=payload.position_x,
|
||||
position_y=payload.position_y,
|
||||
mentioned_user_ids=payload.mentioned_user_ids,
|
||||
)
|
||||
|
||||
return result, 201
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>")
|
||||
class WorkflowCommentDetailApi(Resource):
|
||||
"""API for managing individual workflow comments."""
|
||||
|
||||
@console_ns.doc("get_workflow_comment")
|
||||
@console_ns.doc(description="Get a specific workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
|
||||
@console_ns.response(200, "Comment retrieved successfully", workflow_comment_detail_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_detail_model)
|
||||
def get(self, app_model: App, comment_id: str):
|
||||
"""Get a specific workflow comment."""
|
||||
comment = WorkflowCommentService.get_comment(
|
||||
tenant_id=current_user.current_tenant_id, app_id=app_model.id, comment_id=comment_id
|
||||
)
|
||||
|
||||
return comment
|
||||
|
||||
@console_ns.doc("update_workflow_comment")
|
||||
@console_ns.doc(description="Update a workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
|
||||
@console_ns.expect(console_ns.models[WorkflowCommentUpdatePayload.__name__])
|
||||
@console_ns.response(200, "Comment updated successfully", workflow_comment_update_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_update_model)
|
||||
@edit_permission_required
|
||||
def put(self, app_model: App, comment_id: str):
|
||||
"""Update a workflow comment."""
|
||||
payload = WorkflowCommentUpdatePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
result = WorkflowCommentService.update_comment(
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
app_id=app_model.id,
|
||||
comment_id=comment_id,
|
||||
user_id=current_user.id,
|
||||
content=payload.content,
|
||||
position_x=payload.position_x,
|
||||
position_y=payload.position_y,
|
||||
mentioned_user_ids=payload.mentioned_user_ids,
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
@console_ns.doc("delete_workflow_comment")
|
||||
@console_ns.doc(description="Delete a workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
|
||||
@console_ns.response(204, "Comment deleted successfully")
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@edit_permission_required
|
||||
def delete(self, app_model: App, comment_id: str):
|
||||
"""Delete a workflow comment."""
|
||||
WorkflowCommentService.delete_comment(
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
app_id=app_model.id,
|
||||
comment_id=comment_id,
|
||||
user_id=current_user.id,
|
||||
)
|
||||
|
||||
return {"result": "success"}, 204
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/resolve")
|
||||
class WorkflowCommentResolveApi(Resource):
|
||||
"""API for resolving and reopening workflow comments."""
|
||||
|
||||
@console_ns.doc("resolve_workflow_comment")
|
||||
@console_ns.doc(description="Resolve a workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
|
||||
@console_ns.response(200, "Comment resolved successfully", workflow_comment_resolve_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_resolve_model)
|
||||
@edit_permission_required
|
||||
def post(self, app_model: App, comment_id: str):
|
||||
"""Resolve a workflow comment."""
|
||||
comment = WorkflowCommentService.resolve_comment(
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
app_id=app_model.id,
|
||||
comment_id=comment_id,
|
||||
user_id=current_user.id,
|
||||
)
|
||||
|
||||
return comment
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies")
|
||||
class WorkflowCommentReplyApi(Resource):
|
||||
"""API for managing comment replies."""
|
||||
|
||||
@console_ns.doc("create_workflow_comment_reply")
|
||||
@console_ns.doc(description="Add a reply to a workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
|
||||
@console_ns.expect(console_ns.models[WorkflowCommentReplyPayload.__name__])
|
||||
@console_ns.response(201, "Reply created successfully", workflow_comment_reply_create_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_reply_create_model)
|
||||
@edit_permission_required
|
||||
def post(self, app_model: App, comment_id: str):
|
||||
"""Add a reply to a workflow comment."""
|
||||
# Validate comment access first
|
||||
WorkflowCommentService.validate_comment_access(
|
||||
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
|
||||
)
|
||||
|
||||
payload = WorkflowCommentReplyPayload.model_validate(console_ns.payload or {})
|
||||
|
||||
result = WorkflowCommentService.create_reply(
|
||||
comment_id=comment_id,
|
||||
content=payload.content,
|
||||
created_by=current_user.id,
|
||||
mentioned_user_ids=payload.mentioned_user_ids,
|
||||
)
|
||||
|
||||
return result, 201
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies/<string:reply_id>")
|
||||
class WorkflowCommentReplyDetailApi(Resource):
|
||||
"""API for managing individual comment replies."""
|
||||
|
||||
@console_ns.doc("update_workflow_comment_reply")
|
||||
@console_ns.doc(description="Update a comment reply")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID", "reply_id": "Reply ID"})
|
||||
@console_ns.expect(console_ns.models[WorkflowCommentReplyPayload.__name__])
|
||||
@console_ns.response(200, "Reply updated successfully", workflow_comment_reply_update_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_reply_update_model)
|
||||
@edit_permission_required
|
||||
def put(self, app_model: App, comment_id: str, reply_id: str):
|
||||
"""Update a comment reply."""
|
||||
# Validate comment access first
|
||||
WorkflowCommentService.validate_comment_access(
|
||||
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
|
||||
)
|
||||
|
||||
payload = WorkflowCommentReplyPayload.model_validate(console_ns.payload or {})
|
||||
|
||||
reply = WorkflowCommentService.update_reply(
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
app_id=app_model.id,
|
||||
comment_id=comment_id,
|
||||
reply_id=reply_id,
|
||||
user_id=current_user.id,
|
||||
content=payload.content,
|
||||
mentioned_user_ids=payload.mentioned_user_ids,
|
||||
)
|
||||
|
||||
return reply
|
||||
|
||||
@console_ns.doc("delete_workflow_comment_reply")
|
||||
@console_ns.doc(description="Delete a comment reply")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID", "reply_id": "Reply ID"})
|
||||
@console_ns.response(204, "Reply deleted successfully")
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@edit_permission_required
|
||||
def delete(self, app_model: App, comment_id: str, reply_id: str):
|
||||
"""Delete a comment reply."""
|
||||
# Validate comment access first
|
||||
WorkflowCommentService.validate_comment_access(
|
||||
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
|
||||
)
|
||||
|
||||
WorkflowCommentService.delete_reply(
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
app_id=app_model.id,
|
||||
comment_id=comment_id,
|
||||
reply_id=reply_id,
|
||||
user_id=current_user.id,
|
||||
)
|
||||
|
||||
return {"result": "success"}, 204
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/mention-users")
|
||||
class WorkflowCommentMentionUsersApi(Resource):
|
||||
"""API for getting mentionable users for workflow comments."""
|
||||
|
||||
@console_ns.doc("workflow_comment_mention_users")
|
||||
@console_ns.doc(description="Get all users in current tenant for mentions")
|
||||
@console_ns.doc(params={"app_id": "Application ID"})
|
||||
@console_ns.response(
|
||||
200, "Mentionable users retrieved successfully", console_ns.models[WorkflowCommentMentionUsersPayload.__name__]
|
||||
)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
def get(self, app_model: App):
|
||||
"""Get all users in current tenant for mentions."""
|
||||
members = TenantService.get_tenant_members(current_user.current_tenant)
|
||||
users = TypeAdapter(list[AccountWithRole]).validate_python(members, from_attributes=True)
|
||||
response = WorkflowCommentMentionUsersPayload(users=users)
|
||||
return response.model_dump(mode="json"), 200
|
||||
@ -22,6 +22,7 @@ from controllers.web.error import InvalidArgumentError, NotFoundError
|
||||
from core.app.file_access import DatabaseFileAccessController
|
||||
from core.workflow.variable_prefixes import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
|
||||
from extensions.ext_database import db
|
||||
from factories import variable_factory
|
||||
from factories.file_factory import build_from_mapping, build_from_mappings
|
||||
from factories.variable_factory import build_segment_with_type
|
||||
from libs.login import current_user, login_required
|
||||
@ -45,6 +46,16 @@ class WorkflowDraftVariableUpdatePayload(BaseModel):
|
||||
value: Any | None = Field(default=None, description="Variable value")
|
||||
|
||||
|
||||
class ConversationVariableUpdatePayload(BaseModel):
|
||||
conversation_variables: list[dict[str, Any]] = Field(
|
||||
..., description="Conversation variables for the draft workflow"
|
||||
)
|
||||
|
||||
|
||||
class EnvironmentVariableUpdatePayload(BaseModel):
|
||||
environment_variables: list[dict[str, Any]] = Field(..., description="Environment variables for the draft workflow")
|
||||
|
||||
|
||||
console_ns.schema_model(
|
||||
WorkflowDraftVariableListQuery.__name__,
|
||||
WorkflowDraftVariableListQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
||||
@ -53,6 +64,14 @@ console_ns.schema_model(
|
||||
WorkflowDraftVariableUpdatePayload.__name__,
|
||||
WorkflowDraftVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
||||
)
|
||||
console_ns.schema_model(
|
||||
ConversationVariableUpdatePayload.__name__,
|
||||
ConversationVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
||||
)
|
||||
console_ns.schema_model(
|
||||
EnvironmentVariableUpdatePayload.__name__,
|
||||
EnvironmentVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
||||
)
|
||||
|
||||
|
||||
def _convert_values_to_json_serializable_object(value: Segment):
|
||||
@ -510,6 +529,34 @@ class ConversationVariableCollectionApi(Resource):
|
||||
db.session.commit()
|
||||
return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)
|
||||
|
||||
@console_ns.expect(console_ns.models[ConversationVariableUpdatePayload.__name__])
|
||||
@console_ns.doc("update_conversation_variables")
|
||||
@console_ns.doc(description="Update conversation variables for workflow draft")
|
||||
@console_ns.doc(params={"app_id": "Application ID"})
|
||||
@console_ns.response(200, "Conversation variables updated successfully")
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@edit_permission_required
|
||||
@get_app_model(mode=AppMode.ADVANCED_CHAT)
|
||||
def post(self, app_model: App):
|
||||
payload = ConversationVariableUpdatePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
workflow_service = WorkflowService()
|
||||
|
||||
conversation_variables_list = payload.conversation_variables
|
||||
conversation_variables = [
|
||||
variable_factory.build_conversation_variable_from_mapping(obj) for obj in conversation_variables_list
|
||||
]
|
||||
|
||||
workflow_service.update_draft_workflow_conversation_variables(
|
||||
app_model=app_model,
|
||||
account=current_user,
|
||||
conversation_variables=conversation_variables,
|
||||
)
|
||||
|
||||
return {"result": "success"}
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/system-variables")
|
||||
class SystemVariableCollectionApi(Resource):
|
||||
@ -561,3 +608,31 @@ class EnvironmentVariableCollectionApi(Resource):
|
||||
)
|
||||
|
||||
return {"items": env_vars_list}
|
||||
|
||||
@console_ns.expect(console_ns.models[EnvironmentVariableUpdatePayload.__name__])
|
||||
@console_ns.doc("update_environment_variables")
|
||||
@console_ns.doc(description="Update environment variables for workflow draft")
|
||||
@console_ns.doc(params={"app_id": "Application ID"})
|
||||
@console_ns.response(200, "Environment variables updated successfully")
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@edit_permission_required
|
||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||
def post(self, app_model: App):
|
||||
payload = EnvironmentVariableUpdatePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
workflow_service = WorkflowService()
|
||||
|
||||
environment_variables_list = payload.environment_variables
|
||||
environment_variables = [
|
||||
variable_factory.build_environment_variable_from_mapping(obj) for obj in environment_variables_list
|
||||
]
|
||||
|
||||
workflow_service.update_draft_workflow_environment_variables(
|
||||
app_model=app_model,
|
||||
account=current_user,
|
||||
environment_variables=environment_variables,
|
||||
)
|
||||
|
||||
return {"result": "success"}
|
||||
|
||||
@ -169,6 +169,7 @@ console_ns.schema_model(
|
||||
|
||||
|
||||
class TrialAppWorkflowRunApi(TrialAppResource):
|
||||
@trial_feature_enable
|
||||
@console_ns.expect(console_ns.models[WorkflowRunRequest.__name__])
|
||||
def post(self, trial_app):
|
||||
"""
|
||||
@ -210,6 +211,7 @@ class TrialAppWorkflowRunApi(TrialAppResource):
|
||||
|
||||
|
||||
class TrialAppWorkflowTaskStopApi(TrialAppResource):
|
||||
@trial_feature_enable
|
||||
def post(self, trial_app, task_id: str):
|
||||
"""
|
||||
Stop workflow task
|
||||
@ -290,7 +292,6 @@ class TrialChatApi(TrialAppResource):
|
||||
|
||||
|
||||
class TrialMessageSuggestedQuestionApi(TrialAppResource):
|
||||
@trial_feature_enable
|
||||
def get(self, trial_app, message_id):
|
||||
app_model = trial_app
|
||||
app_mode = AppMode.value_of(app_model.mode)
|
||||
@ -470,7 +471,6 @@ class TrialCompletionApi(TrialAppResource):
|
||||
class TrialSitApi(Resource):
|
||||
"""Resource for trial app sites."""
|
||||
|
||||
@trial_feature_enable
|
||||
@get_app_model_with_trial(None)
|
||||
def get(self, app_model):
|
||||
"""Retrieve app site info.
|
||||
@ -492,7 +492,6 @@ class TrialSitApi(Resource):
|
||||
class TrialAppParameterApi(Resource):
|
||||
"""Resource for app variables."""
|
||||
|
||||
@trial_feature_enable
|
||||
@get_app_model_with_trial(None)
|
||||
def get(self, app_model):
|
||||
"""Retrieve app parameters."""
|
||||
@ -521,7 +520,6 @@ class TrialAppParameterApi(Resource):
|
||||
|
||||
|
||||
class AppApi(Resource):
|
||||
@trial_feature_enable
|
||||
@get_app_model_with_trial(None)
|
||||
@marshal_with(app_detail_with_site_model)
|
||||
def get(self, app_model):
|
||||
@ -534,7 +532,6 @@ class AppApi(Resource):
|
||||
|
||||
|
||||
class AppWorkflowApi(Resource):
|
||||
@trial_feature_enable
|
||||
@get_app_model_with_trial(None)
|
||||
@marshal_with(workflow_model)
|
||||
def get(self, app_model):
|
||||
@ -547,7 +544,6 @@ class AppWorkflowApi(Resource):
|
||||
|
||||
|
||||
class DatasetListApi(Resource):
|
||||
@trial_feature_enable
|
||||
@get_app_model_with_trial(None)
|
||||
def get(self, app_model):
|
||||
page = request.args.get("page", default=1, type=int)
|
||||
|
||||
1
api/controllers/console/socketio/__init__.py
Normal file
1
api/controllers/console/socketio/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
|
||||
108
api/controllers/console/socketio/workflow.py
Normal file
108
api/controllers/console/socketio/workflow.py
Normal file
@ -0,0 +1,108 @@
|
||||
import logging
|
||||
from collections.abc import Callable
|
||||
from typing import cast
|
||||
|
||||
from flask import Request as FlaskRequest
|
||||
|
||||
from extensions.ext_socketio import sio
|
||||
from libs.passport import PassportService
|
||||
from libs.token import extract_access_token
|
||||
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
|
||||
from services.account_service import AccountService
|
||||
from services.workflow_collaboration_service import WorkflowCollaborationService
|
||||
|
||||
repository = WorkflowCollaborationRepository()
|
||||
collaboration_service = WorkflowCollaborationService(repository, sio)
|
||||
|
||||
|
||||
def _sio_on(event: str) -> Callable[[Callable[..., object]], Callable[..., object]]:
|
||||
return cast(Callable[[Callable[..., object]], Callable[..., object]], sio.on(event))
|
||||
|
||||
|
||||
@_sio_on("connect")
|
||||
def socket_connect(sid, environ, auth):
|
||||
"""
|
||||
WebSocket connect event, do authentication here.
|
||||
"""
|
||||
try:
|
||||
request_environ = FlaskRequest(environ)
|
||||
token = extract_access_token(request_environ)
|
||||
except Exception:
|
||||
logging.exception("Failed to extract token")
|
||||
token = None
|
||||
|
||||
if not token:
|
||||
logging.warning("Socket connect rejected: missing token (sid=%s)", sid)
|
||||
return False
|
||||
|
||||
try:
|
||||
decoded = PassportService().verify(token)
|
||||
user_id = decoded.get("user_id")
|
||||
if not user_id:
|
||||
logging.warning("Socket connect rejected: missing user_id (sid=%s)", sid)
|
||||
return False
|
||||
|
||||
with sio.app.app_context():
|
||||
user = AccountService.load_logged_in_account(account_id=user_id)
|
||||
if not user:
|
||||
logging.warning("Socket connect rejected: user not found (user_id=%s, sid=%s)", user_id, sid)
|
||||
return False
|
||||
if not user.has_edit_permission:
|
||||
logging.warning("Socket connect rejected: no edit permission (user_id=%s, sid=%s)", user_id, sid)
|
||||
return False
|
||||
|
||||
collaboration_service.save_socket_identity(sid, user)
|
||||
return True
|
||||
|
||||
except Exception:
|
||||
logging.exception("Socket authentication failed")
|
||||
return False
|
||||
|
||||
|
||||
@_sio_on("user_connect")
|
||||
def handle_user_connect(sid, data):
|
||||
"""
|
||||
Handle user connect event. Each session (tab) is treated as an independent collaborator.
|
||||
"""
|
||||
workflow_id = data.get("workflow_id")
|
||||
if not workflow_id:
|
||||
return {"msg": "workflow_id is required"}, 400
|
||||
|
||||
result = collaboration_service.authorize_and_join_workflow_room(workflow_id, sid)
|
||||
if not result:
|
||||
return {"msg": "unauthorized"}, 401
|
||||
|
||||
user_id, is_leader = result
|
||||
return {"msg": "connected", "user_id": user_id, "sid": sid, "isLeader": is_leader}
|
||||
|
||||
|
||||
@_sio_on("disconnect")
|
||||
def handle_disconnect(sid):
|
||||
"""
|
||||
Handle session disconnect event. Remove the specific session from online users.
|
||||
"""
|
||||
collaboration_service.disconnect_session(sid)
|
||||
|
||||
|
||||
@_sio_on("collaboration_event")
|
||||
def handle_collaboration_event(sid, data):
|
||||
"""
|
||||
Handle general collaboration events, include:
|
||||
1. mouse_move
|
||||
2. vars_and_features_update
|
||||
3. sync_request (ask leader to update graph)
|
||||
4. app_state_update
|
||||
5. mcp_server_update
|
||||
6. workflow_update
|
||||
7. comments_update
|
||||
8. node_panel_presence
|
||||
"""
|
||||
return collaboration_service.relay_collaboration_event(sid, data)
|
||||
|
||||
|
||||
@_sio_on("graph_event")
|
||||
def handle_graph_event(sid, data):
|
||||
"""
|
||||
Handle graph events - simple broadcast relay.
|
||||
"""
|
||||
return collaboration_service.relay_graph_event(sid, data)
|
||||
@ -1,13 +1,14 @@
|
||||
from typing import Literal
|
||||
|
||||
from flask import request
|
||||
from flask_restx import Namespace, Resource, fields, marshal_with
|
||||
from pydantic import BaseModel, Field
|
||||
from flask_restx import Resource
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
from werkzeug.exceptions import Forbidden
|
||||
|
||||
from controllers.common.schema import register_schema_models
|
||||
from controllers.console import console_ns
|
||||
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
|
||||
from fields.base import ResponseModel
|
||||
from libs.login import current_account_with_tenant, login_required
|
||||
from models.enums import TagType
|
||||
from services.tag_service import (
|
||||
@ -18,17 +19,6 @@ from services.tag_service import (
|
||||
UpdateTagPayload,
|
||||
)
|
||||
|
||||
dataset_tag_fields = {
|
||||
"id": fields.String,
|
||||
"name": fields.String,
|
||||
"type": fields.String,
|
||||
"binding_count": fields.String,
|
||||
}
|
||||
|
||||
|
||||
def build_dataset_tag_fields(api_or_ns: Namespace):
|
||||
return api_or_ns.model("DataSetTag", dataset_tag_fields)
|
||||
|
||||
|
||||
class TagBasePayload(BaseModel):
|
||||
name: str = Field(description="Tag name", min_length=1, max_length=50)
|
||||
@ -52,12 +42,36 @@ class TagListQueryParam(BaseModel):
|
||||
keyword: str | None = Field(None, description="Search keyword")
|
||||
|
||||
|
||||
class TagResponse(ResponseModel):
|
||||
id: str
|
||||
name: str
|
||||
type: str | None = None
|
||||
binding_count: str | None = None
|
||||
|
||||
@field_validator("type", mode="before")
|
||||
@classmethod
|
||||
def normalize_type(cls, value: TagType | str | None) -> str | None:
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, TagType):
|
||||
return value.value
|
||||
return value
|
||||
|
||||
@field_validator("binding_count", mode="before")
|
||||
@classmethod
|
||||
def normalize_binding_count(cls, value: int | str | None) -> str | None:
|
||||
if value is None:
|
||||
return None
|
||||
return str(value)
|
||||
|
||||
|
||||
register_schema_models(
|
||||
console_ns,
|
||||
TagBasePayload,
|
||||
TagBindingPayload,
|
||||
TagBindingRemovePayload,
|
||||
TagListQueryParam,
|
||||
TagResponse,
|
||||
)
|
||||
|
||||
|
||||
@ -69,14 +83,18 @@ class TagListApi(Resource):
|
||||
@console_ns.doc(
|
||||
params={"type": 'Tag type filter. Can be "knowledge" or "app".', "keyword": "Search keyword for tag name."}
|
||||
)
|
||||
@marshal_with(dataset_tag_fields)
|
||||
@console_ns.doc(responses={200: ("Success", [console_ns.models[TagResponse.__name__]])})
|
||||
def get(self):
|
||||
_, current_tenant_id = current_account_with_tenant()
|
||||
raw_args = request.args.to_dict()
|
||||
param = TagListQueryParam.model_validate(raw_args)
|
||||
tags = TagService.get_tags(param.type, current_tenant_id, param.keyword)
|
||||
|
||||
return tags, 200
|
||||
serialized_tags = [
|
||||
TagResponse.model_validate(tag, from_attributes=True).model_dump(mode="json") for tag in tags
|
||||
]
|
||||
|
||||
return serialized_tags, 200
|
||||
|
||||
@console_ns.expect(console_ns.models[TagBasePayload.__name__])
|
||||
@setup_required
|
||||
@ -91,7 +109,9 @@ class TagListApi(Resource):
|
||||
payload = TagBasePayload.model_validate(console_ns.payload or {})
|
||||
tag = TagService.save_tags(SaveTagPayload(name=payload.name, type=payload.type))
|
||||
|
||||
response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": 0}
|
||||
response = TagResponse.model_validate(
|
||||
{"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": 0}
|
||||
).model_dump(mode="json")
|
||||
|
||||
return response, 200
|
||||
|
||||
@ -114,7 +134,9 @@ class TagUpdateDeleteApi(Resource):
|
||||
|
||||
binding_count = TagService.get_tag_binding_count(tag_id)
|
||||
|
||||
response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": binding_count}
|
||||
response = TagResponse.model_validate(
|
||||
{"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": binding_count}
|
||||
).model_dump(mode="json")
|
||||
|
||||
return response, 200
|
||||
|
||||
|
||||
@ -6,6 +6,7 @@ from typing import Any, Literal
|
||||
import pytz
|
||||
from flask import request
|
||||
from flask_restx import Resource
|
||||
from graphon.file import helpers as file_helpers
|
||||
from pydantic import BaseModel, Field, field_validator, model_validator
|
||||
from sqlalchemy import select
|
||||
|
||||
@ -75,6 +76,10 @@ class AccountAvatarPayload(BaseModel):
|
||||
avatar: str
|
||||
|
||||
|
||||
class AccountAvatarQuery(BaseModel):
|
||||
avatar: str = Field(..., description="Avatar file ID")
|
||||
|
||||
|
||||
class AccountInterfaceLanguagePayload(BaseModel):
|
||||
interface_language: str
|
||||
|
||||
@ -160,6 +165,7 @@ def reg(cls: type[BaseModel]):
|
||||
reg(AccountInitPayload)
|
||||
reg(AccountNamePayload)
|
||||
reg(AccountAvatarPayload)
|
||||
reg(AccountAvatarQuery)
|
||||
reg(AccountInterfaceLanguagePayload)
|
||||
reg(AccountInterfaceThemePayload)
|
||||
reg(AccountTimezonePayload)
|
||||
@ -309,6 +315,18 @@ class AccountNameApi(Resource):
|
||||
|
||||
@console_ns.route("/account/avatar")
|
||||
class AccountAvatarApi(Resource):
|
||||
@console_ns.expect(console_ns.models[AccountAvatarQuery.__name__])
|
||||
@console_ns.doc("get_account_avatar")
|
||||
@console_ns.doc(description="Get account avatar url")
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def get(self):
|
||||
args = AccountAvatarQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
||||
|
||||
avatar_url = file_helpers.get_signed_file_url(args.avatar)
|
||||
return {"avatar_url": avatar_url}
|
||||
|
||||
@console_ns.expect(console_ns.models[AccountAvatarPayload.__name__])
|
||||
@setup_required
|
||||
@login_required
|
||||
|
||||
@ -35,10 +35,10 @@ if [[ "${MODE}" == "worker" ]]; then
|
||||
if [[ -z "${CELERY_QUEUES}" ]]; then
|
||||
if [[ "${EDITION}" == "CLOUD" ]]; then
|
||||
# Cloud edition: separate queues for dataset and trigger tasks
|
||||
DEFAULT_QUEUES="api_token,dataset,dataset_summary,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
|
||||
DEFAULT_QUEUES="api_token,dataset,dataset_summary,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_publisher,trigger_refresh_executor,retention,workflow_based_app_execution"
|
||||
else
|
||||
# Community edition (SELF_HOSTED): dataset, pipeline and workflow have separate queues
|
||||
DEFAULT_QUEUES="api_token,dataset,dataset_summary,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
|
||||
DEFAULT_QUEUES="api_token,dataset,dataset_summary,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_publisher,trigger_refresh_executor,retention,workflow_based_app_execution"
|
||||
fi
|
||||
else
|
||||
DEFAULT_QUEUES="${CELERY_QUEUES}"
|
||||
@ -119,14 +119,16 @@ elif [[ "${MODE}" == "job" ]]; then
|
||||
|
||||
else
|
||||
if [[ "${DEBUG}" == "true" ]]; then
|
||||
exec flask run --host=${DIFY_BIND_ADDRESS:-0.0.0.0} --port=${DIFY_PORT:-5001} --debug
|
||||
export HOST=${DIFY_BIND_ADDRESS:-0.0.0.0}
|
||||
export PORT=${DIFY_PORT:-5001}
|
||||
exec python -m app
|
||||
else
|
||||
exec gunicorn \
|
||||
--bind "${DIFY_BIND_ADDRESS:-0.0.0.0}:${DIFY_PORT:-5001}" \
|
||||
--workers ${SERVER_WORKER_AMOUNT:-1} \
|
||||
--worker-class ${SERVER_WORKER_CLASS:-gevent} \
|
||||
--worker-class ${SERVER_WORKER_CLASS:-geventwebsocket.gunicorn.workers.GeventWebSocketWorker} \
|
||||
--worker-connections ${SERVER_WORKER_CONNECTIONS:-10} \
|
||||
--timeout ${GUNICORN_TIMEOUT:-200} \
|
||||
app:app
|
||||
app:socketio_app
|
||||
fi
|
||||
fi
|
||||
|
||||
5
api/extensions/ext_socketio.py
Normal file
5
api/extensions/ext_socketio.py
Normal file
@ -0,0 +1,5 @@
|
||||
import socketio # type: ignore[reportMissingTypeStubs]
|
||||
|
||||
from configs import dify_config
|
||||
|
||||
sio = socketio.Server(async_mode="gevent", cors_allowed_origins=dify_config.CONSOLE_CORS_ALLOW_ORIGINS)
|
||||
16
api/fields/online_user_fields.py
Normal file
16
api/fields/online_user_fields.py
Normal file
@ -0,0 +1,16 @@
|
||||
from flask_restx import fields
|
||||
|
||||
online_user_partial_fields = {
|
||||
"user_id": fields.String,
|
||||
"username": fields.String,
|
||||
"avatar": fields.String,
|
||||
}
|
||||
|
||||
workflow_online_users_fields = {
|
||||
"app_id": fields.String,
|
||||
"users": fields.List(fields.Nested(online_user_partial_fields)),
|
||||
}
|
||||
|
||||
online_user_list_fields = {
|
||||
"data": fields.List(fields.Nested(workflow_online_users_fields)),
|
||||
}
|
||||
96
api/fields/workflow_comment_fields.py
Normal file
96
api/fields/workflow_comment_fields.py
Normal file
@ -0,0 +1,96 @@
|
||||
from flask_restx import fields
|
||||
|
||||
from libs.helper import AvatarUrlField, TimestampField
|
||||
|
||||
# basic account fields for comments
|
||||
account_fields = {
|
||||
"id": fields.String,
|
||||
"name": fields.String,
|
||||
"email": fields.String,
|
||||
"avatar_url": AvatarUrlField,
|
||||
}
|
||||
|
||||
# Comment mention fields
|
||||
workflow_comment_mention_fields = {
|
||||
"mentioned_user_id": fields.String,
|
||||
"mentioned_user_account": fields.Nested(account_fields, allow_null=True),
|
||||
"reply_id": fields.String,
|
||||
}
|
||||
|
||||
# Comment reply fields
|
||||
workflow_comment_reply_fields = {
|
||||
"id": fields.String,
|
||||
"content": fields.String,
|
||||
"created_by": fields.String,
|
||||
"created_by_account": fields.Nested(account_fields, allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
# Basic comment fields (for list views)
|
||||
workflow_comment_basic_fields = {
|
||||
"id": fields.String,
|
||||
"position_x": fields.Float,
|
||||
"position_y": fields.Float,
|
||||
"content": fields.String,
|
||||
"created_by": fields.String,
|
||||
"created_by_account": fields.Nested(account_fields, allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
"updated_at": TimestampField,
|
||||
"resolved": fields.Boolean,
|
||||
"resolved_at": TimestampField,
|
||||
"resolved_by": fields.String,
|
||||
"resolved_by_account": fields.Nested(account_fields, allow_null=True),
|
||||
"reply_count": fields.Integer,
|
||||
"mention_count": fields.Integer,
|
||||
"participants": fields.List(fields.Nested(account_fields)),
|
||||
}
|
||||
|
||||
# Detailed comment fields (for single comment view)
|
||||
workflow_comment_detail_fields = {
|
||||
"id": fields.String,
|
||||
"position_x": fields.Float,
|
||||
"position_y": fields.Float,
|
||||
"content": fields.String,
|
||||
"created_by": fields.String,
|
||||
"created_by_account": fields.Nested(account_fields, allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
"updated_at": TimestampField,
|
||||
"resolved": fields.Boolean,
|
||||
"resolved_at": TimestampField,
|
||||
"resolved_by": fields.String,
|
||||
"resolved_by_account": fields.Nested(account_fields, allow_null=True),
|
||||
"replies": fields.List(fields.Nested(workflow_comment_reply_fields)),
|
||||
"mentions": fields.List(fields.Nested(workflow_comment_mention_fields)),
|
||||
}
|
||||
|
||||
# Comment creation response fields (simplified)
|
||||
workflow_comment_create_fields = {
|
||||
"id": fields.String,
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
# Comment update response fields (simplified)
|
||||
workflow_comment_update_fields = {
|
||||
"id": fields.String,
|
||||
"updated_at": TimestampField,
|
||||
}
|
||||
|
||||
# Comment resolve response fields
|
||||
workflow_comment_resolve_fields = {
|
||||
"id": fields.String,
|
||||
"resolved": fields.Boolean,
|
||||
"resolved_at": TimestampField,
|
||||
"resolved_by": fields.String,
|
||||
}
|
||||
|
||||
# Reply creation response fields (simplified)
|
||||
workflow_comment_reply_create_fields = {
|
||||
"id": fields.String,
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
# Reply update response fields
|
||||
workflow_comment_reply_update_fields = {
|
||||
"id": fields.String,
|
||||
"updated_at": TimestampField,
|
||||
}
|
||||
@ -37,6 +37,7 @@ class EmailType(StrEnum):
|
||||
ENTERPRISE_CUSTOM = auto()
|
||||
QUEUE_MONITOR_ALERT = auto()
|
||||
DOCUMENT_CLEAN_NOTIFY = auto()
|
||||
WORKFLOW_COMMENT_MENTION = auto()
|
||||
EMAIL_REGISTER = auto()
|
||||
EMAIL_REGISTER_WHEN_ACCOUNT_EXIST = auto()
|
||||
RESET_PASSWORD_WHEN_ACCOUNT_NOT_EXIST_NO_REGISTER = auto()
|
||||
@ -453,6 +454,18 @@ def create_default_email_config() -> EmailI18nConfig:
|
||||
branded_template_path="clean_document_job_mail_template_zh-CN.html",
|
||||
),
|
||||
},
|
||||
EmailType.WORKFLOW_COMMENT_MENTION: {
|
||||
EmailLanguage.EN_US: EmailTemplate(
|
||||
subject="You were mentioned in a workflow comment",
|
||||
template_path="workflow_comment_mention_template_en-US.html",
|
||||
branded_template_path="without-brand/workflow_comment_mention_template_en-US.html",
|
||||
),
|
||||
EmailLanguage.ZH_HANS: EmailTemplate(
|
||||
subject="你在工作流评论中被提及",
|
||||
template_path="workflow_comment_mention_template_zh-CN.html",
|
||||
branded_template_path="without-brand/workflow_comment_mention_template_zh-CN.html",
|
||||
),
|
||||
},
|
||||
EmailType.TRIGGER_EVENTS_LIMIT_SANDBOX: {
|
||||
EmailLanguage.EN_US: EmailTemplate(
|
||||
subject="You’ve reached your Sandbox Trigger Events limit",
|
||||
|
||||
@ -0,0 +1,26 @@
|
||||
"""add qdrant_endpoint to tidb_auth_bindings
|
||||
|
||||
Revision ID: 8574b23a38fd
|
||||
Revises: 6b5f9f8b1a2c
|
||||
Create Date: 2026-04-14 15:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "8574b23a38fd"
|
||||
down_revision = "6b5f9f8b1a2c"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
with op.batch_alter_table("tidb_auth_bindings", schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column("qdrant_endpoint", sa.String(length=512), nullable=True))
|
||||
|
||||
|
||||
def downgrade():
|
||||
with op.batch_alter_table("tidb_auth_bindings", schema=None) as batch_op:
|
||||
batch_op.drop_column("qdrant_endpoint")
|
||||
@ -0,0 +1,90 @@
|
||||
"""Add workflow comments table
|
||||
|
||||
Revision ID: 227822d22895
|
||||
Revises: 8574b23a38fd
|
||||
Create Date: 2025-08-22 17:26:15.255980
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '227822d22895'
|
||||
down_revision = '8574b23a38fd'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('workflow_comments',
|
||||
sa.Column('id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('app_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('position_x', sa.Float(), nullable=False),
|
||||
sa.Column('position_y', sa.Float(), nullable=False),
|
||||
sa.Column('content', sa.Text(), nullable=False),
|
||||
sa.Column('created_by', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
|
||||
sa.Column('resolved', sa.Boolean(), server_default=sa.text('false'), nullable=False),
|
||||
sa.Column('resolved_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('resolved_by', models.types.StringUUID(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id', name='workflow_comments_pkey')
|
||||
)
|
||||
with op.batch_alter_table('workflow_comments', schema=None) as batch_op:
|
||||
batch_op.create_index('workflow_comments_app_idx', ['tenant_id', 'app_id'], unique=False)
|
||||
batch_op.create_index('workflow_comments_created_at_idx', ['created_at'], unique=False)
|
||||
|
||||
op.create_table('workflow_comment_replies',
|
||||
sa.Column('id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('comment_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('content', sa.Text(), nullable=False),
|
||||
sa.Column('created_by', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
|
||||
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
|
||||
sa.ForeignKeyConstraint(['comment_id'], ['workflow_comments.id'], name=op.f('workflow_comment_replies_comment_id_fkey'), ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id', name='workflow_comment_replies_pkey')
|
||||
)
|
||||
with op.batch_alter_table('workflow_comment_replies', schema=None) as batch_op:
|
||||
batch_op.create_index('comment_replies_comment_idx', ['comment_id'], unique=False)
|
||||
batch_op.create_index('comment_replies_created_at_idx', ['created_at'], unique=False)
|
||||
|
||||
op.create_table('workflow_comment_mentions',
|
||||
sa.Column('id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('comment_id', models.types.StringUUID(), nullable=False),
|
||||
sa.Column('reply_id', models.types.StringUUID(), nullable=True),
|
||||
sa.Column('mentioned_user_id', models.types.StringUUID(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['comment_id'], ['workflow_comments.id'], name=op.f('workflow_comment_mentions_comment_id_fkey'), ondelete='CASCADE'),
|
||||
sa.ForeignKeyConstraint(['reply_id'], ['workflow_comment_replies.id'], name=op.f('workflow_comment_mentions_reply_id_fkey'), ondelete='CASCADE'),
|
||||
sa.PrimaryKeyConstraint('id', name='workflow_comment_mentions_pkey')
|
||||
)
|
||||
with op.batch_alter_table('workflow_comment_mentions', schema=None) as batch_op:
|
||||
batch_op.create_index('comment_mentions_comment_idx', ['comment_id'], unique=False)
|
||||
batch_op.create_index('comment_mentions_reply_idx', ['reply_id'], unique=False)
|
||||
batch_op.create_index('comment_mentions_user_idx', ['mentioned_user_id'], unique=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('workflow_comment_mentions', schema=None) as batch_op:
|
||||
batch_op.drop_index('comment_mentions_user_idx')
|
||||
batch_op.drop_index('comment_mentions_reply_idx')
|
||||
batch_op.drop_index('comment_mentions_comment_idx')
|
||||
|
||||
op.drop_table('workflow_comment_mentions')
|
||||
with op.batch_alter_table('workflow_comment_replies', schema=None) as batch_op:
|
||||
batch_op.drop_index('comment_replies_created_at_idx')
|
||||
batch_op.drop_index('comment_replies_comment_idx')
|
||||
|
||||
op.drop_table('workflow_comment_replies')
|
||||
with op.batch_alter_table('workflow_comments', schema=None) as batch_op:
|
||||
batch_op.drop_index('workflow_comments_created_at_idx')
|
||||
batch_op.drop_index('workflow_comments_app_idx')
|
||||
|
||||
op.drop_table('workflow_comments')
|
||||
# ### end Alembic commands ###
|
||||
@ -9,6 +9,11 @@ from .account import (
|
||||
TenantStatus,
|
||||
)
|
||||
from .api_based_extension import APIBasedExtension, APIBasedExtensionPoint
|
||||
from .comment import (
|
||||
WorkflowComment,
|
||||
WorkflowCommentMention,
|
||||
WorkflowCommentReply,
|
||||
)
|
||||
from .dataset import (
|
||||
AppDatasetJoin,
|
||||
Dataset,
|
||||
@ -208,6 +213,9 @@ __all__ = [
|
||||
"WorkflowAppLog",
|
||||
"WorkflowAppLogCreatedFrom",
|
||||
"WorkflowArchiveLog",
|
||||
"WorkflowComment",
|
||||
"WorkflowCommentMention",
|
||||
"WorkflowCommentReply",
|
||||
"WorkflowNodeExecutionModel",
|
||||
"WorkflowNodeExecutionOffload",
|
||||
"WorkflowNodeExecutionTriggeredFrom",
|
||||
|
||||
218
api/models/comment.py
Normal file
218
api/models/comment.py
Normal file
@ -0,0 +1,218 @@
|
||||
"""Workflow comment models."""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import Index, func
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from .account import Account
|
||||
from .base import Base
|
||||
from .engine import db
|
||||
from .types import StringUUID
|
||||
|
||||
|
||||
class WorkflowComment(Base):
|
||||
"""Workflow comment model for canvas commenting functionality.
|
||||
|
||||
Comments are associated with apps rather than specific workflow versions,
|
||||
since an app has only one draft workflow at a time and comments should persist
|
||||
across workflow version changes.
|
||||
|
||||
Attributes:
|
||||
id: Comment ID
|
||||
tenant_id: Workspace ID
|
||||
app_id: App ID (primary association, comments belong to apps)
|
||||
position_x: X coordinate on canvas
|
||||
position_y: Y coordinate on canvas
|
||||
content: Comment content
|
||||
created_by: Creator account ID
|
||||
created_at: Creation time
|
||||
updated_at: Last update time
|
||||
resolved: Whether comment is resolved
|
||||
resolved_at: Resolution time
|
||||
resolved_by: Resolver account ID
|
||||
"""
|
||||
|
||||
__tablename__ = "workflow_comments"
|
||||
__table_args__ = (
|
||||
db.PrimaryKeyConstraint("id", name="workflow_comments_pkey"),
|
||||
Index("workflow_comments_app_idx", "tenant_id", "app_id"),
|
||||
Index("workflow_comments_created_at_idx", "created_at"),
|
||||
)
|
||||
|
||||
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
|
||||
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
|
||||
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
|
||||
position_x: Mapped[float] = mapped_column(db.Float)
|
||||
position_y: Mapped[float] = mapped_column(db.Float)
|
||||
content: Mapped[str] = mapped_column(db.Text, nullable=False)
|
||||
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
|
||||
created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
|
||||
updated_at: Mapped[datetime] = mapped_column(
|
||||
db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
|
||||
)
|
||||
resolved: Mapped[bool] = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
|
||||
resolved_at: Mapped[datetime | None] = mapped_column(db.DateTime)
|
||||
resolved_by: Mapped[str | None] = mapped_column(StringUUID)
|
||||
|
||||
# Relationships
|
||||
replies: Mapped[list["WorkflowCommentReply"]] = relationship(
|
||||
"WorkflowCommentReply", back_populates="comment", cascade="all, delete-orphan"
|
||||
)
|
||||
mentions: Mapped[list["WorkflowCommentMention"]] = relationship(
|
||||
"WorkflowCommentMention", back_populates="comment", cascade="all, delete-orphan"
|
||||
)
|
||||
|
||||
@property
def created_by_account(self):
    """Account that created this comment, preferring a preloaded cache.

    Falls back to a primary-key lookup through the active session when
    no value has been stored via ``cache_created_by_account``.
    """
    try:
        return self._created_by_account_cache
    except AttributeError:
        return db.session.get(Account, self.created_by)
|
||||
|
||||
def cache_created_by_account(self, account: Account | None) -> None:
    """Store *account* so later reads of ``created_by_account`` skip the database."""
    self._created_by_account_cache = account
|
||||
|
||||
@property
def resolved_by_account(self):
    """Account that resolved this comment, or None while unresolved.

    A value cached via ``cache_resolved_by_account`` wins over a fresh
    lookup; without a cache the session is queried only when
    ``resolved_by`` is set.
    """
    try:
        return self._resolved_by_account_cache
    except AttributeError:
        pass
    return db.session.get(Account, self.resolved_by) if self.resolved_by else None
|
||||
|
||||
def cache_resolved_by_account(self, account: Account | None) -> None:
    """Store *account* so later reads of ``resolved_by_account`` skip the database."""
    self._resolved_by_account_cache = account
|
||||
|
||||
@property
def reply_count(self):
    """Number of replies currently attached to this comment."""
    return len(self.replies)
|
||||
|
||||
@property
def mention_count(self):
    """Number of account mentions attached to this comment."""
    return len(self.mentions)
|
||||
|
||||
@property
def participants(self):
    """All accounts involved with this comment, in first-occurrence order.

    Walks the creator, then reply authors, then mentioned users, and
    de-duplicates by account id. Lookups go through the cached
    ``*_account`` properties so preloaded values are reused and no
    extra query is issued for an id that was already seen.
    """
    seen_ids: set[str] = set()
    accounts: list[Account] = []

    def _add(account_id: str, fetch_account) -> None:
        # Record the id even when the account row is missing so the
        # same id is never fetched (or appended) twice.
        if account_id in seen_ids:
            return
        seen_ids.add(account_id)
        account = fetch_account()
        if account:
            accounts.append(account)

    # fetch_account is a thunk so the (possibly DB-backed) property is
    # only evaluated for ids that pass the de-duplication check.
    _add(self.created_by, lambda: self.created_by_account)
    for reply in self.replies:
        _add(reply.created_by, lambda: reply.created_by_account)
    for mention in self.mentions:
        _add(mention.mentioned_user_id, lambda: mention.mentioned_user_account)

    return accounts
|
||||
|
||||
|
||||
class WorkflowCommentReply(Base):
    """A threaded reply under a workflow canvas comment.

    Attributes:
        id: Reply ID
        comment_id: Parent comment ID
        content: Reply content
        created_by: Creator account ID
        created_at: Creation time
    """

    __tablename__ = "workflow_comment_replies"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="workflow_comment_replies_pkey"),
        Index("comment_replies_comment_idx", "comment_id"),
        Index("comment_replies_created_at_idx", "created_at"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
    # Replies are removed together with their parent comment (ON DELETE CASCADE).
    comment_id: Mapped[str] = mapped_column(
        StringUUID, db.ForeignKey("workflow_comments.id", ondelete="CASCADE"), nullable=False
    )
    content: Mapped[str] = mapped_column(db.Text, nullable=False)
    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
    created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
    updated_at: Mapped[datetime] = mapped_column(
        db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
    )

    # Relationships
    comment: Mapped["WorkflowComment"] = relationship("WorkflowComment", back_populates="replies")

    @property
    def created_by_account(self):
        """Account that wrote this reply, preferring a preloaded cache."""
        try:
            return self._created_by_account_cache
        except AttributeError:
            return db.session.get(Account, self.created_by)

    def cache_created_by_account(self, account: Account | None) -> None:
        """Store *account* so later property reads skip the database."""
        self._created_by_account_cache = account
|
||||
|
||||
|
||||
class WorkflowCommentMention(Base):
    """Mention of a workspace member inside a workflow comment or reply.

    Mentions are only for internal accounts since end users
    cannot access workflow canvas and commenting features.

    Attributes:
        id: Mention ID
        comment_id: Parent comment ID
        mentioned_user_id: Mentioned account ID
    """

    __tablename__ = "workflow_comment_mentions"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="workflow_comment_mentions_pkey"),
        Index("comment_mentions_comment_idx", "comment_id"),
        Index("comment_mentions_reply_idx", "reply_id"),
        Index("comment_mentions_user_idx", "mentioned_user_id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
    # Removed together with the parent comment (ON DELETE CASCADE).
    comment_id: Mapped[str] = mapped_column(
        StringUUID, db.ForeignKey("workflow_comments.id", ondelete="CASCADE"), nullable=False
    )
    # Set only when the mention occurred inside a reply rather than the
    # top-level comment body.
    reply_id: Mapped[str | None] = mapped_column(
        StringUUID, db.ForeignKey("workflow_comment_replies.id", ondelete="CASCADE"), nullable=True
    )
    mentioned_user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)

    # Relationships
    comment: Mapped["WorkflowComment"] = relationship("WorkflowComment", back_populates="mentions")
    reply: Mapped[Optional["WorkflowCommentReply"]] = relationship("WorkflowCommentReply")

    @property
    def mentioned_user_account(self):
        """Mentioned account, preferring a value preloaded via the cache setter."""
        try:
            return self._mentioned_user_account_cache
        except AttributeError:
            return db.session.get(Account, self.mentioned_user_id)

    def cache_mentioned_user_account(self, account: Account | None) -> None:
        """Store *account* so later property reads skip the database."""
        self._mentioned_user_account_cache = account
|
||||
@ -1305,6 +1305,7 @@ class TidbAuthBinding(TypeBase):
|
||||
)
|
||||
account: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||
password: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||
qdrant_endpoint: Mapped[str | None] = mapped_column(String(512), nullable=True, default=None)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
|
||||
)
|
||||
|
||||
@ -490,7 +490,7 @@ class Workflow(Base): # bug
|
||||
|
||||
:return: hash
|
||||
"""
|
||||
entity = {"graph": self.graph_dict, "features": self.features_dict}
|
||||
entity = {"graph": self.graph_dict}
|
||||
|
||||
return helper.generate_text_hash(json.dumps(entity, sort_keys=True))
|
||||
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import uuid
|
||||
from collections.abc import Generator, Iterable, Sequence
|
||||
@ -7,6 +8,8 @@ from typing import TYPE_CHECKING, Any
|
||||
|
||||
import httpx
|
||||
import qdrant_client
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
from flask import current_app
|
||||
from httpx import DigestAuth
|
||||
from pydantic import BaseModel
|
||||
@ -421,13 +424,16 @@ class TidbOnQdrantVector(BaseVector):
|
||||
|
||||
class TidbOnQdrantVectorFactory(AbstractVectorFactory):
|
||||
def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> TidbOnQdrantVector:
|
||||
logger.info("init_vector: tenant_id=%s, dataset_id=%s", dataset.tenant_id, dataset.id)
|
||||
stmt = select(TidbAuthBinding).where(TidbAuthBinding.tenant_id == dataset.tenant_id)
|
||||
tidb_auth_binding = db.session.scalars(stmt).one_or_none()
|
||||
if not tidb_auth_binding:
|
||||
logger.info("No existing TidbAuthBinding for tenant %s, acquiring lock", dataset.tenant_id)
|
||||
with redis_client.lock("create_tidb_serverless_cluster_lock", timeout=900):
|
||||
stmt = select(TidbAuthBinding).where(TidbAuthBinding.tenant_id == dataset.tenant_id)
|
||||
tidb_auth_binding = db.session.scalars(stmt).one_or_none()
|
||||
if tidb_auth_binding:
|
||||
logger.info("Found binding after lock: cluster_id=%s", tidb_auth_binding.cluster_id)
|
||||
TIDB_ON_QDRANT_API_KEY = f"{tidb_auth_binding.account}:{tidb_auth_binding.password}"
|
||||
|
||||
else:
|
||||
@ -437,11 +443,18 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory):
|
||||
.limit(1)
|
||||
)
|
||||
if idle_tidb_auth_binding:
|
||||
logger.info(
|
||||
"Assigning idle cluster %s to tenant %s",
|
||||
idle_tidb_auth_binding.cluster_id,
|
||||
dataset.tenant_id,
|
||||
)
|
||||
idle_tidb_auth_binding.active = True
|
||||
idle_tidb_auth_binding.tenant_id = dataset.tenant_id
|
||||
db.session.commit()
|
||||
tidb_auth_binding = idle_tidb_auth_binding
|
||||
TIDB_ON_QDRANT_API_KEY = f"{idle_tidb_auth_binding.account}:{idle_tidb_auth_binding.password}"
|
||||
else:
|
||||
logger.info("No idle clusters available, creating new cluster for tenant %s", dataset.tenant_id)
|
||||
new_cluster = TidbService.create_tidb_serverless_cluster(
|
||||
dify_config.TIDB_PROJECT_ID or "",
|
||||
dify_config.TIDB_API_URL or "",
|
||||
@ -450,21 +463,39 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory):
|
||||
dify_config.TIDB_PRIVATE_KEY or "",
|
||||
dify_config.TIDB_REGION or "",
|
||||
)
|
||||
logger.info(
|
||||
"New cluster created: cluster_id=%s, qdrant_endpoint=%s",
|
||||
new_cluster["cluster_id"],
|
||||
new_cluster.get("qdrant_endpoint"),
|
||||
)
|
||||
new_tidb_auth_binding = TidbAuthBinding(
|
||||
cluster_id=new_cluster["cluster_id"],
|
||||
cluster_name=new_cluster["cluster_name"],
|
||||
account=new_cluster["account"],
|
||||
password=new_cluster["password"],
|
||||
qdrant_endpoint=new_cluster.get("qdrant_endpoint"),
|
||||
tenant_id=dataset.tenant_id,
|
||||
active=True,
|
||||
status=TidbAuthBindingStatus.ACTIVE,
|
||||
)
|
||||
db.session.add(new_tidb_auth_binding)
|
||||
db.session.commit()
|
||||
tidb_auth_binding = new_tidb_auth_binding
|
||||
TIDB_ON_QDRANT_API_KEY = f"{new_tidb_auth_binding.account}:{new_tidb_auth_binding.password}"
|
||||
else:
|
||||
logger.info("Existing binding found: cluster_id=%s", tidb_auth_binding.cluster_id)
|
||||
TIDB_ON_QDRANT_API_KEY = f"{tidb_auth_binding.account}:{tidb_auth_binding.password}"
|
||||
|
||||
qdrant_url = (
|
||||
(tidb_auth_binding.qdrant_endpoint if tidb_auth_binding else None) or dify_config.TIDB_ON_QDRANT_URL or ""
|
||||
)
|
||||
logger.info(
|
||||
"Using qdrant endpoint: %s (from_binding=%s, fallback_global=%s)",
|
||||
qdrant_url,
|
||||
tidb_auth_binding.qdrant_endpoint if tidb_auth_binding else None,
|
||||
dify_config.TIDB_ON_QDRANT_URL,
|
||||
)
|
||||
|
||||
if dataset.index_struct_dict:
|
||||
class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
|
||||
collection_name = class_prefix
|
||||
@ -479,7 +510,7 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory):
|
||||
collection_name=collection_name,
|
||||
group_id=dataset.id,
|
||||
config=TidbOnQdrantConfig(
|
||||
endpoint=dify_config.TIDB_ON_QDRANT_URL or "",
|
||||
endpoint=qdrant_url,
|
||||
api_key=TIDB_ON_QDRANT_API_KEY,
|
||||
root_path=str(config.root_path),
|
||||
timeout=dify_config.TIDB_ON_QDRANT_CLIENT_TIMEOUT,
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
from collections.abc import Sequence
|
||||
@ -12,6 +13,8 @@ from extensions.ext_redis import redis_client
|
||||
from models.dataset import TidbAuthBinding
|
||||
from models.enums import TidbAuthBindingStatus
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Reuse a pooled HTTP client for all TiDB Cloud requests to minimize connection churn
|
||||
_tidb_http_client: httpx.Client = get_pooled_http_client(
|
||||
"tidb:cloud",
|
||||
@ -20,6 +23,46 @@ _tidb_http_client: httpx.Client = get_pooled_http_client(
|
||||
|
||||
|
||||
class TidbService:
|
||||
@staticmethod
|
||||
def extract_qdrant_endpoint(cluster_response: dict) -> str | None:
|
||||
"""Extract the qdrant endpoint URL from a Get Cluster API response.
|
||||
|
||||
Reads ``endpoints.public.host`` (e.g. ``gateway01.xx.tidbcloud.com``),
|
||||
prepends ``qdrant-`` and wraps it as an ``https://`` URL.
|
||||
"""
|
||||
endpoints = cluster_response.get("endpoints") or {}
|
||||
public = endpoints.get("public") or {}
|
||||
host = public.get("host")
|
||||
if host:
|
||||
return f"https://qdrant-{host}"
|
||||
return None
|
||||
|
||||
@staticmethod
def fetch_qdrant_endpoint(api_url: str, public_key: str, private_key: str, cluster_id: str) -> str | None:
    """Call the Get Cluster API and extract the cluster's qdrant endpoint.

    Prefer ``extract_qdrant_endpoint`` when the cluster response is
    already at hand, to avoid a redundant API round-trip. Any failure
    (API error, empty response, missing host) is logged and mapped to
    None rather than raised.
    """
    try:
        logger.info("Fetching qdrant endpoint for cluster %s", cluster_id)
        cluster_response = TidbService.get_tidb_serverless_cluster(api_url, public_key, private_key, cluster_id)
        if not cluster_response:
            logger.warning("Empty response from Get Cluster API for cluster %s", cluster_id)
        else:
            qdrant_url = TidbService.extract_qdrant_endpoint(cluster_response)
            if qdrant_url:
                logger.info("Resolved qdrant endpoint for cluster %s: %s", cluster_id, qdrant_url)
                return qdrant_url
            logger.warning(
                "No endpoints.public.host found for cluster %s, response keys: %s",
                cluster_id,
                list(cluster_response.keys()),
            )
    except Exception:
        logger.exception("Failed to fetch qdrant endpoint for cluster %s", cluster_id)
    return None
|
||||
|
||||
@staticmethod
|
||||
def create_tidb_serverless_cluster(
|
||||
project_id: str, api_url: str, iam_url: str, public_key: str, private_key: str, region: str
|
||||
@ -57,6 +100,7 @@ class TidbService:
|
||||
"rootPassword": password,
|
||||
}
|
||||
|
||||
logger.info("Creating TiDB serverless cluster: display_name=%s, region=%s", display_name, region)
|
||||
response = _tidb_http_client.post(
|
||||
f"{api_url}/clusters", json=cluster_data, auth=DigestAuth(public_key, private_key)
|
||||
)
|
||||
@ -64,21 +108,39 @@ class TidbService:
|
||||
if response.status_code == 200:
|
||||
response_data = response.json()
|
||||
cluster_id = response_data["clusterId"]
|
||||
logger.info("Cluster created, cluster_id=%s, waiting for ACTIVE state", cluster_id)
|
||||
retry_count = 0
|
||||
max_retries = 30
|
||||
while retry_count < max_retries:
|
||||
cluster_response = TidbService.get_tidb_serverless_cluster(api_url, public_key, private_key, cluster_id)
|
||||
if cluster_response["state"] == "ACTIVE":
|
||||
user_prefix = cluster_response["userPrefix"]
|
||||
qdrant_endpoint = TidbService.extract_qdrant_endpoint(cluster_response)
|
||||
logger.info(
|
||||
"Cluster %s is ACTIVE, user_prefix=%s, qdrant_endpoint=%s",
|
||||
cluster_id,
|
||||
user_prefix,
|
||||
qdrant_endpoint,
|
||||
)
|
||||
return {
|
||||
"cluster_id": cluster_id,
|
||||
"cluster_name": display_name,
|
||||
"account": f"{user_prefix}.root",
|
||||
"password": password,
|
||||
"qdrant_endpoint": qdrant_endpoint,
|
||||
}
|
||||
time.sleep(30) # wait 30 seconds before retrying
|
||||
logger.info(
|
||||
"Cluster %s state=%s, retry %d/%d",
|
||||
cluster_id,
|
||||
cluster_response["state"],
|
||||
retry_count + 1,
|
||||
max_retries,
|
||||
)
|
||||
time.sleep(30)
|
||||
retry_count += 1
|
||||
logger.error("Cluster %s did not become ACTIVE after %d retries", cluster_id, max_retries)
|
||||
else:
|
||||
logger.error("Failed to create cluster: status=%d, body=%s", response.status_code, response.text)
|
||||
response.raise_for_status()
|
||||
|
||||
@staticmethod
|
||||
@ -243,19 +305,29 @@ class TidbService:
|
||||
if response.status_code == 200:
|
||||
response_data = response.json()
|
||||
cluster_infos = []
|
||||
logger.info("Batch created %d clusters", len(response_data.get("clusters", [])))
|
||||
for item in response_data["clusters"]:
|
||||
cache_key = f"tidb_serverless_cluster_password:{item['displayName']}"
|
||||
cached_password = redis_client.get(cache_key)
|
||||
if not cached_password:
|
||||
logger.warning("No cached password for cluster %s, skipping", item["displayName"])
|
||||
continue
|
||||
qdrant_endpoint = TidbService.fetch_qdrant_endpoint(api_url, public_key, private_key, item["clusterId"])
|
||||
logger.info(
|
||||
"Batch cluster %s: qdrant_endpoint=%s",
|
||||
item["clusterId"],
|
||||
qdrant_endpoint,
|
||||
)
|
||||
cluster_info = {
|
||||
"cluster_id": item["clusterId"],
|
||||
"cluster_name": item["displayName"],
|
||||
"account": "root",
|
||||
"password": cached_password.decode("utf-8"),
|
||||
"qdrant_endpoint": qdrant_endpoint,
|
||||
}
|
||||
cluster_infos.append(cluster_info)
|
||||
return cluster_infos
|
||||
else:
|
||||
logger.error("Batch create failed: status=%d, body=%s", response.status_code, response.text)
|
||||
response.raise_for_status()
|
||||
return []
|
||||
|
||||
@ -114,14 +114,12 @@ class TestTidbOnQdrantVectorDeleteByIds:
|
||||
|
||||
assert exc_info.value.status_code == 500
|
||||
|
||||
def test_delete_by_ids_with_large_batch(self, vector_instance):
|
||||
"""Test deletion with a large batch of IDs."""
|
||||
# Create 1000 IDs
|
||||
def test_delete_by_ids_with_exactly_1000(self, vector_instance):
|
||||
"""Test deletion with exactly 1000 IDs triggers a single batch."""
|
||||
ids = [f"doc_{i}" for i in range(1000)]
|
||||
|
||||
vector_instance.delete_by_ids(ids)
|
||||
|
||||
# Verify single delete call with all IDs
|
||||
vector_instance._client.delete.assert_called_once()
|
||||
call_args = vector_instance._client.delete.call_args
|
||||
|
||||
@ -129,11 +127,28 @@ class TestTidbOnQdrantVectorDeleteByIds:
|
||||
filter_obj = filter_selector.filter
|
||||
field_condition = filter_obj.must[0]
|
||||
|
||||
# Verify all 1000 IDs are in the batch
|
||||
assert len(field_condition.match.any) == 1000
|
||||
assert "doc_0" in field_condition.match.any
|
||||
assert "doc_999" in field_condition.match.any
|
||||
|
||||
def test_delete_by_ids_splits_into_batches(self, vector_instance):
    """Deleting more than 1000 ids must issue one delete call per 1000-id chunk."""
    ids = [f"doc_{i}" for i in range(2500)]

    vector_instance.delete_by_ids(ids)

    delete_mock = vector_instance._client.delete
    assert delete_mock.call_count == 3

    # Pull the id list out of each recorded call's points_selector filter.
    chunk_sizes = []
    for recorded_call in delete_mock.call_args_list:
        selector = recorded_call[1]["points_selector"]
        chunk_sizes.append(len(selector.filter.must[0].match.any))

    assert chunk_sizes == [1000, 1000, 500]
|
||||
|
||||
def test_delete_by_ids_filter_structure(self, vector_instance):
|
||||
"""Test that the filter structure is correctly constructed."""
|
||||
ids = ["doc1", "doc2"]
|
||||
@ -157,3 +172,57 @@ class TestTidbOnQdrantVectorDeleteByIds:
|
||||
# Verify MatchAny structure
|
||||
assert isinstance(field_condition.match, rest.MatchAny)
|
||||
assert field_condition.match.any == ids
|
||||
|
||||
|
||||
class TestInitVectorEndpointSelection:
    """Test that init_vector selects the correct qdrant endpoint.

    We avoid importing the full module (which triggers Flask app context)
    by testing the endpoint selection logic directly on TidbOnQdrantConfig.
    """

    @staticmethod
    def _select(binding_endpoint, global_url):
        # Mirrors init_vector's fallback chain: binding value first, then
        # the global config URL, then an empty string.
        return binding_endpoint or global_url or ""

    def test_uses_binding_endpoint_when_present(self):
        chosen = self._select("https://qdrant-custom.tidb.com", "https://qdrant-global.tidb.com")
        assert chosen == "https://qdrant-custom.tidb.com"
        assert TidbOnQdrantConfig(endpoint=chosen).endpoint == "https://qdrant-custom.tidb.com"

    def test_falls_back_to_global_when_binding_endpoint_is_none(self):
        chosen = self._select(None, "https://qdrant-global.tidb.com")
        assert chosen == "https://qdrant-global.tidb.com"
        assert TidbOnQdrantConfig(endpoint=chosen).endpoint == "https://qdrant-global.tidb.com"

    def test_falls_back_to_empty_when_both_none(self):
        chosen = self._select(None, None)
        assert chosen == ""
        assert TidbOnQdrantConfig(endpoint=chosen).endpoint == ""

    def test_binding_endpoint_takes_precedence_over_global(self):
        chosen = self._select("https://qdrant-ap-southeast.tidb.com", "https://qdrant-us-east.tidb.com")
        assert chosen == "https://qdrant-ap-southeast.tidb.com"

    def test_empty_string_binding_endpoint_falls_back_to_global(self):
        # An empty string is falsy, so it must not shadow the global URL.
        chosen = self._select("", "https://qdrant-global.tidb.com")
        assert chosen == "https://qdrant-global.tidb.com"
|
||||
|
||||
@ -0,0 +1,218 @@
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from dify_vdb_tidb_on_qdrant.tidb_service import TidbService
|
||||
|
||||
|
||||
class TestExtractQdrantEndpoint:
    """Unit tests for TidbService.extract_qdrant_endpoint."""

    def test_returns_endpoint_when_host_present(self):
        payload = {"endpoints": {"public": {"host": "gateway01.us-east-1.tidbcloud.com", "port": 4000}}}
        assert TidbService.extract_qdrant_endpoint(payload) == "https://qdrant-gateway01.us-east-1.tidbcloud.com"

    def test_returns_none_when_host_missing(self):
        assert TidbService.extract_qdrant_endpoint({"endpoints": {"public": {}}}) is None

    def test_returns_none_when_public_missing(self):
        assert TidbService.extract_qdrant_endpoint({"endpoints": {}}) is None

    def test_returns_none_when_endpoints_missing(self):
        assert TidbService.extract_qdrant_endpoint({}) is None
|
||||
|
||||
|
||||
class TestFetchQdrantEndpoint:
    """Unit tests for TidbService.fetch_qdrant_endpoint."""

    def test_returns_endpoint_when_host_present(self):
        response = {"endpoints": {"public": {"host": "gateway01.us-east-1.tidbcloud.com", "port": 4000}}}
        with patch.object(TidbService, "get_tidb_serverless_cluster", return_value=response):
            assert (
                TidbService.fetch_qdrant_endpoint("url", "pub", "priv", "c-123")
                == "https://qdrant-gateway01.us-east-1.tidbcloud.com"
            )

    def test_returns_none_when_cluster_response_is_none(self):
        with patch.object(TidbService, "get_tidb_serverless_cluster", return_value=None):
            assert TidbService.fetch_qdrant_endpoint("url", "pub", "priv", "c-123") is None

    def test_returns_none_when_host_missing(self):
        with patch.object(TidbService, "get_tidb_serverless_cluster", return_value={"endpoints": {"public": {}}}):
            assert TidbService.fetch_qdrant_endpoint("url", "pub", "priv", "c-123") is None

    def test_returns_none_when_endpoints_missing(self):
        with patch.object(TidbService, "get_tidb_serverless_cluster", return_value={}):
            assert TidbService.fetch_qdrant_endpoint("url", "pub", "priv", "c-123") is None

    def test_returns_none_on_exception(self):
        with patch.object(TidbService, "get_tidb_serverless_cluster", side_effect=RuntimeError("network error")):
            assert TidbService.fetch_qdrant_endpoint("url", "pub", "priv", "c-123") is None
|
||||
|
||||
|
||||
class TestCreateTidbServerlessClusterQdrantEndpoint:
    """Verify that create_tidb_serverless_cluster includes qdrant_endpoint in its result."""

    @staticmethod
    def _create(cluster_payload):
        # Run cluster creation with config/HTTP mocked and the Get Cluster
        # API pinned to a fixed payload.
        with (
            patch("dify_vdb_tidb_on_qdrant.tidb_service.dify_config") as cfg,
            patch("dify_vdb_tidb_on_qdrant.tidb_service._tidb_http_client") as http,
            patch.object(TidbService, "get_tidb_serverless_cluster", return_value=cluster_payload),
        ):
            cfg.TIDB_SPEND_LIMIT = 10
            http.post.return_value = MagicMock(status_code=200, json=lambda: {"clusterId": "c-1"})
            return TidbService.create_tidb_serverless_cluster("proj", "url", "iam", "pub", "priv", "us-east-1")

    def test_result_contains_qdrant_endpoint(self):
        result = self._create(
            {
                "state": "ACTIVE",
                "userPrefix": "pfx",
                "endpoints": {"public": {"host": "gw.tidbcloud.com", "port": 4000}},
            }
        )
        assert result is not None
        assert result["qdrant_endpoint"] == "https://qdrant-gw.tidbcloud.com"

    def test_result_qdrant_endpoint_none_when_no_endpoints(self):
        result = self._create({"state": "ACTIVE", "userPrefix": "pfx"})
        assert result is not None
        assert result["qdrant_endpoint"] is None
|
||||
|
||||
|
||||
class TestBatchCreateTidbServerlessClusterQdrantEndpoint:
    """Verify that batch_create includes qdrant_endpoint per cluster."""

    def test_batch_result_contains_qdrant_endpoint(self):
        cluster_name = "abc123"
        with (
            patch("dify_vdb_tidb_on_qdrant.tidb_service.dify_config") as cfg,
            patch("dify_vdb_tidb_on_qdrant.tidb_service._tidb_http_client") as http,
            patch("dify_vdb_tidb_on_qdrant.tidb_service.redis_client") as redis_mock,
            patch.object(TidbService, "fetch_qdrant_endpoint", return_value="https://qdrant-gw.tidbcloud.com"),
        ):
            cfg.TIDB_SPEND_LIMIT = 10
            http.post.return_value = MagicMock(
                status_code=200,
                json=lambda: {"clusters": [{"clusterId": "c-1", "displayName": cluster_name}]},
            )
            redis_mock.setex = MagicMock()
            redis_mock.get.return_value = b"password123"

            result = TidbService.batch_create_tidb_serverless_cluster(
                batch_size=1,
                project_id="proj",
                api_url="url",
                iam_url="iam",
                public_key="pub",
                private_key="priv",
                region="us-east-1",
            )

        assert len(result) == 1
        assert result[0]["qdrant_endpoint"] == "https://qdrant-gw.tidbcloud.com"
|
||||
|
||||
|
||||
class TestCreateTidbServerlessClusterRetry:
    """Cover retry/logging paths in create_tidb_serverless_cluster."""

    def test_polls_until_active(self):
        # First poll reports CREATING, second reports ACTIVE.
        states = [
            {"state": "CREATING", "userPrefix": ""},
            {"state": "ACTIVE", "userPrefix": "pfx", "endpoints": {"public": {"host": "gw.tidb.com"}}},
        ]
        with (
            patch("dify_vdb_tidb_on_qdrant.tidb_service.dify_config") as cfg,
            patch("dify_vdb_tidb_on_qdrant.tidb_service._tidb_http_client") as http,
            patch.object(TidbService, "get_tidb_serverless_cluster", side_effect=states) as get_cluster,
            patch("dify_vdb_tidb_on_qdrant.tidb_service.time.sleep"),
        ):
            cfg.TIDB_SPEND_LIMIT = 10
            http.post.return_value = MagicMock(status_code=200, json=lambda: {"clusterId": "c-1"})
            result = TidbService.create_tidb_serverless_cluster("proj", "url", "iam", "pub", "priv", "us-east-1")

        assert result is not None
        assert result["qdrant_endpoint"] == "https://qdrant-gw.tidb.com"
        assert get_cluster.call_count == 2

    def test_returns_none_after_max_retries(self):
        stuck = {"state": "CREATING", "userPrefix": ""}
        with (
            patch("dify_vdb_tidb_on_qdrant.tidb_service.dify_config") as cfg,
            patch("dify_vdb_tidb_on_qdrant.tidb_service._tidb_http_client") as http,
            patch.object(TidbService, "get_tidb_serverless_cluster", return_value=stuck),
            patch("dify_vdb_tidb_on_qdrant.tidb_service.time.sleep"),
        ):
            cfg.TIDB_SPEND_LIMIT = 10
            http.post.return_value = MagicMock(status_code=200, json=lambda: {"clusterId": "c-1"})
            result = TidbService.create_tidb_serverless_cluster("proj", "url", "iam", "pub", "priv", "us-east-1")

        assert result is None

    def test_raises_on_post_failure(self):
        with (
            patch("dify_vdb_tidb_on_qdrant.tidb_service.dify_config") as cfg,
            patch("dify_vdb_tidb_on_qdrant.tidb_service._tidb_http_client") as http,
        ):
            cfg.TIDB_SPEND_LIMIT = 10
            failed = MagicMock(status_code=400, text="Bad Request")
            failed.raise_for_status.side_effect = Exception("HTTP 400")
            http.post.return_value = failed
            with pytest.raises(Exception, match="HTTP 400"):
                TidbService.create_tidb_serverless_cluster("proj", "url", "iam", "pub", "priv", "us-east-1")
|
||||
|
||||
|
||||
class TestBatchCreateEdgeCases:
    """Cover logging/edge-case branches in batch_create."""

    def test_skips_cluster_when_no_cached_password(self):
        with (
            patch("dify_vdb_tidb_on_qdrant.tidb_service.dify_config") as cfg,
            patch("dify_vdb_tidb_on_qdrant.tidb_service._tidb_http_client") as http,
            patch("dify_vdb_tidb_on_qdrant.tidb_service.redis_client") as redis_mock,
            patch.object(TidbService, "fetch_qdrant_endpoint", return_value=None) as fetch_ep,
        ):
            cfg.TIDB_SPEND_LIMIT = 10
            http.post.return_value = MagicMock(
                status_code=200,
                json=lambda: {"clusters": [{"clusterId": "c-1", "displayName": "name1"}]},
            )
            redis_mock.setex = MagicMock()
            # Simulate a missing password cache entry: the cluster must be
            # skipped without ever resolving its endpoint.
            redis_mock.get.return_value = None

            result = TidbService.batch_create_tidb_serverless_cluster(
                batch_size=1,
                project_id="proj",
                api_url="url",
                iam_url="iam",
                public_key="pub",
                private_key="priv",
                region="us-east-1",
            )

        assert len(result) == 0
        fetch_ep.assert_not_called()

    def test_raises_on_post_failure(self):
        with (
            patch("dify_vdb_tidb_on_qdrant.tidb_service.dify_config") as cfg,
            patch("dify_vdb_tidb_on_qdrant.tidb_service._tidb_http_client") as http,
            patch("dify_vdb_tidb_on_qdrant.tidb_service.redis_client") as redis_mock,
        ):
            cfg.TIDB_SPEND_LIMIT = 10
            failed = MagicMock(status_code=500, text="Server Error")
            failed.raise_for_status.side_effect = Exception("HTTP 500")
            http.post.return_value = failed
            redis_mock.setex = MagicMock()

            with pytest.raises(Exception, match="HTTP 500"):
                TidbService.batch_create_tidb_serverless_cluster(
                    batch_size=1,
                    project_id="proj",
                    api_url="url",
                    iam_url="iam",
                    public_key="pub",
                    private_key="priv",
                    region="us-east-1",
                )
|
||||
@ -4,92 +4,55 @@ version = "1.13.3"
|
||||
requires-python = "~=3.12.0"
|
||||
|
||||
dependencies = [
|
||||
"aliyun-log-python-sdk~=0.9.44",
|
||||
"arize-phoenix-otel~=0.15.0",
|
||||
"azure-identity==1.25.3",
|
||||
"beautifulsoup4==4.14.3",
|
||||
"boto3==1.42.88",
|
||||
"bs4~=0.0.1",
|
||||
"cachetools~=7.0.5",
|
||||
"celery~=5.6.3",
|
||||
"charset-normalizer>=3.4.7",
|
||||
"flask~=3.1.3",
|
||||
"flask-compress>=1.24,<1.25",
|
||||
"flask-cors~=6.0.2",
|
||||
"flask-login~=0.6.3",
|
||||
"flask-migrate~=4.1.0",
|
||||
"flask-orjson~=2.0.0",
|
||||
"flask-sqlalchemy~=3.1.1",
|
||||
"gevent~=26.4.0",
|
||||
"gmpy2~=2.3.0",
|
||||
"google-api-core>=2.30.3",
|
||||
"google-api-python-client==2.194.0",
|
||||
"google-auth>=2.49.2",
|
||||
"google-auth-httplib2==0.3.1",
|
||||
"google-cloud-aiplatform>=1.147.0",
|
||||
"googleapis-common-protos>=1.74.0",
|
||||
"graphon>=0.1.2",
|
||||
"gunicorn~=25.3.0",
|
||||
"httpx[socks]~=0.28.1",
|
||||
"jieba==0.42.1",
|
||||
"json-repair>=0.59.2",
|
||||
"langfuse>=4.2.0,<5.0.0",
|
||||
"langsmith~=0.7.30",
|
||||
"markdown~=3.10.2",
|
||||
"mlflow-skinny>=3.11.1",
|
||||
"numpy~=2.4.4",
|
||||
"openpyxl~=3.1.5",
|
||||
"opik~=1.11.2",
|
||||
"litellm==1.83.0", # Pinned to avoid madoka dependency issue
|
||||
"opentelemetry-api==1.41.0",
|
||||
"opentelemetry-distro==0.62b0",
|
||||
"opentelemetry-exporter-otlp==1.41.0",
|
||||
"opentelemetry-exporter-otlp-proto-common==1.41.0",
|
||||
"opentelemetry-exporter-otlp-proto-grpc==1.41.0",
|
||||
"opentelemetry-exporter-otlp-proto-http==1.41.0",
|
||||
"opentelemetry-instrumentation==0.62b0",
|
||||
"opentelemetry-instrumentation-celery==0.62b0",
|
||||
"opentelemetry-instrumentation-flask==0.62b0",
|
||||
"opentelemetry-instrumentation-httpx==0.62b0",
|
||||
"opentelemetry-instrumentation-redis==0.62b0",
|
||||
"opentelemetry-instrumentation-sqlalchemy==0.62b0",
|
||||
"opentelemetry-propagator-b3==1.41.0",
|
||||
"opentelemetry-proto==1.41.0",
|
||||
"opentelemetry-sdk==1.41.0",
|
||||
"opentelemetry-semantic-conventions==0.62b0",
|
||||
"opentelemetry-util-http==0.62b0",
|
||||
"pandas[excel,output-formatting,performance]~=3.0.2",
|
||||
"psycogreen~=1.0.2",
|
||||
"psycopg2-binary~=2.9.11",
|
||||
"pycryptodome==3.23.0",
|
||||
"pydantic~=2.12.5",
|
||||
"pydantic-settings~=2.13.1",
|
||||
"pyjwt~=2.12.1",
|
||||
"pypdfium2==5.6.0",
|
||||
"python-docx~=1.2.0",
|
||||
"python-dotenv==1.2.2",
|
||||
"pyyaml~=6.0.1",
|
||||
"readabilipy~=0.3.0",
|
||||
"redis[hiredis]~=7.4.0",
|
||||
"resend~=2.27.0",
|
||||
"sentry-sdk[flask]~=2.57.0",
|
||||
"sqlalchemy~=2.0.49",
|
||||
"starlette==1.0.0",
|
||||
"tiktoken~=0.12.0",
|
||||
"transformers~=5.3.0",
|
||||
"unstructured[docx,epub,md,ppt,pptx]~=0.21.5",
|
||||
"pypandoc~=1.13",
|
||||
"yarl~=1.23.0",
|
||||
"sseclient-py~=1.9.0",
|
||||
"httpx-sse~=0.4.0",
|
||||
"sendgrid~=6.12.5",
|
||||
"flask-restx~=1.3.2",
|
||||
"packaging~=26.0",
|
||||
# Legacy: mature and widely deployed
|
||||
"bleach>=6.3.0",
|
||||
"boto3>=1.42.88",
|
||||
"celery>=5.6.3",
|
||||
"croniter>=6.2.2",
|
||||
"apscheduler>=3.11.2",
|
||||
"weave>=0.52.36",
|
||||
"fastopenapi[flask]>=0.7.0",
|
||||
"bleach~=6.3.0",
|
||||
"flask-cors>=6.0.2",
|
||||
"gevent>=26.4.0",
|
||||
"gevent-websocket>=0.10.1",
|
||||
"gmpy2>=2.3.0",
|
||||
"google-api-python-client>=2.194.0",
|
||||
"gunicorn>=25.3.0",
|
||||
"psycogreen>=1.0.2",
|
||||
"psycopg2-binary>=2.9.11",
|
||||
"python-socketio>=5.13.0",
|
||||
"redis[hiredis]>=7.4.0",
|
||||
"sendgrid>=6.12.5",
|
||||
"sseclient-py>=1.8.0",
|
||||
|
||||
# Stable: production-proven, cap below the next major
|
||||
"aliyun-log-python-sdk>=0.9.44,<1.0.0",
|
||||
"azure-identity>=1.25.3,<2.0.0",
|
||||
"flask-compress>=1.24,<2.0.0",
|
||||
"flask-login>=0.6.3,<1.0.0",
|
||||
"flask-migrate>=4.1.0,<5.0.0",
|
||||
"flask-orjson>=2.0.0,<3.0.0",
|
||||
"flask-restx>=1.3.2,<2.0.0",
|
||||
"google-cloud-aiplatform>=1.147.0,<2.0.0",
|
||||
"httpx[socks]>=0.28.1,<1.0.0",
|
||||
"langfuse>=4.2.0,<5.0.0",
|
||||
"langsmith>=0.7.30,<1.0.0",
|
||||
"mlflow-skinny>=3.11.1,<4.0.0",
|
||||
"opentelemetry-distro>=0.62b0,<1.0.0",
|
||||
"opentelemetry-instrumentation-celery>=0.62b0,<1.0.0",
|
||||
"opentelemetry-instrumentation-flask>=0.62b0,<1.0.0",
|
||||
"opentelemetry-instrumentation-httpx>=0.62b0,<1.0.0",
|
||||
"opentelemetry-instrumentation-redis>=0.62b0,<1.0.0",
|
||||
"opentelemetry-instrumentation-sqlalchemy>=0.62b0,<1.0.0",
|
||||
"opentelemetry-propagator-b3>=1.41.0,<2.0.0",
|
||||
"readabilipy>=0.3.0,<1.0.0",
|
||||
"resend>=2.27.0,<3.0.0",
|
||||
"weave>=0.52.36,<1.0.0",
|
||||
|
||||
# Emerging: newer and fast-moving, use compatible pins
|
||||
"arize-phoenix-otel~=0.15.0",
|
||||
"fastopenapi[flask]~=0.7.0",
|
||||
"graphon~=0.1.2",
|
||||
"httpx-sse~=0.4.0",
|
||||
"json-repair~=0.59.2",
|
||||
"opik~=1.11.2",
|
||||
]
|
||||
# Before adding a new dependency, place it in
# alphabetical order (a-z) and in a suitable group.
|
||||
@ -147,46 +110,46 @@ override-dependencies = [
|
||||
# Required for development and running tests
|
||||
############################################################
|
||||
dev = [
|
||||
"coverage~=7.13.4",
|
||||
"dotenv-linter~=0.7.0",
|
||||
"faker~=40.13.0",
|
||||
"lxml-stubs~=0.5.1",
|
||||
"basedpyright~=1.39.0",
|
||||
"ruff~=0.15.10",
|
||||
"pytest~=9.0.3",
|
||||
"pytest-benchmark~=5.2.3",
|
||||
"pytest-cov~=7.1.0",
|
||||
"pytest-env~=1.6.0",
|
||||
"pytest-mock~=3.15.1",
|
||||
"testcontainers~=4.14.2",
|
||||
"types-aiofiles~=25.1.0",
|
||||
"types-beautifulsoup4~=4.12.0",
|
||||
"types-cachetools~=6.2.0",
|
||||
"types-colorama~=0.4.15",
|
||||
"types-defusedxml~=0.7.0",
|
||||
"types-deprecated~=1.3.1",
|
||||
"types-docutils~=0.22.3",
|
||||
"types-flask-cors~=6.0.0",
|
||||
"types-flask-migrate~=4.1.0",
|
||||
"types-gevent~=26.4.0",
|
||||
"types-greenlet~=3.4.0",
|
||||
"types-html5lib~=1.1.11",
|
||||
"types-markdown~=3.10.2",
|
||||
"types-oauthlib~=3.3.0",
|
||||
"types-objgraph~=3.6.0",
|
||||
"types-olefile~=0.47.0",
|
||||
"types-openpyxl~=3.1.5",
|
||||
"types-pexpect~=4.9.0",
|
||||
"types-protobuf~=7.34.1",
|
||||
"types-psutil~=7.2.2",
|
||||
"types-psycopg2~=2.9.21",
|
||||
"types-pygments~=2.20.0",
|
||||
"types-pymysql~=1.1.0",
|
||||
"types-python-dateutil~=2.9.0",
|
||||
"types-pywin32~=311.0.0",
|
||||
"types-pyyaml~=6.0.12",
|
||||
"types-regex~=2026.4.4",
|
||||
"types-shapely~=2.1.0",
|
||||
"coverage>=7.13.4",
|
||||
"dotenv-linter>=0.7.0",
|
||||
"faker>=20.1.0",
|
||||
"lxml-stubs>=0.5.1",
|
||||
"basedpyright>=1.39.0",
|
||||
"ruff>=0.15.10",
|
||||
"pytest>=9.0.3",
|
||||
"pytest-benchmark>=5.2.3",
|
||||
"pytest-cov>=7.1.0",
|
||||
"pytest-env>=1.6.0",
|
||||
"pytest-mock>=3.15.1",
|
||||
"testcontainers>=4.14.2",
|
||||
"types-aiofiles>=25.1.0",
|
||||
"types-beautifulsoup4>=4.12.0",
|
||||
"types-cachetools>=6.2.0",
|
||||
"types-colorama>=0.4.15",
|
||||
"types-defusedxml>=0.7.0",
|
||||
"types-deprecated>=1.3.1",
|
||||
"types-docutils>=0.22.3",
|
||||
"types-flask-cors>=6.0.0",
|
||||
"types-flask-migrate>=4.1.0",
|
||||
"types-gevent>=26.4.0",
|
||||
"types-greenlet>=3.4.0",
|
||||
"types-html5lib>=1.1.11",
|
||||
"types-markdown>=3.10.2",
|
||||
"types-oauthlib>=3.3.0",
|
||||
"types-objgraph>=3.6.0",
|
||||
"types-olefile>=0.47.0",
|
||||
"types-openpyxl>=3.1.5",
|
||||
"types-pexpect>=4.9.0",
|
||||
"types-protobuf>=7.34.1",
|
||||
"types-psutil>=7.2.2",
|
||||
"types-psycopg2>=2.9.21",
|
||||
"types-pygments>=2.20.0",
|
||||
"types-pymysql>=1.1.0",
|
||||
"types-python-dateutil>=2.9.0",
|
||||
"types-pywin32>=311.0.0",
|
||||
"types-pyyaml>=6.0.12",
|
||||
"types-regex>=2026.4.4",
|
||||
"types-shapely>=2.1.0",
|
||||
"types-simplejson>=3.20.0.20260408",
|
||||
"types-six>=1.17.0.20260408",
|
||||
"types-tensorflow>=2.18.0.20260408",
|
||||
@ -198,19 +161,18 @@ dev = [
|
||||
"types_pyOpenSSL>=24.1.0",
|
||||
"types_cffi>=2.0.0.20260408",
|
||||
"types_setuptools>=82.0.0.20260408",
|
||||
"pandas-stubs~=3.0.0",
|
||||
"pandas-stubs>=3.0.0",
|
||||
"scipy-stubs>=1.15.3.0",
|
||||
"types-python-http-client>=3.3.7.20260408",
|
||||
"import-linter>=2.3",
|
||||
"types-redis>=4.6.0.20241004",
|
||||
"celery-types>=0.23.0",
|
||||
"mypy~=1.20.1",
|
||||
"mypy>=1.20.1",
|
||||
# "locust>=2.40.4", # Temporarily removed due to compatibility issues. Uncomment when resolved.
|
||||
"sseclient-py>=1.8.0",
|
||||
"pytest-timeout>=2.4.0",
|
||||
"pytest-xdist>=3.8.0",
|
||||
"pyrefly>=0.60.0",
|
||||
"xinference-client~=2.4.0",
|
||||
"xinference-client>=2.4.0",
|
||||
]
|
||||
|
||||
############################################################
|
||||
@ -218,21 +180,21 @@ dev = [
|
||||
# Required for storage clients
|
||||
############################################################
|
||||
storage = [
|
||||
"azure-storage-blob==12.28.0",
|
||||
"bce-python-sdk~=0.9.69",
|
||||
"cos-python-sdk-v5==1.9.41",
|
||||
"esdk-obs-python==3.26.2",
|
||||
"azure-storage-blob>=12.28.0",
|
||||
"bce-python-sdk>=0.9.69",
|
||||
"cos-python-sdk-v5>=1.9.41",
|
||||
"esdk-obs-python>=3.22.2",
|
||||
"google-cloud-storage>=3.10.1",
|
||||
"opendal~=0.46.0",
|
||||
"oss2==2.19.1",
|
||||
"supabase~=2.18.1",
|
||||
"tos~=2.9.0",
|
||||
"opendal>=0.46.0",
|
||||
"oss2>=2.19.1",
|
||||
"supabase>=2.18.1",
|
||||
"tos>=2.9.0",
|
||||
]
|
||||
|
||||
############################################################
|
||||
# [ Tools ] dependency group
|
||||
############################################################
|
||||
tools = ["cloudscraper~=1.2.71", "nltk~=3.9.1"]
|
||||
tools = ["cloudscraper>=1.2.71", "nltk>=3.9.1"]
|
||||
|
||||
############################################################
|
||||
# [ VDB ] workspace plugins — hollow packages under providers/vdb/*
|
||||
@ -302,7 +264,7 @@ vdb-vastbase = ["dify-vdb-vastbase"]
|
||||
vdb-vikingdb = ["dify-vdb-vikingdb"]
|
||||
vdb-weaviate = ["dify-vdb-weaviate"]
|
||||
# Optional client used by some tests / integrations (not a vector backend plugin)
|
||||
vdb-xinference = ["xinference-client~=2.4.0"]
|
||||
vdb-xinference = ["xinference-client>=2.4.0"]
|
||||
|
||||
[tool.pyrefly]
|
||||
project-includes = ["."]
|
||||
|
||||
147
api/repositories/workflow_collaboration_repository.py
Normal file
147
api/repositories/workflow_collaboration_repository.py
Normal file
@ -0,0 +1,147 @@
|
||||
from __future__ import annotations

import json
from typing import TypedDict

from extensions.ext_redis import redis_client

# TTL (seconds) shared by all collaboration keys; keys must be refreshed to stay alive.
SESSION_STATE_TTL_SECONDS = 3600
# Hash per workflow: field = socket sid, value = serialized WorkflowSessionInfo.
WORKFLOW_ONLINE_USERS_PREFIX = "workflow_online_users:"
# String per workflow holding the current leader's socket sid.
WORKFLOW_LEADER_PREFIX = "workflow_leader:"
# String per socket sid mapping back to its workflow/user (serialized SidMapping).
WS_SID_MAP_PREFIX = "ws_sid_map:"


class WorkflowSessionInfo(TypedDict):
    """One online collaboration session, as stored in the per-workflow hash."""

    user_id: str
    username: str
    avatar: str | None
    sid: str
    connected_at: int  # unix timestamp of when the socket joined


class SidMapping(TypedDict):
    """Reverse lookup from a socket sid to its workflow and user."""

    workflow_id: str
    user_id: str
|
||||
|
||||
class WorkflowCollaborationRepository:
    """Redis-backed storage for workflow collaboration sessions and leader state.

    Three key families are used (see the module-level prefixes):
    - a hash per workflow mapping sid -> serialized ``WorkflowSessionInfo``,
    - a string per workflow holding the current leader's sid,
    - a string per sid mapping back to its workflow/user (``SidMapping``).
    All keys share ``SESSION_STATE_TTL_SECONDS`` and expire unless refreshed.
    """

    def __init__(self) -> None:
        self._redis = redis_client

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(redis_client={self._redis})"

    @staticmethod
    def workflow_key(workflow_id: str) -> str:
        """Key of the hash listing online sessions for *workflow_id*."""
        return f"{WORKFLOW_ONLINE_USERS_PREFIX}{workflow_id}"

    @staticmethod
    def leader_key(workflow_id: str) -> str:
        """Key of the string holding the leader sid for *workflow_id*."""
        return f"{WORKFLOW_LEADER_PREFIX}{workflow_id}"

    @staticmethod
    def sid_key(sid: str) -> str:
        """Key of the sid -> workflow/user mapping for *sid*."""
        return f"{WS_SID_MAP_PREFIX}{sid}"

    @staticmethod
    def _decode(value: str | bytes | None) -> str | None:
        """Normalize a redis reply to ``str`` (the client may return bytes)."""
        if value is None:
            return None
        if isinstance(value, bytes):
            return value.decode("utf-8")
        return value

    def refresh_session_state(self, workflow_id: str, sid: str) -> None:
        """Extend the TTL of the workflow hash and the sid mapping, if they exist."""
        workflow_key = self.workflow_key(workflow_id)
        sid_key = self.sid_key(sid)
        if self._redis.exists(workflow_key):
            self._redis.expire(workflow_key, SESSION_STATE_TTL_SECONDS)
        if self._redis.exists(sid_key):
            self._redis.expire(sid_key, SESSION_STATE_TTL_SECONDS)

    def set_session_info(self, workflow_id: str, session_info: WorkflowSessionInfo) -> None:
        """Store *session_info* under the workflow hash and record the sid mapping."""
        workflow_key = self.workflow_key(workflow_id)
        self._redis.hset(workflow_key, session_info["sid"], json.dumps(session_info))
        self._redis.set(
            self.sid_key(session_info["sid"]),
            json.dumps({"workflow_id": workflow_id, "user_id": session_info["user_id"]}),
            ex=SESSION_STATE_TTL_SECONDS,
        )
        # HSET does not set a TTL on the hash; refresh applies it now that the key exists.
        self.refresh_session_state(workflow_id, session_info["sid"])

    def get_sid_mapping(self, sid: str) -> SidMapping | None:
        """Return the workflow/user mapping for *sid*, or ``None`` if absent or corrupt.

        Validates the decoded payload so callers can index ``workflow_id`` and
        ``user_id`` directly without risking a ``KeyError``/``TypeError`` on a
        stale or malformed redis value (e.g. a JSON list, or a dict missing keys).
        """
        raw = self._redis.get(self.sid_key(sid))
        if not raw:
            return None
        value = self._decode(raw)
        if not value:
            return None
        try:
            mapping = json.loads(value)
        except (TypeError, json.JSONDecodeError):
            return None
        # Enforce the SidMapping shape declared in the return type.
        if not isinstance(mapping, dict) or "workflow_id" not in mapping or "user_id" not in mapping:
            return None
        return {"workflow_id": str(mapping["workflow_id"]), "user_id": str(mapping["user_id"])}

    def delete_session(self, workflow_id: str, sid: str) -> None:
        """Remove the session from the workflow hash and drop its sid mapping."""
        self._redis.hdel(self.workflow_key(workflow_id), sid)
        self._redis.delete(self.sid_key(sid))

    def session_exists(self, workflow_id: str, sid: str) -> bool:
        """True if *sid* is recorded in the workflow's online-users hash."""
        return bool(self._redis.hexists(self.workflow_key(workflow_id), sid))

    def sid_mapping_exists(self, sid: str) -> bool:
        """True if the reverse sid -> workflow/user mapping still exists."""
        return bool(self._redis.exists(self.sid_key(sid)))

    def get_session_sids(self, workflow_id: str) -> list[str]:
        """Return all session sids recorded for *workflow_id*."""
        raw_sids = self._redis.hkeys(self.workflow_key(workflow_id))
        decoded_sids: list[str] = []
        for sid in raw_sids:
            decoded = self._decode(sid)
            if decoded:
                decoded_sids.append(decoded)
        return decoded_sids

    def list_sessions(self, workflow_id: str) -> list[WorkflowSessionInfo]:
        """Return all well-formed sessions for *workflow_id*, skipping corrupt entries."""
        sessions_json = self._redis.hgetall(self.workflow_key(workflow_id))
        users: list[WorkflowSessionInfo] = []

        for session_info_json in sessions_json.values():
            value = self._decode(session_info_json)
            if not value:
                continue
            try:
                session_info = json.loads(value)
            except (TypeError, json.JSONDecodeError):
                continue

            # Tolerate malformed hash values: only dicts with the required keys survive.
            if not isinstance(session_info, dict):
                continue
            if "user_id" not in session_info or "username" not in session_info or "sid" not in session_info:
                continue

            users.append(
                {
                    "user_id": str(session_info["user_id"]),
                    "username": str(session_info["username"]),
                    "avatar": session_info.get("avatar"),
                    "sid": str(session_info["sid"]),
                    "connected_at": int(session_info.get("connected_at") or 0),
                }
            )

        return users

    def get_current_leader(self, workflow_id: str) -> str | None:
        """Return the sid of the current leader, or ``None`` if no leader is set."""
        raw = self._redis.get(self.leader_key(workflow_id))
        return self._decode(raw)

    def set_leader_if_absent(self, workflow_id: str, sid: str) -> bool:
        """Atomically claim leadership (SET NX); True if *sid* became the leader."""
        return bool(self._redis.set(self.leader_key(workflow_id), sid, nx=True, ex=SESSION_STATE_TTL_SECONDS))

    def set_leader(self, workflow_id: str, sid: str) -> None:
        """Unconditionally assign *sid* as the leader, resetting the TTL."""
        self._redis.set(self.leader_key(workflow_id), sid, ex=SESSION_STATE_TTL_SECONDS)

    def delete_leader(self, workflow_id: str) -> None:
        """Clear the leader record for *workflow_id*."""
        self._redis.delete(self.leader_key(workflow_id))

    def expire_leader(self, workflow_id: str) -> None:
        """Refresh the leader key's TTL without changing its value."""
        self._redis.expire(self.leader_key(workflow_id), SESSION_STATE_TTL_SECONDS)
|
||||
@ -57,6 +57,7 @@ def create_clusters(batch_size):
|
||||
cluster_name=new_cluster["cluster_name"],
|
||||
account=new_cluster["account"],
|
||||
password=new_cluster["password"],
|
||||
qdrant_endpoint=new_cluster.get("qdrant_endpoint"),
|
||||
active=False,
|
||||
status=TidbAuthBindingStatus.CREATING,
|
||||
)
|
||||
|
||||
@ -455,7 +455,7 @@ class AppDslService:
|
||||
app.updated_by = account.id
|
||||
|
||||
self._session.add(app)
|
||||
self._session.commit()
|
||||
self._session.flush()
|
||||
app_was_created.send(app, account=account)
|
||||
|
||||
# save dependencies
|
||||
|
||||
@ -164,6 +164,7 @@ class SystemFeatureModel(BaseModel):
|
||||
enable_email_code_login: bool = False
|
||||
enable_email_password_login: bool = True
|
||||
enable_social_oauth_login: bool = False
|
||||
enable_collaboration_mode: bool = False
|
||||
is_allow_register: bool = False
|
||||
is_allow_create_workspace: bool = False
|
||||
is_email_setup: bool = False
|
||||
@ -244,6 +245,7 @@ class FeatureService:
|
||||
system_features.enable_email_code_login = dify_config.ENABLE_EMAIL_CODE_LOGIN
|
||||
system_features.enable_email_password_login = dify_config.ENABLE_EMAIL_PASSWORD_LOGIN
|
||||
system_features.enable_social_oauth_login = dify_config.ENABLE_SOCIAL_OAUTH_LOGIN
|
||||
system_features.enable_collaboration_mode = dify_config.ENABLE_COLLABORATION_MODE
|
||||
system_features.is_allow_register = dify_config.ALLOW_REGISTER
|
||||
system_features.is_allow_create_workspace = dify_config.ALLOW_CREATE_WORKSPACE
|
||||
system_features.is_email_setup = dify_config.MAIL_TYPE is not None and dify_config.MAIL_TYPE != ""
|
||||
|
||||
295
api/services/workflow_collaboration_service.py
Normal file
295
api/services/workflow_collaboration_service.py
Normal file
@ -0,0 +1,295 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
from collections.abc import Mapping
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
from core.db.session_factory import session_factory
|
||||
from models.account import Account
|
||||
from models.model import App
|
||||
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository, WorkflowSessionInfo
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WorkflowCollaborationService:
    """Socket.IO collaboration orchestration: room membership, event relay, leader election.

    State lives in the injected :class:`WorkflowCollaborationRepository` (redis);
    real-time delivery goes through the injected ``socketio`` server instance.
    """

    def __init__(self, repository: WorkflowCollaborationRepository, socketio) -> None:
        self._repository = repository
        self._socketio = socketio

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(repository={self._repository})"

    def save_socket_identity(self, sid: str, user: Account) -> None:
        """Persist the authenticated console user on the raw socket session."""
        self._socketio.save_session(
            sid,
            {
                "user_id": user.id,
                "username": user.name,
                "avatar": user.avatar,
                "tenant_id": user.current_tenant_id,
            },
        )

    def authorize_and_join_workflow_room(self, workflow_id: str, sid: str) -> tuple[str, bool] | None:
        """
        Join a collaboration room only after validating the socket session and tenant-scoped app access.

        The Socket.IO payload still calls the room key `workflow_id`, but the identifier is the workflow app's
        `App.id`. Returning `None` lets the controller reject the join before any Redis or room state is created.
        """
        session = self._socketio.get_session(sid)
        user_id = session.get("user_id")
        tenant_id = session.get("tenant_id")
        if not user_id or not tenant_id:
            return None

        if not self._can_access_workflow(workflow_id, str(tenant_id)):
            logger.warning(
                "Workflow collaboration join rejected: workflow_id=%s tenant_id=%s user_id=%s sid=%s",
                workflow_id,
                tenant_id,
                user_id,
                sid,
            )
            return None

        session_info: WorkflowSessionInfo = {
            "user_id": str(user_id),
            "username": str(session.get("username", "Unknown")),
            "avatar": session.get("avatar"),
            "sid": sid,
            "connected_at": int(time.time()),
        }

        self._repository.set_session_info(workflow_id, session_info)

        # First joiner becomes leader; others learn their follower status.
        leader_sid = self.get_or_set_leader(workflow_id, sid)
        is_leader = leader_sid == sid

        self._socketio.enter_room(sid, workflow_id)
        self.broadcast_online_users(workflow_id)

        self._socketio.emit("status", {"isLeader": is_leader}, room=sid)

        return str(user_id), is_leader

    def _can_access_workflow(self, workflow_id: str, tenant_id: str) -> bool:
        """Check room access without relying on Flask's app-context-bound scoped session."""
        with session_factory.create_session() as session:
            app_id = session.scalar(select(App.id).where(App.id == workflow_id, App.tenant_id == tenant_id).limit(1))
            return app_id is not None

    def disconnect_session(self, sid: str) -> None:
        """Tear down a disconnected socket: remove its session, re-elect leader, refresh presence."""
        mapping = self._repository.get_sid_mapping(sid)
        if not mapping:
            return

        workflow_id = mapping["workflow_id"]
        self._repository.delete_session(workflow_id, sid)

        self.handle_leader_disconnect(workflow_id, sid)
        self.broadcast_online_users(workflow_id)

    def relay_collaboration_event(self, sid: str, data: Mapping[str, object]) -> tuple[dict[str, str], int]:
        """Relay a collaboration event; ``sync_request`` goes to the leader only, others to the room.

        Returns an (ack payload, status code) tuple for the Socket.IO handler.
        """
        mapping = self._repository.get_sid_mapping(sid)
        if not mapping:
            return {"msg": "unauthorized"}, 401

        workflow_id = mapping["workflow_id"]
        user_id = mapping["user_id"]
        self.refresh_session_state(workflow_id, sid)

        event_type = data.get("type")
        event_data = data.get("data")
        timestamp = data.get("timestamp", int(time.time()))

        if not event_type:
            return {"msg": "invalid event type"}, 400

        if event_type == "sync_request":
            # Forward to the current leader; re-elect one first if the leader is gone.
            leader_sid = self._repository.get_current_leader(workflow_id)
            target_sid: str | None
            if leader_sid and self.is_session_active(workflow_id, leader_sid):
                target_sid = leader_sid
            else:
                if leader_sid:
                    self._repository.delete_leader(workflow_id)
                target_sid = self._select_graph_leader(workflow_id, preferred_sid=sid)
                if target_sid:
                    self._repository.set_leader(workflow_id, target_sid)
                    self.broadcast_leader_change(workflow_id, target_sid)

            if not target_sid:
                return {"msg": "no_active_leader"}, 200

            self._socketio.emit(
                "collaboration_update",
                {"type": event_type, "userId": user_id, "data": event_data, "timestamp": timestamp},
                room=target_sid,
            )

            return {"msg": "sync_request_forwarded"}, 200

        self._socketio.emit(
            "collaboration_update",
            {"type": event_type, "userId": user_id, "data": event_data, "timestamp": timestamp},
            room=workflow_id,
            skip_sid=sid,
        )

        return {"msg": "event_broadcasted"}, 200

    def relay_graph_event(self, sid: str, data: object) -> tuple[dict[str, str], int]:
        """Broadcast a graph update to everyone in the room except the sender."""
        mapping = self._repository.get_sid_mapping(sid)
        if not mapping:
            return {"msg": "unauthorized"}, 401

        workflow_id = mapping["workflow_id"]
        self.refresh_session_state(workflow_id, sid)

        self._socketio.emit("graph_update", data, room=workflow_id, skip_sid=sid)

        return {"msg": "graph_update_broadcasted"}, 200

    def get_or_set_leader(self, workflow_id: str, sid: str) -> str:
        """Return the active leader's sid, electing *sid* if the slot is free or stale."""
        current_leader = self._repository.get_current_leader(workflow_id)

        if current_leader:
            if self.is_session_active(workflow_id, current_leader):
                return current_leader
            # Stale leader: drop both its session record and the leader key.
            self._repository.delete_session(workflow_id, current_leader)
            self._repository.delete_leader(workflow_id)

        was_set = self._repository.set_leader_if_absent(workflow_id, sid)

        if was_set:
            if current_leader:
                self.broadcast_leader_change(workflow_id, sid)
            return sid

        # Lost the SET NX race: report whoever won it.
        current_leader = self._repository.get_current_leader(workflow_id)
        if current_leader:
            return current_leader

        return sid

    def handle_leader_disconnect(self, workflow_id: str, disconnected_sid: str) -> None:
        """If the disconnecting socket was the leader, promote a replacement or clear the slot."""
        current_leader = self._repository.get_current_leader(workflow_id)
        if not current_leader:
            return

        if current_leader != disconnected_sid:
            return

        new_leader_sid = self._select_graph_leader(workflow_id)
        if new_leader_sid:
            self._repository.set_leader(workflow_id, new_leader_sid)
            self.broadcast_leader_change(workflow_id, new_leader_sid)
        else:
            self._repository.delete_leader(workflow_id)

    def broadcast_leader_change(self, workflow_id: str, new_leader_sid: str | None) -> None:
        """Emit a per-socket ``status`` event telling each session whether it is the leader."""
        for sid in self._repository.get_session_sids(workflow_id):
            try:
                is_leader = new_leader_sid is not None and sid == new_leader_sid
                self._socketio.emit("status", {"isLeader": is_leader}, room=sid)
            except Exception:
                # Use the module logger (not the root logger) for consistency with
                # the rest of this module's logging.
                logger.exception("Failed to emit leader status to session %s", sid)

    def get_current_leader(self, workflow_id: str) -> str | None:
        """Return the currently recorded leader sid, or ``None``."""
        return self._repository.get_current_leader(workflow_id)

    def _prune_inactive_sessions(self, workflow_id: str) -> list[WorkflowSessionInfo]:
        """Remove inactive sessions from storage and return active sessions only."""
        sessions = self._repository.list_sessions(workflow_id)
        if not sessions:
            return []

        active_sessions: list[WorkflowSessionInfo] = []
        stale_sids: list[str] = []
        for session in sessions:
            sid = session["sid"]
            if self.is_session_active(workflow_id, sid):
                active_sessions.append(session)
            else:
                stale_sids.append(sid)

        for sid in stale_sids:
            self._repository.delete_session(workflow_id, sid)

        return active_sessions

    def broadcast_online_users(self, workflow_id: str) -> None:
        """Prune stale sessions, repair leadership if needed, and emit the presence list."""
        users = self._prune_inactive_sessions(workflow_id)
        users.sort(key=lambda x: x.get("connected_at") or 0)

        leader_sid = self.get_current_leader(workflow_id)
        previous_leader = leader_sid
        active_sids = {user["sid"] for user in users}
        if leader_sid and leader_sid not in active_sids:
            self._repository.delete_leader(workflow_id)
            leader_sid = None

        if not leader_sid and users:
            leader_sid = self._select_graph_leader(workflow_id)
            if leader_sid:
                self._repository.set_leader(workflow_id, leader_sid)

        if leader_sid != previous_leader:
            self.broadcast_leader_change(workflow_id, leader_sid)

        self._socketio.emit(
            "online_users",
            {"workflow_id": workflow_id, "users": users, "leader": leader_sid},
            room=workflow_id,
        )

    def refresh_session_state(self, workflow_id: str, sid: str) -> None:
        """Extend TTLs for this session and make sure a live leader exists."""
        self._repository.refresh_session_state(workflow_id, sid)
        self._ensure_leader(workflow_id, sid)

    def _ensure_leader(self, workflow_id: str, sid: str) -> None:
        """Keep the current leader alive, or promote *sid* if the leader is gone."""
        current_leader = self._repository.get_current_leader(workflow_id)
        if current_leader and self.is_session_active(workflow_id, current_leader):
            self._repository.expire_leader(workflow_id)
            return

        if current_leader:
            self._repository.delete_leader(workflow_id)

        self._repository.set_leader(workflow_id, sid)
        self.broadcast_leader_change(workflow_id, sid)

    def _select_graph_leader(self, workflow_id: str, preferred_sid: str | None = None) -> str | None:
        """Pick an active session to lead, favoring *preferred_sid* when eligible."""
        # NOTE(review): sessions never appear to store a "graph_active" key
        # (WorkflowSessionInfo has no such field), so this get() always defaults
        # to True — confirm whether the flag is set elsewhere or is vestigial.
        session_sids = [
            session["sid"]
            for session in self._repository.list_sessions(workflow_id)
            if session.get("graph_active", True) and self.is_session_active(workflow_id, session["sid"])
        ]
        if not session_sids:
            return None
        if preferred_sid and preferred_sid in session_sids:
            return preferred_sid
        return session_sids[0]

    def is_session_active(self, workflow_id: str, sid: str) -> bool:
        """True only if the socket is connected AND both redis records still exist."""
        if not sid:
            return False

        try:
            if not self._socketio.manager.is_connected(sid, "/"):
                return False
        except AttributeError:
            # Socket manager unavailable or sid unknown: treat as disconnected.
            return False

        if not self._repository.session_exists(workflow_id, sid):
            return False

        if not self._repository.sid_mapping_exists(sid):
            return False

        return True
|
||||
564
api/services/workflow_comment_service.py
Normal file
564
api/services/workflow_comment_service.py
Normal file
@ -0,0 +1,564 @@
|
||||
import logging
|
||||
from collections.abc import Sequence
|
||||
|
||||
from sqlalchemy import desc, select
|
||||
from sqlalchemy.orm import Session, selectinload
|
||||
from werkzeug.exceptions import Forbidden, NotFound
|
||||
|
||||
from configs import dify_config
|
||||
from extensions.ext_database import db
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.helper import uuid_value
|
||||
from models import App, TenantAccountJoin, WorkflowComment, WorkflowCommentMention, WorkflowCommentReply
|
||||
from models.account import Account
|
||||
from tasks.mail_workflow_comment_task import send_workflow_comment_mention_email_task
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WorkflowCommentService:
|
||||
"""Service for managing workflow comments."""
|
||||
|
||||
@staticmethod
|
||||
def _validate_content(content: str) -> None:
|
||||
if len(content.strip()) == 0:
|
||||
raise ValueError("Comment content cannot be empty")
|
||||
|
||||
if len(content) > 1000:
|
||||
raise ValueError("Comment content cannot exceed 1000 characters")
|
||||
|
||||
@staticmethod
|
||||
def _filter_valid_mentioned_user_ids(
|
||||
mentioned_user_ids: Sequence[str], *, session: Session, tenant_id: str
|
||||
) -> list[str]:
|
||||
"""Return deduplicated UUID user IDs that belong to the tenant, preserving input order."""
|
||||
unique_user_ids: list[str] = []
|
||||
seen: set[str] = set()
|
||||
for user_id in mentioned_user_ids:
|
||||
if not isinstance(user_id, str):
|
||||
continue
|
||||
if not uuid_value(user_id):
|
||||
continue
|
||||
if user_id in seen:
|
||||
continue
|
||||
seen.add(user_id)
|
||||
unique_user_ids.append(user_id)
|
||||
if not unique_user_ids:
|
||||
return []
|
||||
|
||||
tenant_member_ids = {
|
||||
str(account_id)
|
||||
for account_id in session.scalars(
|
||||
select(TenantAccountJoin.account_id).where(
|
||||
TenantAccountJoin.tenant_id == tenant_id,
|
||||
TenantAccountJoin.account_id.in_(unique_user_ids),
|
||||
)
|
||||
).all()
|
||||
}
|
||||
|
||||
return [user_id for user_id in unique_user_ids if user_id in tenant_member_ids]
|
||||
|
||||
@staticmethod
|
||||
def _format_comment_excerpt(content: str, max_length: int = 200) -> str:
|
||||
"""Trim comment content for email display."""
|
||||
trimmed = content.strip()
|
||||
if len(trimmed) <= max_length:
|
||||
return trimmed
|
||||
if max_length <= 3:
|
||||
return trimmed[:max_length]
|
||||
return f"{trimmed[: max_length - 3].rstrip()}..."
|
||||
|
||||
@staticmethod
|
||||
def _build_mention_email_payloads(
|
||||
session: Session,
|
||||
tenant_id: str,
|
||||
app_id: str,
|
||||
mentioner_id: str,
|
||||
mentioned_user_ids: Sequence[str],
|
||||
content: str,
|
||||
) -> list[dict[str, str]]:
|
||||
"""Prepare email payloads for mentioned users, including workflow app link."""
|
||||
if not mentioned_user_ids:
|
||||
return []
|
||||
|
||||
candidate_user_ids = [user_id for user_id in mentioned_user_ids if user_id != mentioner_id]
|
||||
if not candidate_user_ids:
|
||||
return []
|
||||
|
||||
app_name_value = session.scalar(select(App.name).where(App.id == app_id, App.tenant_id == tenant_id))
|
||||
app_name = app_name_value if isinstance(app_name_value, str) and app_name_value else "Dify app"
|
||||
commenter_name_value = session.scalar(select(Account.name).where(Account.id == mentioner_id))
|
||||
commenter_name = (
|
||||
commenter_name_value if isinstance(commenter_name_value, str) and commenter_name_value else "Dify user"
|
||||
)
|
||||
comment_excerpt = WorkflowCommentService._format_comment_excerpt(content)
|
||||
base_url = dify_config.CONSOLE_WEB_URL.rstrip("/")
|
||||
app_url = f"{base_url}/app/{app_id}/workflow"
|
||||
|
||||
accounts = session.scalars(
|
||||
select(Account)
|
||||
.join(TenantAccountJoin, TenantAccountJoin.account_id == Account.id)
|
||||
.where(TenantAccountJoin.tenant_id == tenant_id, Account.id.in_(candidate_user_ids))
|
||||
).all()
|
||||
|
||||
payloads: list[dict[str, str]] = []
|
||||
for account in accounts:
|
||||
email = account.email
|
||||
if not isinstance(email, str) or not email:
|
||||
continue
|
||||
mentioned_name = account.name if isinstance(account.name, str) and account.name else email
|
||||
language = (
|
||||
account.interface_language
|
||||
if isinstance(account.interface_language, str) and account.interface_language
|
||||
else "en-US"
|
||||
)
|
||||
payloads.append(
|
||||
{
|
||||
"language": language,
|
||||
"to": email,
|
||||
"mentioned_name": mentioned_name,
|
||||
"commenter_name": commenter_name,
|
||||
"app_name": app_name,
|
||||
"comment_content": comment_excerpt,
|
||||
"app_url": app_url,
|
||||
}
|
||||
)
|
||||
return payloads
|
||||
|
||||
@staticmethod
|
||||
def _dispatch_mention_emails(payloads: Sequence[dict[str, str]]) -> None:
|
||||
"""Enqueue mention notification emails."""
|
||||
for payload in payloads:
|
||||
send_workflow_comment_mention_email_task.delay(**payload)
|
||||
|
||||
@staticmethod
|
||||
def get_comments(tenant_id: str, app_id: str) -> Sequence[WorkflowComment]:
|
||||
"""Get all comments for a workflow."""
|
||||
with Session(db.engine) as session:
|
||||
# Get all comments with eager loading
|
||||
stmt = (
|
||||
select(WorkflowComment)
|
||||
.options(selectinload(WorkflowComment.replies), selectinload(WorkflowComment.mentions))
|
||||
.where(WorkflowComment.tenant_id == tenant_id, WorkflowComment.app_id == app_id)
|
||||
.order_by(desc(WorkflowComment.created_at))
|
||||
)
|
||||
|
||||
comments = session.scalars(stmt).all()
|
||||
|
||||
# Batch preload all Account objects to avoid N+1 queries
|
||||
WorkflowCommentService._preload_accounts(session, comments)
|
||||
|
||||
return comments
|
||||
|
||||
@staticmethod
|
||||
def _preload_accounts(session: Session, comments: Sequence[WorkflowComment]) -> None:
|
||||
"""Batch preload Account objects for comments, replies, and mentions."""
|
||||
# Collect all user IDs
|
||||
user_ids: set[str] = set()
|
||||
for comment in comments:
|
||||
user_ids.add(comment.created_by)
|
||||
if comment.resolved_by:
|
||||
user_ids.add(comment.resolved_by)
|
||||
user_ids.update(reply.created_by for reply in comment.replies)
|
||||
user_ids.update(mention.mentioned_user_id for mention in comment.mentions)
|
||||
|
||||
if not user_ids:
|
||||
return
|
||||
|
||||
# Batch query all accounts
|
||||
accounts = session.scalars(select(Account).where(Account.id.in_(user_ids))).all()
|
||||
account_map = {str(account.id): account for account in accounts}
|
||||
|
||||
# Cache accounts on objects
|
||||
for comment in comments:
|
||||
comment.cache_created_by_account(account_map.get(comment.created_by))
|
||||
comment.cache_resolved_by_account(account_map.get(comment.resolved_by) if comment.resolved_by else None)
|
||||
for reply in comment.replies:
|
||||
reply.cache_created_by_account(account_map.get(reply.created_by))
|
||||
for mention in comment.mentions:
|
||||
mention.cache_mentioned_user_account(account_map.get(mention.mentioned_user_id))
|
||||
|
||||
@staticmethod
|
||||
def get_comment(tenant_id: str, app_id: str, comment_id: str, session: Session | None = None) -> WorkflowComment:
|
||||
"""Get a specific comment."""
|
||||
|
||||
def _get_comment(session: Session) -> WorkflowComment:
|
||||
stmt = (
|
||||
select(WorkflowComment)
|
||||
.options(selectinload(WorkflowComment.replies), selectinload(WorkflowComment.mentions))
|
||||
.where(
|
||||
WorkflowComment.id == comment_id,
|
||||
WorkflowComment.tenant_id == tenant_id,
|
||||
WorkflowComment.app_id == app_id,
|
||||
)
|
||||
)
|
||||
comment = session.scalar(stmt)
|
||||
|
||||
if not comment:
|
||||
raise NotFound("Comment not found")
|
||||
|
||||
# Preload accounts to avoid N+1 queries
|
||||
WorkflowCommentService._preload_accounts(session, [comment])
|
||||
|
||||
return comment
|
||||
|
||||
if session is not None:
|
||||
return _get_comment(session)
|
||||
else:
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
return _get_comment(session)
|
||||
|
||||
@staticmethod
|
||||
def create_comment(
|
||||
tenant_id: str,
|
||||
app_id: str,
|
||||
created_by: str,
|
||||
content: str,
|
||||
position_x: float,
|
||||
position_y: float,
|
||||
mentioned_user_ids: list[str] | None = None,
|
||||
) -> dict:
|
||||
"""Create a new workflow comment and send mention notification emails."""
|
||||
WorkflowCommentService._validate_content(content)
|
||||
|
||||
with Session(db.engine) as session:
|
||||
comment = WorkflowComment(
|
||||
tenant_id=tenant_id,
|
||||
app_id=app_id,
|
||||
position_x=position_x,
|
||||
position_y=position_y,
|
||||
content=content,
|
||||
created_by=created_by,
|
||||
)
|
||||
|
||||
session.add(comment)
|
||||
session.flush() # Get the comment ID for mentions
|
||||
|
||||
# Create mentions if specified
|
||||
mentioned_user_ids = WorkflowCommentService._filter_valid_mentioned_user_ids(
|
||||
mentioned_user_ids or [],
|
||||
session=session,
|
||||
tenant_id=tenant_id,
|
||||
)
|
||||
for user_id in mentioned_user_ids:
|
||||
mention = WorkflowCommentMention(
|
||||
comment_id=comment.id,
|
||||
reply_id=None, # This is a comment mention, not reply mention
|
||||
mentioned_user_id=user_id,
|
||||
)
|
||||
session.add(mention)
|
||||
|
||||
mention_email_payloads = WorkflowCommentService._build_mention_email_payloads(
|
||||
session=session,
|
||||
tenant_id=tenant_id,
|
||||
app_id=app_id,
|
||||
mentioner_id=created_by,
|
||||
mentioned_user_ids=mentioned_user_ids,
|
||||
content=content,
|
||||
)
|
||||
|
||||
session.commit()
|
||||
WorkflowCommentService._dispatch_mention_emails(mention_email_payloads)
|
||||
|
||||
# Return only what we need - id and created_at
|
||||
return {"id": comment.id, "created_at": comment.created_at}
|
||||
|
||||
@staticmethod
|
||||
def update_comment(
|
||||
tenant_id: str,
|
||||
app_id: str,
|
||||
comment_id: str,
|
||||
user_id: str,
|
||||
content: str,
|
||||
position_x: float | None = None,
|
||||
position_y: float | None = None,
|
||||
mentioned_user_ids: list[str] | None = None,
|
||||
) -> dict:
|
||||
"""Update a workflow comment and notify newly mentioned users.
|
||||
|
||||
`mentioned_user_ids=None` means "leave mentions unchanged".
|
||||
Passing an explicit list replaces the existing comment mentions, including clearing them with `[]`.
|
||||
"""
|
||||
WorkflowCommentService._validate_content(content)
|
||||
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
# Get comment with validation
|
||||
stmt = select(WorkflowComment).where(
|
||||
WorkflowComment.id == comment_id,
|
||||
WorkflowComment.tenant_id == tenant_id,
|
||||
WorkflowComment.app_id == app_id,
|
||||
)
|
||||
comment = session.scalar(stmt)
|
||||
|
||||
if not comment:
|
||||
raise NotFound("Comment not found")
|
||||
|
||||
# Only the creator can update the comment
|
||||
if comment.created_by != user_id:
|
||||
raise Forbidden("Only the comment creator can update it")
|
||||
|
||||
# Update comment fields
|
||||
comment.content = content
|
||||
if position_x is not None:
|
||||
comment.position_x = position_x
|
||||
if position_y is not None:
|
||||
comment.position_y = position_y
|
||||
|
||||
mention_email_payloads: list[dict[str, str]] = []
|
||||
if mentioned_user_ids is not None:
|
||||
# Replace comment mentions only when the client explicitly sends the mention list.
|
||||
existing_mentions = session.scalars(
|
||||
select(WorkflowCommentMention).where(
|
||||
WorkflowCommentMention.comment_id == comment.id,
|
||||
WorkflowCommentMention.reply_id.is_(None), # Only comment mentions, not reply mentions
|
||||
)
|
||||
).all()
|
||||
existing_mentioned_user_ids = {mention.mentioned_user_id for mention in existing_mentions}
|
||||
for mention in existing_mentions:
|
||||
session.delete(mention)
|
||||
|
||||
filtered_mentioned_user_ids = WorkflowCommentService._filter_valid_mentioned_user_ids(
|
||||
mentioned_user_ids,
|
||||
session=session,
|
||||
tenant_id=tenant_id,
|
||||
)
|
||||
new_mentioned_user_ids = [
|
||||
mentioned_user_id
|
||||
for mentioned_user_id in filtered_mentioned_user_ids
|
||||
if mentioned_user_id not in existing_mentioned_user_ids
|
||||
]
|
||||
for mentioned_user_id in filtered_mentioned_user_ids:
|
||||
mention = WorkflowCommentMention(
|
||||
comment_id=comment.id,
|
||||
reply_id=None, # This is a comment mention
|
||||
mentioned_user_id=mentioned_user_id,
|
||||
)
|
||||
session.add(mention)
|
||||
|
||||
mention_email_payloads = WorkflowCommentService._build_mention_email_payloads(
|
||||
session=session,
|
||||
tenant_id=tenant_id,
|
||||
app_id=app_id,
|
||||
mentioner_id=user_id,
|
||||
mentioned_user_ids=new_mentioned_user_ids,
|
||||
content=content,
|
||||
)
|
||||
|
||||
session.commit()
|
||||
WorkflowCommentService._dispatch_mention_emails(mention_email_payloads)
|
||||
|
||||
return {"id": comment.id, "updated_at": comment.updated_at}
|
||||
|
||||
@staticmethod
|
||||
def delete_comment(tenant_id: str, app_id: str, comment_id: str, user_id: str) -> None:
|
||||
"""Delete a workflow comment."""
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
comment = WorkflowCommentService.get_comment(tenant_id, app_id, comment_id, session)
|
||||
|
||||
# Only the creator can delete the comment
|
||||
if comment.created_by != user_id:
|
||||
raise Forbidden("Only the comment creator can delete it")
|
||||
|
||||
# Delete associated mentions (both comment and reply mentions)
|
||||
mentions = session.scalars(
|
||||
select(WorkflowCommentMention).where(WorkflowCommentMention.comment_id == comment_id)
|
||||
).all()
|
||||
for mention in mentions:
|
||||
session.delete(mention)
|
||||
|
||||
# Delete associated replies
|
||||
replies = session.scalars(
|
||||
select(WorkflowCommentReply).where(WorkflowCommentReply.comment_id == comment_id)
|
||||
).all()
|
||||
for reply in replies:
|
||||
session.delete(reply)
|
||||
|
||||
session.delete(comment)
|
||||
session.commit()
|
||||
|
||||
@staticmethod
|
||||
def resolve_comment(tenant_id: str, app_id: str, comment_id: str, user_id: str) -> WorkflowComment:
|
||||
"""Resolve a workflow comment."""
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
comment = WorkflowCommentService.get_comment(tenant_id, app_id, comment_id, session)
|
||||
if comment.resolved:
|
||||
return comment
|
||||
|
||||
comment.resolved = True
|
||||
comment.resolved_at = naive_utc_now()
|
||||
comment.resolved_by = user_id
|
||||
session.commit()
|
||||
|
||||
return comment
|
||||
|
||||
@staticmethod
|
||||
def create_reply(
|
||||
comment_id: str, content: str, created_by: str, mentioned_user_ids: list[str] | None = None
|
||||
) -> dict:
|
||||
"""Add a reply to a workflow comment and notify mentioned users."""
|
||||
WorkflowCommentService._validate_content(content)
|
||||
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
# Check if comment exists
|
||||
comment = session.get(WorkflowComment, comment_id)
|
||||
if not comment:
|
||||
raise NotFound("Comment not found")
|
||||
|
||||
reply = WorkflowCommentReply(comment_id=comment_id, content=content, created_by=created_by)
|
||||
|
||||
session.add(reply)
|
||||
session.flush() # Get the reply ID for mentions
|
||||
|
||||
# Create mentions if specified
|
||||
mentioned_user_ids = WorkflowCommentService._filter_valid_mentioned_user_ids(
|
||||
mentioned_user_ids or [],
|
||||
session=session,
|
||||
tenant_id=comment.tenant_id,
|
||||
)
|
||||
for user_id in mentioned_user_ids:
|
||||
# Create mention linking to specific reply
|
||||
mention = WorkflowCommentMention(comment_id=comment_id, reply_id=reply.id, mentioned_user_id=user_id)
|
||||
session.add(mention)
|
||||
|
||||
mention_email_payloads = WorkflowCommentService._build_mention_email_payloads(
|
||||
session=session,
|
||||
tenant_id=comment.tenant_id,
|
||||
app_id=comment.app_id,
|
||||
mentioner_id=created_by,
|
||||
mentioned_user_ids=mentioned_user_ids,
|
||||
content=content,
|
||||
)
|
||||
|
||||
session.commit()
|
||||
WorkflowCommentService._dispatch_mention_emails(mention_email_payloads)
|
||||
|
||||
return {"id": reply.id, "created_at": reply.created_at}
|
||||
|
||||
@staticmethod
|
||||
def _get_reply_in_comment_scope(
|
||||
*,
|
||||
session: Session,
|
||||
tenant_id: str,
|
||||
app_id: str,
|
||||
comment_id: str,
|
||||
reply_id: str,
|
||||
) -> WorkflowCommentReply:
|
||||
"""Get a reply scoped to tenant/app/comment to prevent cross-thread mutations."""
|
||||
stmt = (
|
||||
select(WorkflowCommentReply)
|
||||
.join(WorkflowComment, WorkflowComment.id == WorkflowCommentReply.comment_id)
|
||||
.where(
|
||||
WorkflowCommentReply.id == reply_id,
|
||||
WorkflowCommentReply.comment_id == comment_id,
|
||||
WorkflowComment.tenant_id == tenant_id,
|
||||
WorkflowComment.app_id == app_id,
|
||||
)
|
||||
.limit(1)
|
||||
)
|
||||
reply = session.scalar(stmt)
|
||||
if not reply:
|
||||
raise NotFound("Reply not found")
|
||||
return reply
|
||||
|
||||
@staticmethod
|
||||
def update_reply(
|
||||
tenant_id: str,
|
||||
app_id: str,
|
||||
comment_id: str,
|
||||
reply_id: str,
|
||||
user_id: str,
|
||||
content: str,
|
||||
mentioned_user_ids: list[str] | None = None,
|
||||
) -> dict:
|
||||
"""Update a comment reply and notify newly mentioned users."""
|
||||
WorkflowCommentService._validate_content(content)
|
||||
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
reply = WorkflowCommentService._get_reply_in_comment_scope(
|
||||
session=session,
|
||||
tenant_id=tenant_id,
|
||||
app_id=app_id,
|
||||
comment_id=comment_id,
|
||||
reply_id=reply_id,
|
||||
)
|
||||
|
||||
# Only the creator can update the reply
|
||||
if reply.created_by != user_id:
|
||||
raise Forbidden("Only the reply creator can update it")
|
||||
|
||||
reply.content = content
|
||||
|
||||
# Update mentions - first remove existing mentions for this reply
|
||||
existing_mentions = session.scalars(
|
||||
select(WorkflowCommentMention).where(WorkflowCommentMention.reply_id == reply.id)
|
||||
).all()
|
||||
existing_mentioned_user_ids = {mention.mentioned_user_id for mention in existing_mentions}
|
||||
for mention in existing_mentions:
|
||||
session.delete(mention)
|
||||
|
||||
# Add mentions
|
||||
raw_mentioned_user_ids = mentioned_user_ids or []
|
||||
comment = session.get(WorkflowComment, reply.comment_id)
|
||||
mentioned_user_ids = []
|
||||
if comment:
|
||||
mentioned_user_ids = WorkflowCommentService._filter_valid_mentioned_user_ids(
|
||||
raw_mentioned_user_ids,
|
||||
session=session,
|
||||
tenant_id=comment.tenant_id,
|
||||
)
|
||||
new_mentioned_user_ids = [
|
||||
user_id for user_id in mentioned_user_ids if user_id not in existing_mentioned_user_ids
|
||||
]
|
||||
for user_id_str in mentioned_user_ids:
|
||||
mention = WorkflowCommentMention(
|
||||
comment_id=reply.comment_id, reply_id=reply.id, mentioned_user_id=user_id_str
|
||||
)
|
||||
session.add(mention)
|
||||
|
||||
mention_email_payloads: list[dict[str, str]] = []
|
||||
if comment:
|
||||
mention_email_payloads = WorkflowCommentService._build_mention_email_payloads(
|
||||
session=session,
|
||||
tenant_id=comment.tenant_id,
|
||||
app_id=comment.app_id,
|
||||
mentioner_id=user_id,
|
||||
mentioned_user_ids=new_mentioned_user_ids,
|
||||
content=content,
|
||||
)
|
||||
|
||||
session.commit()
|
||||
session.refresh(reply) # Refresh to get updated timestamp
|
||||
WorkflowCommentService._dispatch_mention_emails(mention_email_payloads)
|
||||
|
||||
return {"id": reply.id, "updated_at": reply.updated_at}
|
||||
|
||||
@staticmethod
|
||||
def delete_reply(tenant_id: str, app_id: str, comment_id: str, reply_id: str, user_id: str) -> None:
|
||||
"""Delete a comment reply."""
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
reply = WorkflowCommentService._get_reply_in_comment_scope(
|
||||
session=session,
|
||||
tenant_id=tenant_id,
|
||||
app_id=app_id,
|
||||
comment_id=comment_id,
|
||||
reply_id=reply_id,
|
||||
)
|
||||
|
||||
# Only the creator can delete the reply
|
||||
if reply.created_by != user_id:
|
||||
raise Forbidden("Only the reply creator can delete it")
|
||||
|
||||
# Delete associated mentions first
|
||||
mentions = session.scalars(
|
||||
select(WorkflowCommentMention).where(WorkflowCommentMention.reply_id == reply_id)
|
||||
).all()
|
||||
for mention in mentions:
|
||||
session.delete(mention)
|
||||
|
||||
session.delete(reply)
|
||||
session.commit()
|
||||
|
||||
@staticmethod
|
||||
def validate_comment_access(comment_id: str, tenant_id: str, app_id: str) -> WorkflowComment:
|
||||
"""Validate that a comment belongs to the specified tenant and app."""
|
||||
return WorkflowCommentService.get_comment(tenant_id, app_id, comment_id)
|
||||
@ -199,6 +199,16 @@ class WorkflowService:
|
||||
|
||||
return workflow
|
||||
|
||||
def get_accessible_app_ids(self, app_ids: Sequence[str], tenant_id: str) -> set[str]:
|
||||
"""
|
||||
Return app IDs that belong to the given tenant.
|
||||
"""
|
||||
if not app_ids:
|
||||
return set()
|
||||
|
||||
stmt = select(App.id).where(App.id.in_(app_ids), App.tenant_id == tenant_id)
|
||||
return {str(app_id) for app_id in db.session.scalars(stmt).all()}
|
||||
|
||||
def get_all_published_workflow(
|
||||
self,
|
||||
*,
|
||||
@ -296,6 +306,78 @@ class WorkflowService:
|
||||
# return draft workflow
|
||||
return workflow
|
||||
|
||||
def update_draft_workflow_environment_variables(
|
||||
self,
|
||||
*,
|
||||
app_model: App,
|
||||
environment_variables: Sequence[VariableBase],
|
||||
account: Account,
|
||||
):
|
||||
"""
|
||||
Update draft workflow environment variables
|
||||
"""
|
||||
# fetch draft workflow by app_model
|
||||
workflow = self.get_draft_workflow(app_model=app_model)
|
||||
|
||||
if not workflow:
|
||||
raise ValueError("No draft workflow found.")
|
||||
|
||||
workflow.environment_variables = environment_variables
|
||||
workflow.updated_by = account.id
|
||||
workflow.updated_at = naive_utc_now()
|
||||
|
||||
# commit db session changes
|
||||
db.session.commit()
|
||||
|
||||
def update_draft_workflow_conversation_variables(
|
||||
self,
|
||||
*,
|
||||
app_model: App,
|
||||
conversation_variables: Sequence[VariableBase],
|
||||
account: Account,
|
||||
):
|
||||
"""
|
||||
Update draft workflow conversation variables
|
||||
"""
|
||||
# fetch draft workflow by app_model
|
||||
workflow = self.get_draft_workflow(app_model=app_model)
|
||||
|
||||
if not workflow:
|
||||
raise ValueError("No draft workflow found.")
|
||||
|
||||
workflow.conversation_variables = conversation_variables
|
||||
workflow.updated_by = account.id
|
||||
workflow.updated_at = naive_utc_now()
|
||||
|
||||
# commit db session changes
|
||||
db.session.commit()
|
||||
|
||||
def update_draft_workflow_features(
|
||||
self,
|
||||
*,
|
||||
app_model: App,
|
||||
features: dict,
|
||||
account: Account,
|
||||
):
|
||||
"""
|
||||
Update draft workflow features
|
||||
"""
|
||||
# fetch draft workflow by app_model
|
||||
workflow = self.get_draft_workflow(app_model=app_model)
|
||||
|
||||
if not workflow:
|
||||
raise ValueError("No draft workflow found.")
|
||||
|
||||
# validate features structure
|
||||
self.validate_features_structure(app_model=app_model, features=features)
|
||||
|
||||
workflow.features = json.dumps(features)
|
||||
workflow.updated_by = account.id
|
||||
workflow.updated_at = naive_utc_now()
|
||||
|
||||
# commit db session changes
|
||||
db.session.commit()
|
||||
|
||||
def restore_published_workflow_to_draft(
|
||||
self,
|
||||
*,
|
||||
|
||||
65
api/tasks/mail_workflow_comment_task.py
Normal file
65
api/tasks/mail_workflow_comment_task.py
Normal file
@ -0,0 +1,65 @@
|
||||
import logging
|
||||
import time
|
||||
|
||||
import click
|
||||
from celery import shared_task
|
||||
|
||||
from extensions.ext_mail import mail
|
||||
from libs.email_i18n import EmailType, get_email_i18n_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task(queue="mail")
def send_workflow_comment_mention_email_task(
    language: str,
    to: str,
    mentioned_name: str,
    commenter_name: str,
    app_name: str,
    comment_content: str,
    app_url: str,
):
    """
    Send workflow comment mention email with internationalization support.

    Args:
        language: Language code for email localization
        to: Recipient email address
        mentioned_name: Name of the mentioned user
        commenter_name: Name of the comment author
        app_name: Name of the app where the comment was made
        comment_content: Comment content excerpt
        app_url: Link to the app workflow page

    NOTE(review): the mention templates also render ``{{ application_title }}``,
    which is not part of ``template_context`` here — presumably injected by the
    i18n email service; confirm, otherwise the link label renders empty.
    """
    # Mail subsystem not configured: silently skip, consistent with other mail tasks.
    if not mail.is_inited():
        return

    logger.info(click.style(f"Start workflow comment mention mail to {to}", fg="green"))
    started_at = time.perf_counter()

    try:
        get_email_i18n_service().send_email(
            email_type=EmailType.WORKFLOW_COMMENT_MENTION,
            language_code=language,
            to=to,
            template_context={
                "to": to,
                "mentioned_name": mentioned_name,
                "commenter_name": commenter_name,
                "app_name": app_name,
                "comment_content": comment_content,
                "app_url": app_url,
            },
        )
        elapsed = time.perf_counter() - started_at
        logger.info(
            click.style(
                f"Send workflow comment mention mail to {to} succeeded: latency: {elapsed}",
                fg="green",
            )
        )
    except Exception:
        # Log and swallow: a failed notification must not crash the worker.
        logger.exception("workflow comment mention email to %s failed", to)
||||
@ -0,0 +1,119 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<style>
|
||||
body {
|
||||
font-family: 'Arial', sans-serif;
|
||||
line-height: 16pt;
|
||||
color: #101828;
|
||||
background-color: #e9ebf0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.container {
|
||||
width: 504px;
|
||||
min-height: 374px;
|
||||
margin: 40px auto;
|
||||
padding: 0 48px;
|
||||
background-color: #fcfcfd;
|
||||
border-radius: 16px;
|
||||
border: 1px solid #ffffff;
|
||||
box-shadow: 0px 3px 10px -2px rgba(9, 9, 11, 0.08), 0px 2px 4px -2px rgba(9, 9, 11, 0.06);
|
||||
}
|
||||
|
||||
.header {
|
||||
padding-top: 36px;
|
||||
padding-bottom: 24px;
|
||||
}
|
||||
|
||||
.header img {
|
||||
max-width: 63px;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.title {
|
||||
margin: 0;
|
||||
padding-top: 8px;
|
||||
padding-bottom: 16px;
|
||||
color: #101828;
|
||||
font-size: 24px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 600;
|
||||
line-height: 120%;
|
||||
}
|
||||
|
||||
.description {
|
||||
color: #354052;
|
||||
font-size: 14px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
|
||||
.content1 {
|
||||
margin: 0;
|
||||
padding-top: 16px;
|
||||
padding-bottom: 8px;
|
||||
}
|
||||
|
||||
.content2 {
|
||||
margin: 0;
|
||||
padding-bottom: 16px;
|
||||
}
|
||||
|
||||
.comment-box {
|
||||
margin-bottom: 16px;
|
||||
padding: 12px 16px;
|
||||
border-radius: 12px;
|
||||
background-color: #f2f4f7;
|
||||
}
|
||||
|
||||
.comment-text {
|
||||
margin: 0;
|
||||
color: #101828;
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
line-height: 20px;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.tips {
|
||||
margin: 0;
|
||||
padding-bottom: 24px;
|
||||
color: #354052;
|
||||
font-size: 14px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class="container">
|
||||
<div class="header">
|
||||
<img src="https://assets.dify.ai/images/logo.png" alt="Dify Logo" />
|
||||
</div>
|
||||
<p class="title">You were mentioned in a workflow comment</p>
|
||||
<div class="description">
|
||||
<p class="content1">Hi {{ mentioned_name }},</p>
|
||||
<p class="content2">{{ commenter_name }} mentioned you in {{ app_name }}.</p>
|
||||
</div>
|
||||
<div class="comment-box">
|
||||
<p class="comment-text">{{ comment_content }}</p>
|
||||
</div>
|
||||
<p class="tips"><a href="{{ app_url }}" style="color: #155AEF; text-decoration: none;">Open {{ application_title }}</a> to reply to the comment.</p>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@ -0,0 +1,119 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<style>
|
||||
body {
|
||||
font-family: 'Arial', sans-serif;
|
||||
line-height: 16pt;
|
||||
color: #101828;
|
||||
background-color: #e9ebf0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.container {
|
||||
width: 504px;
|
||||
min-height: 374px;
|
||||
margin: 40px auto;
|
||||
padding: 0 48px;
|
||||
background-color: #fcfcfd;
|
||||
border-radius: 16px;
|
||||
border: 1px solid #ffffff;
|
||||
box-shadow: 0px 3px 10px -2px rgba(9, 9, 11, 0.08), 0px 2px 4px -2px rgba(9, 9, 11, 0.06);
|
||||
}
|
||||
|
||||
.header {
|
||||
padding-top: 36px;
|
||||
padding-bottom: 24px;
|
||||
}
|
||||
|
||||
.header img {
|
||||
max-width: 63px;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.title {
|
||||
margin: 0;
|
||||
padding-top: 8px;
|
||||
padding-bottom: 16px;
|
||||
color: #101828;
|
||||
font-size: 24px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 600;
|
||||
line-height: 120%;
|
||||
}
|
||||
|
||||
.description {
|
||||
color: #354052;
|
||||
font-size: 14px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
|
||||
.content1 {
|
||||
margin: 0;
|
||||
padding-top: 16px;
|
||||
padding-bottom: 8px;
|
||||
}
|
||||
|
||||
.content2 {
|
||||
margin: 0;
|
||||
padding-bottom: 16px;
|
||||
}
|
||||
|
||||
.comment-box {
|
||||
margin-bottom: 16px;
|
||||
padding: 12px 16px;
|
||||
border-radius: 12px;
|
||||
background-color: #f2f4f7;
|
||||
}
|
||||
|
||||
.comment-text {
|
||||
margin: 0;
|
||||
color: #101828;
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
line-height: 20px;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.tips {
|
||||
margin: 0;
|
||||
padding-bottom: 24px;
|
||||
color: #354052;
|
||||
font-size: 14px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class="container">
|
||||
<div class="header">
|
||||
<img src="https://assets.dify.ai/images/logo.png" alt="Dify Logo" />
|
||||
</div>
|
||||
<p class="title">你在工作流评论中被提及</p>
|
||||
<div class="description">
|
||||
<p class="content1">你好,{{ mentioned_name }}:</p>
|
||||
<p class="content2">{{ commenter_name }} 在 {{ app_name }} 中提及了你。</p>
|
||||
</div>
|
||||
<div class="comment-box">
|
||||
<p class="comment-text">{{ comment_content }}</p>
|
||||
</div>
|
||||
<p class="tips">请在 <a href="{{ app_url }}" style="color: #155AEF; text-decoration: none;">{{ application_title }}</a> 中查看并回复此评论。</p>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
119
api/templates/workflow_comment_mention_template_en-US.html
Normal file
119
api/templates/workflow_comment_mention_template_en-US.html
Normal file
@ -0,0 +1,119 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<style>
|
||||
body {
|
||||
font-family: 'Arial', sans-serif;
|
||||
line-height: 16pt;
|
||||
color: #101828;
|
||||
background-color: #e9ebf0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.container {
|
||||
width: 504px;
|
||||
min-height: 374px;
|
||||
margin: 40px auto;
|
||||
padding: 0 48px;
|
||||
background-color: #fcfcfd;
|
||||
border-radius: 16px;
|
||||
border: 1px solid #ffffff;
|
||||
box-shadow: 0px 3px 10px -2px rgba(9, 9, 11, 0.08), 0px 2px 4px -2px rgba(9, 9, 11, 0.06);
|
||||
}
|
||||
|
||||
.header {
|
||||
padding-top: 36px;
|
||||
padding-bottom: 24px;
|
||||
}
|
||||
|
||||
.header img {
|
||||
max-width: 63px;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.title {
|
||||
margin: 0;
|
||||
padding-top: 8px;
|
||||
padding-bottom: 16px;
|
||||
color: #101828;
|
||||
font-size: 24px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 600;
|
||||
line-height: 120%;
|
||||
}
|
||||
|
||||
.description {
|
||||
color: #354052;
|
||||
font-size: 14px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
|
||||
.content1 {
|
||||
margin: 0;
|
||||
padding-top: 16px;
|
||||
padding-bottom: 8px;
|
||||
}
|
||||
|
||||
.content2 {
|
||||
margin: 0;
|
||||
padding-bottom: 16px;
|
||||
}
|
||||
|
||||
.comment-box {
|
||||
margin-bottom: 16px;
|
||||
padding: 12px 16px;
|
||||
border-radius: 12px;
|
||||
background-color: #f2f4f7;
|
||||
}
|
||||
|
||||
.comment-text {
|
||||
margin: 0;
|
||||
color: #101828;
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
line-height: 20px;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.tips {
|
||||
margin: 0;
|
||||
padding-bottom: 24px;
|
||||
color: #354052;
|
||||
font-size: 14px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class="container">
|
||||
<div class="header">
|
||||
<img src="https://assets.dify.ai/images/logo.png" alt="Dify Logo" />
|
||||
</div>
|
||||
<p class="title">You were mentioned in a workflow comment</p>
|
||||
<div class="description">
|
||||
<p class="content1">Hi {{ mentioned_name }},</p>
|
||||
<p class="content2">{{ commenter_name }} mentioned you in {{ app_name }}.</p>
|
||||
</div>
|
||||
<div class="comment-box">
|
||||
<p class="comment-text">{{ comment_content }}</p>
|
||||
</div>
|
||||
<p class="tips"><a href="{{ app_url }}" style="color: #155AEF; text-decoration: none;">Open {{ application_title }}</a> to reply to the comment.</p>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
119
api/templates/workflow_comment_mention_template_zh-CN.html
Normal file
119
api/templates/workflow_comment_mention_template_zh-CN.html
Normal file
@ -0,0 +1,119 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<style>
|
||||
body {
|
||||
font-family: 'Arial', sans-serif;
|
||||
line-height: 16pt;
|
||||
color: #101828;
|
||||
background-color: #e9ebf0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.container {
|
||||
width: 504px;
|
||||
min-height: 374px;
|
||||
margin: 40px auto;
|
||||
padding: 0 48px;
|
||||
background-color: #fcfcfd;
|
||||
border-radius: 16px;
|
||||
border: 1px solid #ffffff;
|
||||
box-shadow: 0px 3px 10px -2px rgba(9, 9, 11, 0.08), 0px 2px 4px -2px rgba(9, 9, 11, 0.06);
|
||||
}
|
||||
|
||||
.header {
|
||||
padding-top: 36px;
|
||||
padding-bottom: 24px;
|
||||
}
|
||||
|
||||
.header img {
|
||||
max-width: 63px;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.title {
|
||||
margin: 0;
|
||||
padding-top: 8px;
|
||||
padding-bottom: 16px;
|
||||
color: #101828;
|
||||
font-size: 24px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 600;
|
||||
line-height: 120%;
|
||||
}
|
||||
|
||||
.description {
|
||||
color: #354052;
|
||||
font-size: 14px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
|
||||
.content1 {
|
||||
margin: 0;
|
||||
padding-top: 16px;
|
||||
padding-bottom: 8px;
|
||||
}
|
||||
|
||||
.content2 {
|
||||
margin: 0;
|
||||
padding-bottom: 16px;
|
||||
}
|
||||
|
||||
.comment-box {
|
||||
margin-bottom: 16px;
|
||||
padding: 12px 16px;
|
||||
border-radius: 12px;
|
||||
background-color: #f2f4f7;
|
||||
}
|
||||
|
||||
.comment-text {
|
||||
margin: 0;
|
||||
color: #101828;
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
line-height: 20px;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.tips {
|
||||
margin: 0;
|
||||
padding-bottom: 24px;
|
||||
color: #354052;
|
||||
font-size: 14px;
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class="container">
|
||||
<div class="header">
|
||||
<img src="https://assets.dify.ai/images/logo.png" alt="Dify Logo" />
|
||||
</div>
|
||||
<p class="title">你在工作流评论中被提及</p>
|
||||
<div class="description">
|
||||
<p class="content1">你好,{{ mentioned_name }}:</p>
|
||||
<p class="content2">{{ commenter_name }} 在 {{ app_name }} 中提及了你。</p>
|
||||
</div>
|
||||
<div class="comment-box">
|
||||
<p class="comment-text">{{ comment_content }}</p>
|
||||
</div>
|
||||
<p class="tips">请在 <a href="{{ app_url }}" style="color: #155AEF; text-decoration: none;">{{ application_title }}</a> 中查看并回复此评论。</p>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@ -48,7 +48,7 @@ os.environ["OPENDAL_FS_ROOT"] = "/tmp/dify-storage"
|
||||
os.environ.setdefault("STORAGE_TYPE", "opendal")
|
||||
os.environ.setdefault("OPENDAL_SCHEME", "fs")
|
||||
|
||||
_CACHED_APP = create_app()
|
||||
_SIO_APP, _CACHED_APP = create_app()
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
|
||||
@ -1,47 +0,0 @@
|
||||
import uuid
|
||||
from unittest import mock
|
||||
|
||||
from controllers.console.app import workflow_draft_variable as draft_variable_api
|
||||
from controllers.console.app import wraps
|
||||
from factories.variable_factory import build_segment
|
||||
from models import App, AppMode
|
||||
from models.workflow import WorkflowDraftVariable
|
||||
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
|
||||
|
||||
|
||||
def _get_mock_srv_class() -> type[WorkflowDraftVariableService]:
|
||||
return mock.create_autospec(WorkflowDraftVariableService)
|
||||
|
||||
|
||||
class TestWorkflowDraftNodeVariableListApi:
|
||||
def test_get(self, test_client, auth_header, monkeypatch):
|
||||
srv_class = _get_mock_srv_class()
|
||||
mock_app_model: App = App()
|
||||
mock_app_model.id = str(uuid.uuid4())
|
||||
test_node_id = "test_node_id"
|
||||
mock_app_model.mode = AppMode.ADVANCED_CHAT
|
||||
mock_load_app_model = mock.Mock(return_value=mock_app_model)
|
||||
|
||||
monkeypatch.setattr(draft_variable_api, "WorkflowDraftVariableService", srv_class)
|
||||
monkeypatch.setattr(wraps, "_load_app_model", mock_load_app_model)
|
||||
|
||||
var1 = WorkflowDraftVariable.new_node_variable(
|
||||
app_id="test_app_1",
|
||||
node_id="test_node_1",
|
||||
name="str_var",
|
||||
value=build_segment("str_value"),
|
||||
node_execution_id=str(uuid.uuid4()),
|
||||
)
|
||||
srv_instance = mock.create_autospec(WorkflowDraftVariableService, instance=True)
|
||||
srv_class.return_value = srv_instance
|
||||
srv_instance.list_node_variables.return_value = WorkflowDraftVariableList(variables=[var1])
|
||||
|
||||
response = test_client.get(
|
||||
f"/console/api/apps/{mock_app_model.id}/workflows/draft/nodes/{test_node_id}/variables",
|
||||
headers=auth_header,
|
||||
)
|
||||
assert response.status_code == 200
|
||||
response_dict = response.json
|
||||
assert isinstance(response_dict, dict)
|
||||
assert "items" in response_dict
|
||||
assert len(response_dict["items"]) == 1
|
||||
@ -1,244 +0,0 @@
|
||||
"""Integration tests for Trigger Provider subscription permission verification."""
|
||||
|
||||
import uuid
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
from flask.testing import FlaskClient
|
||||
|
||||
from controllers.console.workspace import trigger_providers as trigger_providers_api
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from models import Tenant
|
||||
from models.account import Account, TenantAccountJoin, TenantAccountRole
|
||||
|
||||
|
||||
class TestTriggerProviderSubscriptionPermissions:
|
||||
"""Test permission verification for Trigger Provider subscription endpoints."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_account(self, monkeypatch: pytest.MonkeyPatch):
|
||||
"""Create a mock Account for testing."""
|
||||
|
||||
account = Account(name="Test User", email="test@example.com")
|
||||
account.id = str(uuid.uuid4())
|
||||
account.last_active_at = naive_utc_now()
|
||||
account.created_at = naive_utc_now()
|
||||
account.updated_at = naive_utc_now()
|
||||
|
||||
# Create mock tenant
|
||||
tenant = Tenant(name="Test Tenant")
|
||||
tenant.id = str(uuid.uuid4())
|
||||
|
||||
mock_session_instance = mock.Mock()
|
||||
|
||||
mock_tenant_join = TenantAccountJoin(role=TenantAccountRole.OWNER)
|
||||
monkeypatch.setattr(mock_session_instance, "scalar", mock.Mock(return_value=mock_tenant_join))
|
||||
|
||||
mock_scalars_result = mock.Mock()
|
||||
mock_scalars_result.one.return_value = tenant
|
||||
monkeypatch.setattr(mock_session_instance, "scalars", mock.Mock(return_value=mock_scalars_result))
|
||||
|
||||
mock_session_context = mock.Mock()
|
||||
mock_session_context.__enter__.return_value = mock_session_instance
|
||||
monkeypatch.setattr("models.account.Session", lambda _, expire_on_commit: mock_session_context)
|
||||
|
||||
account.current_tenant = tenant
|
||||
account.current_tenant_id = tenant.id
|
||||
return account
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("role", "list_status", "get_status", "update_status", "create_status", "build_status", "delete_status"),
|
||||
[
|
||||
# Admin/Owner can do everything
|
||||
(TenantAccountRole.OWNER, 200, 200, 200, 200, 200, 200),
|
||||
(TenantAccountRole.ADMIN, 200, 200, 200, 200, 200, 200),
|
||||
# Editor can list, get, update (parameters), but not create, build, or delete
|
||||
(TenantAccountRole.EDITOR, 200, 200, 200, 403, 403, 403),
|
||||
# Normal user cannot do anything
|
||||
(TenantAccountRole.NORMAL, 403, 403, 403, 403, 403, 403),
|
||||
# Dataset operator cannot do anything
|
||||
(TenantAccountRole.DATASET_OPERATOR, 403, 403, 403, 403, 403, 403),
|
||||
],
|
||||
)
|
||||
def test_trigger_subscription_permissions(
|
||||
self,
|
||||
test_client: FlaskClient,
|
||||
auth_header,
|
||||
monkeypatch,
|
||||
mock_account,
|
||||
role: TenantAccountRole,
|
||||
list_status: int,
|
||||
get_status: int,
|
||||
update_status: int,
|
||||
create_status: int,
|
||||
build_status: int,
|
||||
delete_status: int,
|
||||
):
|
||||
"""Test that different roles have appropriate permissions for trigger subscription operations."""
|
||||
# Set user role
|
||||
mock_account.role = role
|
||||
|
||||
# Mock current user
|
||||
monkeypatch.setattr(trigger_providers_api, "current_user", mock_account)
|
||||
|
||||
# Mock AccountService.load_user to prevent authentication issues
|
||||
from services.account_service import AccountService
|
||||
|
||||
mock_load_user = mock.Mock(return_value=mock_account)
|
||||
monkeypatch.setattr(AccountService, "load_user", mock_load_user)
|
||||
|
||||
# Test data
|
||||
provider = "some_provider/some_trigger"
|
||||
subscription_builder_id = str(uuid.uuid4())
|
||||
subscription_id = str(uuid.uuid4())
|
||||
|
||||
# Mock service methods
|
||||
mock_list_subscriptions = mock.Mock(return_value=[])
|
||||
monkeypatch.setattr(
|
||||
"services.trigger.trigger_provider_service.TriggerProviderService.list_trigger_provider_subscriptions",
|
||||
mock_list_subscriptions,
|
||||
)
|
||||
|
||||
mock_get_subscription_builder = mock.Mock(return_value={"id": subscription_builder_id})
|
||||
monkeypatch.setattr(
|
||||
"services.trigger.trigger_subscription_builder_service.TriggerSubscriptionBuilderService.get_subscription_builder_by_id",
|
||||
mock_get_subscription_builder,
|
||||
)
|
||||
|
||||
mock_update_subscription_builder = mock.Mock(return_value={"id": subscription_builder_id})
|
||||
monkeypatch.setattr(
|
||||
"services.trigger.trigger_subscription_builder_service.TriggerSubscriptionBuilderService.update_trigger_subscription_builder",
|
||||
mock_update_subscription_builder,
|
||||
)
|
||||
|
||||
mock_create_subscription_builder = mock.Mock(return_value={"id": subscription_builder_id})
|
||||
monkeypatch.setattr(
|
||||
"services.trigger.trigger_subscription_builder_service.TriggerSubscriptionBuilderService.create_trigger_subscription_builder",
|
||||
mock_create_subscription_builder,
|
||||
)
|
||||
|
||||
mock_update_and_build_builder = mock.Mock()
|
||||
monkeypatch.setattr(
|
||||
"services.trigger.trigger_subscription_builder_service.TriggerSubscriptionBuilderService.update_and_build_builder",
|
||||
mock_update_and_build_builder,
|
||||
)
|
||||
|
||||
mock_delete_provider = mock.Mock()
|
||||
mock_delete_plugin_trigger = mock.Mock()
|
||||
mock_db_session = mock.Mock()
|
||||
mock_db_session.commit = mock.Mock()
|
||||
|
||||
def mock_session_func(engine=None):
|
||||
return mock_session_context
|
||||
|
||||
mock_session_context = mock.Mock()
|
||||
mock_session_context.__enter__.return_value = mock_db_session
|
||||
mock_session_context.__exit__.return_value = None
|
||||
|
||||
monkeypatch.setattr("services.trigger.trigger_provider_service.Session", mock_session_func)
|
||||
monkeypatch.setattr("services.trigger.trigger_subscription_operator_service.Session", mock_session_func)
|
||||
|
||||
monkeypatch.setattr(
|
||||
"services.trigger.trigger_provider_service.TriggerProviderService.delete_trigger_provider",
|
||||
mock_delete_provider,
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
"services.trigger.trigger_subscription_operator_service.TriggerSubscriptionOperatorService.delete_plugin_trigger_by_subscription",
|
||||
mock_delete_plugin_trigger,
|
||||
)
|
||||
|
||||
# Test 1: List subscriptions (should work for Editor, Admin, Owner)
|
||||
response = test_client.get(
|
||||
f"/console/api/workspaces/current/trigger-provider/{provider}/subscriptions/list",
|
||||
headers=auth_header,
|
||||
)
|
||||
assert response.status_code == list_status
|
||||
|
||||
# Test 2: Get subscription builder (should work for Editor, Admin, Owner)
|
||||
response = test_client.get(
|
||||
f"/console/api/workspaces/current/trigger-provider/{provider}/subscriptions/builder/{subscription_builder_id}",
|
||||
headers=auth_header,
|
||||
)
|
||||
assert response.status_code == get_status
|
||||
|
||||
# Test 3: Update subscription builder parameters (should work for Editor, Admin, Owner)
|
||||
response = test_client.post(
|
||||
f"/console/api/workspaces/current/trigger-provider/{provider}/subscriptions/builder/update/{subscription_builder_id}",
|
||||
headers=auth_header,
|
||||
json={"parameters": {"webhook_url": "https://example.com/webhook"}},
|
||||
)
|
||||
assert response.status_code == update_status
|
||||
|
||||
# Test 4: Create subscription builder (should only work for Admin, Owner)
|
||||
response = test_client.post(
|
||||
f"/console/api/workspaces/current/trigger-provider/{provider}/subscriptions/builder/create",
|
||||
headers=auth_header,
|
||||
json={"credential_type": "api_key"},
|
||||
)
|
||||
assert response.status_code == create_status
|
||||
|
||||
# Test 5: Build/activate subscription (should only work for Admin, Owner)
|
||||
response = test_client.post(
|
||||
f"/console/api/workspaces/current/trigger-provider/{provider}/subscriptions/builder/build/{subscription_builder_id}",
|
||||
headers=auth_header,
|
||||
json={"name": "Test Subscription"},
|
||||
)
|
||||
assert response.status_code == build_status
|
||||
|
||||
# Test 6: Delete subscription (should only work for Admin, Owner)
|
||||
response = test_client.post(
|
||||
f"/console/api/workspaces/current/trigger-provider/{subscription_id}/subscriptions/delete",
|
||||
headers=auth_header,
|
||||
)
|
||||
assert response.status_code == delete_status
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("role", "status"),
|
||||
[
|
||||
(TenantAccountRole.OWNER, 200),
|
||||
(TenantAccountRole.ADMIN, 200),
|
||||
# Editor should be able to access logs for debugging
|
||||
(TenantAccountRole.EDITOR, 200),
|
||||
(TenantAccountRole.NORMAL, 403),
|
||||
(TenantAccountRole.DATASET_OPERATOR, 403),
|
||||
],
|
||||
)
|
||||
def test_trigger_subscription_logs_permissions(
|
||||
self,
|
||||
test_client: FlaskClient,
|
||||
auth_header,
|
||||
monkeypatch,
|
||||
mock_account,
|
||||
role: TenantAccountRole,
|
||||
status: int,
|
||||
):
|
||||
"""Test that different roles have appropriate permissions for accessing subscription logs."""
|
||||
# Set user role
|
||||
mock_account.role = role
|
||||
|
||||
# Mock current user
|
||||
monkeypatch.setattr(trigger_providers_api, "current_user", mock_account)
|
||||
|
||||
# Mock AccountService.load_user to prevent authentication issues
|
||||
from services.account_service import AccountService
|
||||
|
||||
mock_load_user = mock.Mock(return_value=mock_account)
|
||||
monkeypatch.setattr(AccountService, "load_user", mock_load_user)
|
||||
|
||||
# Test data
|
||||
provider = "some_provider/some_trigger"
|
||||
subscription_builder_id = str(uuid.uuid4())
|
||||
|
||||
# Mock service method
|
||||
mock_list_logs = mock.Mock(return_value=[])
|
||||
monkeypatch.setattr(
|
||||
"services.trigger.trigger_subscription_builder_service.TriggerSubscriptionBuilderService.list_logs",
|
||||
mock_list_logs,
|
||||
)
|
||||
|
||||
# Test access to logs
|
||||
response = test_client.get(
|
||||
f"/console/api/workspaces/current/trigger-provider/{provider}/subscriptions/builder/logs/{subscription_builder_id}",
|
||||
headers=auth_header,
|
||||
)
|
||||
assert response.status_code == status
|
||||
@ -1,375 +0,0 @@
|
||||
import unittest
|
||||
from datetime import UTC, datetime
|
||||
from unittest.mock import patch
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from graphon.file import File, FileTransferMethod, FileType
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from core.app.file_access import DatabaseFileAccessController
|
||||
from extensions.ext_database import db
|
||||
from extensions.storage.storage_type import StorageType
|
||||
from factories.file_factory import StorageKeyLoader
|
||||
from models import ToolFile, UploadFile
|
||||
from models.enums import CreatorUserRole
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("flask_req_ctx")
|
||||
class TestStorageKeyLoader(unittest.TestCase):
|
||||
"""
|
||||
Integration tests for StorageKeyLoader class.
|
||||
|
||||
Tests the batched loading of storage keys from the database for files
|
||||
with different transfer methods: LOCAL_FILE, REMOTE_URL, and TOOL_FILE.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
"""Set up test data before each test method."""
|
||||
self.session = db.session()
|
||||
self.tenant_id = str(uuid4())
|
||||
self.user_id = str(uuid4())
|
||||
self.conversation_id = str(uuid4())
|
||||
|
||||
# Create test data that will be cleaned up after each test
|
||||
self.test_upload_files = []
|
||||
self.test_tool_files = []
|
||||
|
||||
# Create StorageKeyLoader instance
|
||||
self.loader = StorageKeyLoader(
|
||||
self.session,
|
||||
self.tenant_id,
|
||||
access_controller=DatabaseFileAccessController(),
|
||||
)
|
||||
|
||||
def tearDown(self):
|
||||
"""Clean up test data after each test method."""
|
||||
self.session.rollback()
|
||||
|
||||
def _create_upload_file(
|
||||
self, file_id: str | None = None, storage_key: str | None = None, tenant_id: str | None = None
|
||||
) -> UploadFile:
|
||||
"""Helper method to create an UploadFile record for testing."""
|
||||
if file_id is None:
|
||||
file_id = str(uuid4())
|
||||
if storage_key is None:
|
||||
storage_key = f"test_storage_key_{uuid4()}"
|
||||
if tenant_id is None:
|
||||
tenant_id = self.tenant_id
|
||||
|
||||
upload_file = UploadFile(
|
||||
tenant_id=tenant_id,
|
||||
storage_type=StorageType.LOCAL,
|
||||
key=storage_key,
|
||||
name="test_file.txt",
|
||||
size=1024,
|
||||
extension=".txt",
|
||||
mime_type="text/plain",
|
||||
created_by_role=CreatorUserRole.ACCOUNT,
|
||||
created_by=self.user_id,
|
||||
created_at=datetime.now(UTC),
|
||||
used=False,
|
||||
)
|
||||
upload_file.id = file_id
|
||||
|
||||
self.session.add(upload_file)
|
||||
self.session.flush()
|
||||
self.test_upload_files.append(upload_file)
|
||||
|
||||
return upload_file
|
||||
|
||||
def _create_tool_file(
|
||||
self, file_id: str | None = None, file_key: str | None = None, tenant_id: str | None = None
|
||||
) -> ToolFile:
|
||||
"""Helper method to create a ToolFile record for testing."""
|
||||
if file_id is None:
|
||||
file_id = str(uuid4())
|
||||
if file_key is None:
|
||||
file_key = f"test_file_key_{uuid4()}"
|
||||
if tenant_id is None:
|
||||
tenant_id = self.tenant_id
|
||||
|
||||
tool_file = ToolFile(
|
||||
user_id=self.user_id,
|
||||
tenant_id=tenant_id,
|
||||
conversation_id=self.conversation_id,
|
||||
file_key=file_key,
|
||||
mimetype="text/plain",
|
||||
original_url="http://example.com/file.txt",
|
||||
name="test_tool_file.txt",
|
||||
size=2048,
|
||||
)
|
||||
tool_file.id = file_id
|
||||
self.session.add(tool_file)
|
||||
self.session.flush()
|
||||
self.test_tool_files.append(tool_file)
|
||||
|
||||
return tool_file
|
||||
|
||||
def _create_file(self, related_id: str, transfer_method: FileTransferMethod, tenant_id: str | None = None) -> File:
|
||||
"""Helper method to create a File object for testing."""
|
||||
if tenant_id is None:
|
||||
tenant_id = self.tenant_id
|
||||
|
||||
# Set related_id for LOCAL_FILE and TOOL_FILE transfer methods
|
||||
file_related_id = None
|
||||
remote_url = None
|
||||
|
||||
if transfer_method in (FileTransferMethod.LOCAL_FILE, FileTransferMethod.TOOL_FILE):
|
||||
file_related_id = related_id
|
||||
elif transfer_method == FileTransferMethod.REMOTE_URL:
|
||||
remote_url = "https://example.com/test_file.txt"
|
||||
file_related_id = related_id
|
||||
|
||||
return File(
|
||||
id=str(uuid4()), # Generate new UUID for File.id
|
||||
tenant_id=tenant_id,
|
||||
type=FileType.DOCUMENT,
|
||||
transfer_method=transfer_method,
|
||||
related_id=file_related_id,
|
||||
remote_url=remote_url,
|
||||
filename="test_file.txt",
|
||||
extension=".txt",
|
||||
mime_type="text/plain",
|
||||
size=1024,
|
||||
storage_key="initial_key",
|
||||
)
|
||||
|
||||
def test_load_storage_keys_local_file(self):
|
||||
"""Test loading storage keys for LOCAL_FILE transfer method."""
|
||||
# Create test data
|
||||
upload_file = self._create_upload_file()
|
||||
file = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
|
||||
|
||||
# Load storage keys
|
||||
self.loader.load_storage_keys([file])
|
||||
|
||||
# Verify storage key was loaded correctly
|
||||
assert file._storage_key == upload_file.key
|
||||
|
||||
def test_load_storage_keys_remote_url(self):
|
||||
"""Test loading storage keys for REMOTE_URL transfer method."""
|
||||
# Create test data
|
||||
upload_file = self._create_upload_file()
|
||||
file = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.REMOTE_URL)
|
||||
|
||||
# Load storage keys
|
||||
self.loader.load_storage_keys([file])
|
||||
|
||||
# Verify storage key was loaded correctly
|
||||
assert file._storage_key == upload_file.key
|
||||
|
||||
def test_load_storage_keys_tool_file(self):
|
||||
"""Test loading storage keys for TOOL_FILE transfer method."""
|
||||
# Create test data
|
||||
tool_file = self._create_tool_file()
|
||||
file = self._create_file(related_id=tool_file.id, transfer_method=FileTransferMethod.TOOL_FILE)
|
||||
|
||||
# Load storage keys
|
||||
self.loader.load_storage_keys([file])
|
||||
|
||||
# Verify storage key was loaded correctly
|
||||
assert file._storage_key == tool_file.file_key
|
||||
|
||||
def test_load_storage_keys_mixed_methods(self):
|
||||
"""Test batch loading with mixed transfer methods."""
|
||||
# Create test data for different transfer methods
|
||||
upload_file1 = self._create_upload_file()
|
||||
upload_file2 = self._create_upload_file()
|
||||
tool_file = self._create_tool_file()
|
||||
|
||||
file1 = self._create_file(related_id=upload_file1.id, transfer_method=FileTransferMethod.LOCAL_FILE)
|
||||
file2 = self._create_file(related_id=upload_file2.id, transfer_method=FileTransferMethod.REMOTE_URL)
|
||||
file3 = self._create_file(related_id=tool_file.id, transfer_method=FileTransferMethod.TOOL_FILE)
|
||||
|
||||
files = [file1, file2, file3]
|
||||
|
||||
# Load storage keys
|
||||
self.loader.load_storage_keys(files)
|
||||
|
||||
# Verify all storage keys were loaded correctly
|
||||
assert file1._storage_key == upload_file1.key
|
||||
assert file2._storage_key == upload_file2.key
|
||||
assert file3._storage_key == tool_file.file_key
|
||||
|
||||
def test_load_storage_keys_empty_list(self):
|
||||
"""Test with empty file list."""
|
||||
# Should not raise any exceptions
|
||||
self.loader.load_storage_keys([])
|
||||
|
||||
def test_load_storage_keys_ignores_legacy_file_tenant_id(self):
|
||||
"""Legacy file tenant_id should not override the loader tenant scope."""
|
||||
upload_file = self._create_upload_file()
|
||||
file = self._create_file(
|
||||
related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE, tenant_id=str(uuid4())
|
||||
)
|
||||
|
||||
self.loader.load_storage_keys([file])
|
||||
|
||||
assert file._storage_key == upload_file.key
|
||||
|
||||
def test_load_storage_keys_missing_file_id(self):
|
||||
"""Test with None file.related_id."""
|
||||
# Create a file with valid parameters first, then manually set related_id to None
|
||||
file = self._create_file(related_id=str(uuid4()), transfer_method=FileTransferMethod.LOCAL_FILE)
|
||||
file.related_id = None
|
||||
|
||||
# Should raise ValueError for None file related_id
|
||||
with pytest.raises(ValueError) as context:
|
||||
self.loader.load_storage_keys([file])
|
||||
|
||||
assert str(context.value) == "file id should not be None."
|
||||
|
||||
def test_load_storage_keys_nonexistent_upload_file_records(self):
|
||||
"""Test with missing UploadFile database records."""
|
||||
# Create file with non-existent upload file id
|
||||
non_existent_id = str(uuid4())
|
||||
file = self._create_file(related_id=non_existent_id, transfer_method=FileTransferMethod.LOCAL_FILE)
|
||||
|
||||
# Should raise ValueError for missing record
|
||||
with pytest.raises(ValueError):
|
||||
self.loader.load_storage_keys([file])
|
||||
|
||||
def test_load_storage_keys_nonexistent_tool_file_records(self):
|
||||
"""Test with missing ToolFile database records."""
|
||||
# Create file with non-existent tool file id
|
||||
non_existent_id = str(uuid4())
|
||||
file = self._create_file(related_id=non_existent_id, transfer_method=FileTransferMethod.TOOL_FILE)
|
||||
|
||||
# Should raise ValueError for missing record
|
||||
with pytest.raises(ValueError):
|
||||
self.loader.load_storage_keys([file])
|
||||
|
||||
def test_load_storage_keys_invalid_uuid(self):
|
||||
"""Test with invalid UUID format."""
|
||||
# Create a file with valid parameters first, then manually set invalid related_id
|
||||
file = self._create_file(related_id=str(uuid4()), transfer_method=FileTransferMethod.LOCAL_FILE)
|
||||
file.related_id = "invalid-uuid-format"
|
||||
|
||||
# Should raise ValueError for invalid UUID
|
||||
with pytest.raises(ValueError):
|
||||
self.loader.load_storage_keys([file])
|
||||
|
||||
def test_load_storage_keys_batch_efficiency(self):
|
||||
"""Test batched operations use efficient queries."""
|
||||
# Create multiple files of different types
|
||||
upload_files = [self._create_upload_file() for _ in range(3)]
|
||||
tool_files = [self._create_tool_file() for _ in range(2)]
|
||||
|
||||
files = []
|
||||
files.extend(
|
||||
[self._create_file(related_id=uf.id, transfer_method=FileTransferMethod.LOCAL_FILE) for uf in upload_files]
|
||||
)
|
||||
files.extend(
|
||||
[self._create_file(related_id=tf.id, transfer_method=FileTransferMethod.TOOL_FILE) for tf in tool_files]
|
||||
)
|
||||
|
||||
# Mock the session to count queries
|
||||
with patch.object(self.session, "scalars", wraps=self.session.scalars) as mock_scalars:
|
||||
self.loader.load_storage_keys(files)
|
||||
|
||||
# Should make exactly 2 queries (one for upload_files, one for tool_files)
|
||||
assert mock_scalars.call_count == 2
|
||||
|
||||
# Verify all storage keys were loaded correctly
|
||||
for i, file in enumerate(files[:3]):
|
||||
assert file._storage_key == upload_files[i].key
|
||||
for i, file in enumerate(files[3:]):
|
||||
assert file._storage_key == tool_files[i].file_key
|
||||
|
||||
def test_load_storage_keys_tenant_isolation(self):
|
||||
"""Test that tenant isolation works correctly."""
|
||||
# Create files for different tenants
|
||||
other_tenant_id = str(uuid4())
|
||||
|
||||
# Create upload file for current tenant
|
||||
upload_file_current = self._create_upload_file()
|
||||
file_current = self._create_file(
|
||||
related_id=upload_file_current.id, transfer_method=FileTransferMethod.LOCAL_FILE
|
||||
)
|
||||
|
||||
# Create upload file for other tenant (but don't add to cleanup list)
|
||||
upload_file_other = UploadFile(
|
||||
tenant_id=other_tenant_id,
|
||||
storage_type=StorageType.LOCAL,
|
||||
key="other_tenant_key",
|
||||
name="other_file.txt",
|
||||
size=1024,
|
||||
extension=".txt",
|
||||
mime_type="text/plain",
|
||||
created_by_role=CreatorUserRole.ACCOUNT,
|
||||
created_by=self.user_id,
|
||||
created_at=datetime.now(UTC),
|
||||
used=False,
|
||||
)
|
||||
upload_file_other.id = str(uuid4())
|
||||
self.session.add(upload_file_other)
|
||||
self.session.flush()
|
||||
|
||||
# Create file for other tenant but try to load with current tenant's loader
|
||||
file_other = self._create_file(
|
||||
related_id=upload_file_other.id, transfer_method=FileTransferMethod.LOCAL_FILE, tenant_id=other_tenant_id
|
||||
)
|
||||
|
||||
# Should raise ValueError due to tenant mismatch
|
||||
with pytest.raises(ValueError) as context:
|
||||
self.loader.load_storage_keys([file_other])
|
||||
|
||||
assert "Upload file not found for id:" in str(context.value)
|
||||
|
||||
# Current tenant's file should still work
|
||||
self.loader.load_storage_keys([file_current])
|
||||
assert file_current._storage_key == upload_file_current.key
|
||||
|
||||
def test_load_storage_keys_mixed_tenant_batch(self):
|
||||
"""Test batch with mixed tenant files (should fail on first mismatch)."""
|
||||
# Create files for current tenant
|
||||
upload_file_current = self._create_upload_file()
|
||||
file_current = self._create_file(
|
||||
related_id=upload_file_current.id, transfer_method=FileTransferMethod.LOCAL_FILE
|
||||
)
|
||||
|
||||
# Create file for different tenant
|
||||
other_tenant_id = str(uuid4())
|
||||
file_other = self._create_file(
|
||||
related_id=str(uuid4()), transfer_method=FileTransferMethod.LOCAL_FILE, tenant_id=other_tenant_id
|
||||
)
|
||||
|
||||
# Should raise ValueError on tenant mismatch
|
||||
with pytest.raises(ValueError) as context:
|
||||
self.loader.load_storage_keys([file_current, file_other])
|
||||
|
||||
assert "Upload file not found for id:" in str(context.value)
|
||||
|
||||
def test_load_storage_keys_duplicate_file_ids(self):
|
||||
"""Test handling of duplicate file IDs in the batch."""
|
||||
# Create upload file
|
||||
upload_file = self._create_upload_file()
|
||||
|
||||
# Create two File objects with same related_id
|
||||
file1 = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
|
||||
file2 = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
|
||||
|
||||
# Should handle duplicates gracefully
|
||||
self.loader.load_storage_keys([file1, file2])
|
||||
|
||||
# Both files should have the same storage key
|
||||
assert file1._storage_key == upload_file.key
|
||||
assert file2._storage_key == upload_file.key
|
||||
|
||||
def test_load_storage_keys_session_isolation(self):
|
||||
"""Test that the loader uses the provided session correctly."""
|
||||
# Create test data
|
||||
upload_file = self._create_upload_file()
|
||||
file = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
|
||||
|
||||
# Create loader with different session (same underlying connection)
|
||||
|
||||
with Session(bind=db.engine) as other_session:
|
||||
other_loader = StorageKeyLoader(
|
||||
other_session,
|
||||
self.tenant_id,
|
||||
access_controller=DatabaseFileAccessController(),
|
||||
)
|
||||
with pytest.raises(ValueError):
|
||||
other_loader.load_storage_keys([file])
|
||||
@ -1,95 +0,0 @@
|
||||
import base64
|
||||
|
||||
from core.helper.code_executor.code_executor import CodeExecutor, CodeLanguage
|
||||
from core.helper.code_executor.jinja2.jinja2_transformer import Jinja2TemplateTransformer
|
||||
|
||||
CODE_LANGUAGE = CodeLanguage.JINJA2
|
||||
|
||||
|
||||
def test_jinja2():
|
||||
"""Test basic Jinja2 template rendering."""
|
||||
template = "Hello {{template}}"
|
||||
# Template must be base64 encoded to match the new safe embedding approach
|
||||
template_b64 = base64.b64encode(template.encode("utf-8")).decode("utf-8")
|
||||
inputs = base64.b64encode(b'{"template": "World"}').decode("utf-8")
|
||||
code = (
|
||||
Jinja2TemplateTransformer.get_runner_script()
|
||||
.replace(Jinja2TemplateTransformer._template_b64_placeholder, template_b64)
|
||||
.replace(Jinja2TemplateTransformer._inputs_placeholder, inputs)
|
||||
)
|
||||
result = CodeExecutor.execute_code(
|
||||
language=CODE_LANGUAGE, preload=Jinja2TemplateTransformer.get_preload_script(), code=code
|
||||
)
|
||||
assert result == "<<RESULT>>Hello World<<RESULT>>\n"
|
||||
|
||||
|
||||
def test_jinja2_with_code_template():
|
||||
"""Test template rendering via the high-level workflow API."""
|
||||
result = CodeExecutor.execute_workflow_code_template(
|
||||
language=CODE_LANGUAGE, code="Hello {{template}}", inputs={"template": "World"}
|
||||
)
|
||||
assert result == {"result": "Hello World"}
|
||||
|
||||
|
||||
def test_jinja2_get_runner_script():
|
||||
"""Test that runner script contains required placeholders."""
|
||||
runner_script = Jinja2TemplateTransformer.get_runner_script()
|
||||
assert runner_script.count(Jinja2TemplateTransformer._template_b64_placeholder) == 1
|
||||
assert runner_script.count(Jinja2TemplateTransformer._inputs_placeholder) == 1
|
||||
assert runner_script.count(Jinja2TemplateTransformer._result_tag) == 2
|
||||
|
||||
|
||||
def test_jinja2_template_with_special_characters():
|
||||
"""
|
||||
Test that templates with special characters (quotes, newlines) render correctly.
|
||||
This is a regression test for issue #26818 where textarea pre-fill values
|
||||
containing special characters would break template rendering.
|
||||
"""
|
||||
# Template with triple quotes, single quotes, double quotes, and newlines
|
||||
template = """<html>
|
||||
<body>
|
||||
<input value="{{ task.get('Task ID', '') }}"/>
|
||||
<textarea>{{ task.get('Issues', 'No issues reported') }}</textarea>
|
||||
<p>Status: "{{ status }}"</p>
|
||||
<pre>'''code block'''</pre>
|
||||
</body>
|
||||
</html>"""
|
||||
inputs = {"task": {"Task ID": "TASK-123", "Issues": "Line 1\nLine 2\nLine 3"}, "status": "completed"}
|
||||
|
||||
result = CodeExecutor.execute_workflow_code_template(language=CODE_LANGUAGE, code=template, inputs=inputs)
|
||||
|
||||
# Verify the template rendered correctly with all special characters
|
||||
output = result["result"]
|
||||
assert 'value="TASK-123"' in output
|
||||
assert "<textarea>Line 1\nLine 2\nLine 3</textarea>" in output
|
||||
assert 'Status: "completed"' in output
|
||||
assert "'''code block'''" in output
|
||||
|
||||
|
||||
def test_jinja2_template_with_html_textarea_prefill():
|
||||
"""
|
||||
Specific test for HTML textarea with Jinja2 variable pre-fill.
|
||||
Verifies fix for issue #26818.
|
||||
"""
|
||||
template = "<textarea name='notes'>{{ notes }}</textarea>"
|
||||
notes_content = "This is a multi-line note.\nWith special chars: 'single' and \"double\" quotes."
|
||||
inputs = {"notes": notes_content}
|
||||
|
||||
result = CodeExecutor.execute_workflow_code_template(language=CODE_LANGUAGE, code=template, inputs=inputs)
|
||||
|
||||
expected_output = f"<textarea name='notes'>{notes_content}</textarea>"
|
||||
assert result["result"] == expected_output
|
||||
|
||||
|
||||
def test_jinja2_assemble_runner_script_encodes_template():
|
||||
"""Test that assemble_runner_script properly base64 encodes the template."""
|
||||
template = "Hello {{ name }}!"
|
||||
inputs = {"name": "World"}
|
||||
|
||||
script = Jinja2TemplateTransformer.assemble_runner_script(template, inputs)
|
||||
|
||||
# The template should be base64 encoded in the script
|
||||
template_b64 = base64.b64encode(template.encode("utf-8")).decode("utf-8")
|
||||
assert template_b64 in script
|
||||
# The raw template should NOT appear in the script (it's encoded)
|
||||
assert "Hello {{ name }}!" not in script
|
||||
@ -369,7 +369,7 @@ def _create_app_with_containers() -> Flask:
|
||||
|
||||
# Create and configure the Flask application
|
||||
logger.info("Initializing Flask application...")
|
||||
app = create_app()
|
||||
sio_app, app = create_app()
|
||||
logger.info("Flask application created successfully")
|
||||
|
||||
# Initialize database schema
|
||||
|
||||
@ -12,7 +12,7 @@ from constants import HEADER_NAME_CSRF_TOKEN
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.token import _real_cookie_name, generate_csrf_token
|
||||
from models import Account, DifySetup, Tenant, TenantAccountJoin
|
||||
from models.account import AccountStatus, TenantAccountRole
|
||||
from models.account import AccountStatus, TenantAccountRole, TenantStatus
|
||||
from models.enums import ConversationFromSource, CreatorUserRole
|
||||
from models.model import App, AppMode, Conversation, Message
|
||||
from models.workflow import WorkflowRun
|
||||
@ -30,7 +30,7 @@ def _create_account_and_tenant(db_session: Session) -> tuple[Account, Tenant]:
|
||||
db_session.add(account)
|
||||
db_session.commit()
|
||||
|
||||
tenant = Tenant(name="Test Tenant", status="normal")
|
||||
tenant = Tenant(name="Test Tenant", status=TenantStatus.NORMAL)
|
||||
db_session.add(tenant)
|
||||
db_session.commit()
|
||||
|
||||
|
||||
@ -11,7 +11,7 @@ from constants import HEADER_NAME_CSRF_TOKEN
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from libs.token import _real_cookie_name, generate_csrf_token
|
||||
from models import Account, DifySetup, Tenant, TenantAccountJoin
|
||||
from models.account import AccountStatus, TenantAccountRole
|
||||
from models.account import AccountStatus, TenantAccountRole, TenantStatus
|
||||
from models.model import App, AppMode
|
||||
from services.account_service import AccountService
|
||||
|
||||
@ -37,7 +37,7 @@ def create_console_account_and_tenant(db_session: Session) -> tuple[Account, Ten
|
||||
db_session.add(account)
|
||||
db_session.commit()
|
||||
|
||||
tenant = Tenant(name="Test Tenant", status="normal")
|
||||
tenant = Tenant(name="Test Tenant", status=TenantStatus.NORMAL)
|
||||
db_session.add(tenant)
|
||||
db_session.commit()
|
||||
|
||||
|
||||
@ -88,11 +88,11 @@ class TestPauseStatePersistenceLayerTestContainers:
|
||||
def setup_test_data(self, db_session_with_containers, file_service, workflow_run_service):
|
||||
"""Set up test data for each test method using TestContainers."""
|
||||
# Create test tenant and account
|
||||
from models.account import Tenant, TenantAccountJoin, TenantAccountRole
|
||||
from models.account import AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole, TenantStatus
|
||||
|
||||
tenant = Tenant(
|
||||
name="Test Tenant",
|
||||
status="normal",
|
||||
status=TenantStatus.NORMAL,
|
||||
)
|
||||
db_session_with_containers.add(tenant)
|
||||
db_session_with_containers.commit()
|
||||
@ -101,7 +101,7 @@ class TestPauseStatePersistenceLayerTestContainers:
|
||||
email="test@example.com",
|
||||
name="Test User",
|
||||
interface_language="en-US",
|
||||
status="active",
|
||||
status=AccountStatus.ACTIVE,
|
||||
)
|
||||
db_session_with_containers.add(account)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
@ -18,7 +18,14 @@ from core.workflow.human_input_compat import (
|
||||
MemberRecipient,
|
||||
WebAppDeliveryMethod,
|
||||
)
|
||||
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
|
||||
from models.account import (
|
||||
Account,
|
||||
AccountStatus,
|
||||
Tenant,
|
||||
TenantAccountJoin,
|
||||
TenantAccountRole,
|
||||
TenantStatus,
|
||||
)
|
||||
from models.human_input import (
|
||||
EmailExternalRecipientPayload,
|
||||
EmailMemberRecipientPayload,
|
||||
@ -29,7 +36,7 @@ from models.human_input import (
|
||||
|
||||
|
||||
def _create_tenant_with_members(session: Session, member_emails: list[str]) -> tuple[Tenant, list[Account]]:
|
||||
tenant = Tenant(name="Test Tenant", status="normal")
|
||||
tenant = Tenant(name="Test Tenant", status=TenantStatus.NORMAL)
|
||||
session.add(tenant)
|
||||
session.flush()
|
||||
|
||||
@ -39,7 +46,7 @@ def _create_tenant_with_members(session: Session, member_emails: list[str]) -> t
|
||||
email=email,
|
||||
name=f"Member {index}",
|
||||
interface_language="en-US",
|
||||
status="active",
|
||||
status=AccountStatus.ACTIVE,
|
||||
)
|
||||
session.add(account)
|
||||
session.flush()
|
||||
|
||||
@ -29,7 +29,7 @@ from core.workflow.node_runtime import DifyHumanInputNodeRuntime
|
||||
from core.workflow.system_variables import build_system_variables
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from models import Account
|
||||
from models.account import Tenant, TenantAccountJoin, TenantAccountRole
|
||||
from models.account import AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole, TenantStatus
|
||||
from models.enums import CreatorUserRole, WorkflowRunTriggeredFrom
|
||||
from models.model import App, AppMode, IconType
|
||||
from models.workflow import Workflow, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom, WorkflowRun
|
||||
@ -175,7 +175,7 @@ class TestHumanInputResumeNodeExecutionIntegration:
|
||||
def setup_test_data(self, db_session_with_containers: Session):
|
||||
tenant = Tenant(
|
||||
name="Test Tenant",
|
||||
status="normal",
|
||||
status=TenantStatus.NORMAL,
|
||||
)
|
||||
db_session_with_containers.add(tenant)
|
||||
db_session_with_containers.commit()
|
||||
@ -184,7 +184,7 @@ class TestHumanInputResumeNodeExecutionIntegration:
|
||||
email="test@example.com",
|
||||
name="Test User",
|
||||
interface_language="en-US",
|
||||
status="active",
|
||||
status=AccountStatus.ACTIVE,
|
||||
)
|
||||
db_session_with_containers.add(account)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
@ -9,7 +9,7 @@ from werkzeug.exceptions import Unauthorized
|
||||
|
||||
from configs import dify_config
|
||||
from controllers.console.error import AccountNotFound, NotAllowedCreateWorkspace
|
||||
from models import AccountStatus, TenantAccountJoin
|
||||
from models import AccountStatus, TenantAccountJoin, TenantStatus
|
||||
from services.account_service import AccountService, RegisterService, TenantService, TokenPair
|
||||
from services.errors.account import (
|
||||
AccountAlreadyInTenantError,
|
||||
@ -2851,7 +2851,7 @@ class TestRegisterService:
|
||||
interface_language="en-US",
|
||||
password=existing_pending_member_password,
|
||||
)
|
||||
existing_account.status = "pending"
|
||||
existing_account.status = AccountStatus.PENDING
|
||||
|
||||
db_session_with_containers.commit()
|
||||
|
||||
@ -2941,7 +2941,7 @@ class TestRegisterService:
|
||||
interface_language="en-US",
|
||||
password=already_in_tenant_password,
|
||||
)
|
||||
existing_account.status = "active"
|
||||
existing_account.status = AccountStatus.ACTIVE
|
||||
|
||||
db_session_with_containers.commit()
|
||||
|
||||
@ -3331,7 +3331,7 @@ class TestRegisterService:
|
||||
TenantService.create_tenant_member(tenant, account, role="normal")
|
||||
|
||||
# Change tenant status to non-normal
|
||||
tenant.status = "archive"
|
||||
tenant.status = TenantStatus.ARCHIVE
|
||||
|
||||
db_session_with_containers.commit()
|
||||
|
||||
|
||||
@ -7,7 +7,14 @@ from graphon.model_runtime.entities.model_entities import ModelType
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from core.rag.index_processor.constant.index_type import IndexTechniqueType
|
||||
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
|
||||
from models.account import (
|
||||
Account,
|
||||
AccountStatus,
|
||||
Tenant,
|
||||
TenantAccountJoin,
|
||||
TenantAccountRole,
|
||||
TenantStatus,
|
||||
)
|
||||
from models.dataset import Dataset, ExternalKnowledgeApis, ExternalKnowledgeBindings
|
||||
from models.enums import DataSourceType
|
||||
from services.dataset_service import DatasetService
|
||||
@ -26,12 +33,12 @@ class DatasetUpdateTestDataFactory:
|
||||
email=f"{uuid4()}@example.com",
|
||||
name=f"user-{uuid4()}",
|
||||
interface_language="en-US",
|
||||
status="active",
|
||||
status=AccountStatus.ACTIVE,
|
||||
)
|
||||
db_session_with_containers.add(account)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
tenant = Tenant(name=f"tenant-{account.id}", status="normal")
|
||||
tenant = Tenant(name=f"tenant-{account.id}", status=TenantStatus.NORMAL)
|
||||
db_session_with_containers.add(tenant)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
|
||||
@ -274,6 +274,7 @@ class TestFeatureService:
|
||||
mock_config.ENABLE_EMAIL_CODE_LOGIN = True
|
||||
mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
|
||||
mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
|
||||
mock_config.ENABLE_COLLABORATION_MODE = True
|
||||
mock_config.ALLOW_REGISTER = False
|
||||
mock_config.ALLOW_CREATE_WORKSPACE = False
|
||||
mock_config.MAIL_TYPE = "smtp"
|
||||
@ -298,6 +299,7 @@ class TestFeatureService:
|
||||
# Verify authentication settings
|
||||
assert result.enable_email_code_login is True
|
||||
assert result.enable_email_password_login is False
|
||||
assert result.enable_collaboration_mode is True
|
||||
assert result.is_allow_register is False
|
||||
assert result.is_allow_create_workspace is False
|
||||
|
||||
@ -401,6 +403,7 @@ class TestFeatureService:
|
||||
mock_config.ENABLE_EMAIL_CODE_LOGIN = True
|
||||
mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
|
||||
mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
|
||||
mock_config.ENABLE_COLLABORATION_MODE = False
|
||||
mock_config.ALLOW_REGISTER = True
|
||||
mock_config.ALLOW_CREATE_WORKSPACE = True
|
||||
mock_config.MAIL_TYPE = "smtp"
|
||||
@ -422,6 +425,7 @@ class TestFeatureService:
|
||||
assert result.enable_email_code_login is True
|
||||
assert result.enable_email_password_login is True
|
||||
assert result.enable_social_oauth_login is False
|
||||
assert result.enable_collaboration_mode is False
|
||||
assert result.is_allow_register is True
|
||||
assert result.is_allow_create_workspace is True
|
||||
assert result.is_email_setup is True
|
||||
|
||||
@ -0,0 +1,507 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import MagicMock, patch
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from core.trigger.constants import TRIGGER_WEBHOOK_NODE_TYPE
|
||||
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
|
||||
from models.enums import AppTriggerStatus, AppTriggerType
|
||||
from models.model import App
|
||||
from models.trigger import AppTrigger, WorkflowWebhookTrigger
|
||||
from models.workflow import Workflow
|
||||
from services.errors.app import QuotaExceededError
|
||||
from services.trigger.webhook_service import WebhookService
|
||||
|
||||
|
||||
class WebhookServiceRelationshipFactory:
|
||||
@staticmethod
|
||||
def create_account_and_tenant(db_session_with_containers: Session) -> tuple[Account, Tenant]:
|
||||
account = Account(
|
||||
name=f"Account {uuid4()}",
|
||||
email=f"webhook-{uuid4()}@example.com",
|
||||
password="hashed-password",
|
||||
password_salt="salt",
|
||||
interface_language="en-US",
|
||||
timezone="UTC",
|
||||
)
|
||||
db_session_with_containers.add(account)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
tenant = Tenant(name=f"Tenant {uuid4()}", plan="basic", status="normal")
|
||||
db_session_with_containers.add(tenant)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
join = TenantAccountJoin(
|
||||
tenant_id=tenant.id,
|
||||
account_id=account.id,
|
||||
role=TenantAccountRole.OWNER,
|
||||
current=True,
|
||||
)
|
||||
db_session_with_containers.add(join)
|
||||
db_session_with_containers.commit()
|
||||
|
||||
account.current_tenant = tenant
|
||||
return account, tenant
|
||||
|
||||
@staticmethod
|
||||
def create_app(db_session_with_containers: Session, tenant: Tenant, account: Account) -> App:
|
||||
app = App(
|
||||
tenant_id=tenant.id,
|
||||
name=f"Webhook App {uuid4()}",
|
||||
description="",
|
||||
mode="workflow",
|
||||
icon_type="emoji",
|
||||
icon="bot",
|
||||
icon_background="#FFFFFF",
|
||||
enable_site=False,
|
||||
enable_api=True,
|
||||
api_rpm=100,
|
||||
api_rph=100,
|
||||
is_demo=False,
|
||||
is_public=False,
|
||||
is_universal=False,
|
||||
created_by=account.id,
|
||||
updated_by=account.id,
|
||||
)
|
||||
db_session_with_containers.add(app)
|
||||
db_session_with_containers.commit()
|
||||
return app
|
||||
|
||||
@staticmethod
|
||||
def create_workflow(
|
||||
db_session_with_containers: Session,
|
||||
*,
|
||||
app: App,
|
||||
account: Account,
|
||||
node_ids: list[str],
|
||||
version: str,
|
||||
) -> Workflow:
|
||||
graph = {
|
||||
"nodes": [
|
||||
{
|
||||
"id": node_id,
|
||||
"data": {
|
||||
"type": TRIGGER_WEBHOOK_NODE_TYPE,
|
||||
"title": f"Webhook {node_id}",
|
||||
"method": "post",
|
||||
"content_type": "application/json",
|
||||
"headers": [],
|
||||
"params": [],
|
||||
"body": [],
|
||||
"status_code": 200,
|
||||
"response_body": '{"status": "ok"}',
|
||||
"timeout": 30,
|
||||
},
|
||||
}
|
||||
for node_id in node_ids
|
||||
],
|
||||
"edges": [],
|
||||
}
|
||||
|
||||
workflow = Workflow(
|
||||
tenant_id=app.tenant_id,
|
||||
app_id=app.id,
|
||||
type="workflow",
|
||||
graph=json.dumps(graph),
|
||||
features=json.dumps({}),
|
||||
created_by=account.id,
|
||||
updated_by=account.id,
|
||||
environment_variables=[],
|
||||
conversation_variables=[],
|
||||
version=version,
|
||||
)
|
||||
db_session_with_containers.add(workflow)
|
||||
db_session_with_containers.commit()
|
||||
return workflow
|
||||
|
||||
@staticmethod
|
||||
def create_webhook_trigger(
|
||||
db_session_with_containers: Session,
|
||||
*,
|
||||
app: App,
|
||||
account: Account,
|
||||
node_id: str,
|
||||
webhook_id: str | None = None,
|
||||
) -> WorkflowWebhookTrigger:
|
||||
webhook_trigger = WorkflowWebhookTrigger(
|
||||
app_id=app.id,
|
||||
node_id=node_id,
|
||||
tenant_id=app.tenant_id,
|
||||
webhook_id=webhook_id or uuid4().hex[:24],
|
||||
created_by=account.id,
|
||||
)
|
||||
db_session_with_containers.add(webhook_trigger)
|
||||
db_session_with_containers.commit()
|
||||
return webhook_trigger
|
||||
|
||||
@staticmethod
|
||||
def create_app_trigger(
|
||||
db_session_with_containers: Session,
|
||||
*,
|
||||
app: App,
|
||||
node_id: str,
|
||||
status: AppTriggerStatus,
|
||||
) -> AppTrigger:
|
||||
app_trigger = AppTrigger(
|
||||
tenant_id=app.tenant_id,
|
||||
app_id=app.id,
|
||||
node_id=node_id,
|
||||
trigger_type=AppTriggerType.TRIGGER_WEBHOOK,
|
||||
provider_name="webhook",
|
||||
title=f"Webhook {node_id}",
|
||||
status=status,
|
||||
)
|
||||
db_session_with_containers.add(app_trigger)
|
||||
db_session_with_containers.commit()
|
||||
return app_trigger
|
||||
|
||||
|
||||
class TestWebhookServiceLookupWithContainers:
|
||||
def test_get_webhook_trigger_and_workflow_raises_when_app_trigger_missing(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
factory.create_workflow(
|
||||
db_session_with_containers, app=app, account=account, node_ids=["node-1"], version="2026-04-14.001"
|
||||
)
|
||||
webhook_trigger = factory.create_webhook_trigger(
|
||||
db_session_with_containers, app=app, account=account, node_id="node-1"
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError, match="App trigger not found"):
|
||||
WebhookService.get_webhook_trigger_and_workflow(webhook_trigger.webhook_id)
|
||||
|
||||
def test_get_webhook_trigger_and_workflow_raises_when_app_trigger_rate_limited(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
factory.create_workflow(
|
||||
db_session_with_containers, app=app, account=account, node_ids=["node-1"], version="2026-04-14.001"
|
||||
)
|
||||
webhook_trigger = factory.create_webhook_trigger(
|
||||
db_session_with_containers, app=app, account=account, node_id="node-1"
|
||||
)
|
||||
factory.create_app_trigger(
|
||||
db_session_with_containers, app=app, node_id="node-1", status=AppTriggerStatus.RATE_LIMITED
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError, match="rate limited"):
|
||||
WebhookService.get_webhook_trigger_and_workflow(webhook_trigger.webhook_id)
|
||||
|
||||
def test_get_webhook_trigger_and_workflow_raises_when_app_trigger_disabled(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
factory.create_workflow(
|
||||
db_session_with_containers, app=app, account=account, node_ids=["node-1"], version="2026-04-14.001"
|
||||
)
|
||||
webhook_trigger = factory.create_webhook_trigger(
|
||||
db_session_with_containers, app=app, account=account, node_id="node-1"
|
||||
)
|
||||
factory.create_app_trigger(
|
||||
db_session_with_containers, app=app, node_id="node-1", status=AppTriggerStatus.DISABLED
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError, match="disabled"):
|
||||
WebhookService.get_webhook_trigger_and_workflow(webhook_trigger.webhook_id)
|
||||
|
||||
def test_get_webhook_trigger_and_workflow_raises_when_workflow_missing(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
webhook_trigger = factory.create_webhook_trigger(
|
||||
db_session_with_containers, app=app, account=account, node_id="node-1"
|
||||
)
|
||||
factory.create_app_trigger(
|
||||
db_session_with_containers, app=app, node_id="node-1", status=AppTriggerStatus.ENABLED
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError, match="Workflow not found"):
|
||||
WebhookService.get_webhook_trigger_and_workflow(webhook_trigger.webhook_id)
|
||||
|
||||
def test_get_webhook_trigger_and_workflow_returns_debug_draft_workflow(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
factory.create_workflow(
|
||||
db_session_with_containers,
|
||||
app=app,
|
||||
account=account,
|
||||
node_ids=["published-node"],
|
||||
version="2026-04-14.001",
|
||||
)
|
||||
draft_workflow = factory.create_workflow(
|
||||
db_session_with_containers,
|
||||
app=app,
|
||||
account=account,
|
||||
node_ids=["debug-node"],
|
||||
version=Workflow.VERSION_DRAFT,
|
||||
)
|
||||
webhook_trigger = factory.create_webhook_trigger(
|
||||
db_session_with_containers, app=app, account=account, node_id="debug-node"
|
||||
)
|
||||
|
||||
got_trigger, got_workflow, got_node_config = WebhookService.get_webhook_trigger_and_workflow(
|
||||
webhook_trigger.webhook_id,
|
||||
is_debug=True,
|
||||
)
|
||||
|
||||
assert got_trigger.id == webhook_trigger.id
|
||||
assert got_workflow.id == draft_workflow.id
|
||||
assert got_node_config["id"] == "debug-node"
|
||||
|
||||
|
||||
class TestWebhookServiceTriggerExecutionWithContainers:
|
||||
def test_trigger_workflow_execution_triggers_async_workflow_successfully(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
workflow = factory.create_workflow(
|
||||
db_session_with_containers, app=app, account=account, node_ids=["node-1"], version="2026-04-14.001"
|
||||
)
|
||||
webhook_trigger = factory.create_webhook_trigger(
|
||||
db_session_with_containers, app=app, account=account, node_id="node-1"
|
||||
)
|
||||
|
||||
end_user = SimpleNamespace(id=str(uuid4()))
|
||||
webhook_data = {"body": {"value": 1}, "headers": {}, "query_params": {}, "files": {}, "method": "POST"}
|
||||
|
||||
with (
|
||||
patch(
|
||||
"services.trigger.webhook_service.EndUserService.get_or_create_end_user_by_type",
|
||||
return_value=end_user,
|
||||
),
|
||||
patch("services.trigger.webhook_service.QuotaType.TRIGGER.consume") as mock_consume,
|
||||
patch("services.trigger.webhook_service.AsyncWorkflowService.trigger_workflow_async") as mock_trigger,
|
||||
):
|
||||
WebhookService.trigger_workflow_execution(webhook_trigger, webhook_data, workflow)
|
||||
|
||||
mock_consume.assert_called_once_with(webhook_trigger.tenant_id)
|
||||
mock_trigger.assert_called_once()
|
||||
trigger_args = mock_trigger.call_args.args
|
||||
assert trigger_args[1] is end_user
|
||||
assert trigger_args[2].workflow_id == workflow.id
|
||||
assert trigger_args[2].root_node_id == webhook_trigger.node_id
|
||||
|
||||
def test_trigger_workflow_execution_marks_tenant_rate_limited_when_quota_exceeded(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
workflow = factory.create_workflow(
|
||||
db_session_with_containers, app=app, account=account, node_ids=["node-1"], version="2026-04-14.001"
|
||||
)
|
||||
webhook_trigger = factory.create_webhook_trigger(
|
||||
db_session_with_containers, app=app, account=account, node_id="node-1"
|
||||
)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"services.trigger.webhook_service.EndUserService.get_or_create_end_user_by_type",
|
||||
return_value=SimpleNamespace(id=str(uuid4())),
|
||||
),
|
||||
patch(
|
||||
"services.trigger.webhook_service.QuotaType.TRIGGER.consume",
|
||||
side_effect=QuotaExceededError(feature="trigger", tenant_id=tenant.id, required=1),
|
||||
),
|
||||
patch(
|
||||
"services.trigger.webhook_service.AppTriggerService.mark_tenant_triggers_rate_limited"
|
||||
) as mock_mark_rate_limited,
|
||||
):
|
||||
with pytest.raises(QuotaExceededError):
|
||||
WebhookService.trigger_workflow_execution(
|
||||
webhook_trigger,
|
||||
{"body": {}, "headers": {}, "query_params": {}, "files": {}, "method": "POST"},
|
||||
workflow,
|
||||
)
|
||||
|
||||
mock_mark_rate_limited.assert_called_once_with(tenant.id)
|
||||
|
||||
def test_trigger_workflow_execution_logs_and_reraises_unexpected_errors(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
workflow = factory.create_workflow(
|
||||
db_session_with_containers, app=app, account=account, node_ids=["node-1"], version="2026-04-14.001"
|
||||
)
|
||||
webhook_trigger = factory.create_webhook_trigger(
|
||||
db_session_with_containers, app=app, account=account, node_id="node-1"
|
||||
)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"services.trigger.webhook_service.EndUserService.get_or_create_end_user_by_type",
|
||||
side_effect=RuntimeError("boom"),
|
||||
),
|
||||
patch("services.trigger.webhook_service.logger.exception") as mock_logger_exception,
|
||||
):
|
||||
with pytest.raises(RuntimeError, match="boom"):
|
||||
WebhookService.trigger_workflow_execution(
|
||||
webhook_trigger,
|
||||
{"body": {}, "headers": {}, "query_params": {}, "files": {}, "method": "POST"},
|
||||
workflow,
|
||||
)
|
||||
|
||||
mock_logger_exception.assert_called_once()
|
||||
|
||||
|
||||
class TestWebhookServiceRelationshipSyncWithContainers:
|
||||
def test_sync_webhook_relationships_raises_when_workflow_exceeds_node_limit(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
node_ids = [f"node-{index}" for index in range(WebhookService.MAX_WEBHOOK_NODES_PER_WORKFLOW + 1)]
|
||||
workflow = factory.create_workflow(
|
||||
db_session_with_containers, app=app, account=account, node_ids=node_ids, version=Workflow.VERSION_DRAFT
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError, match="maximum webhook node limit"):
|
||||
WebhookService.sync_webhook_relationships(app, workflow)
|
||||
|
||||
def test_sync_webhook_relationships_raises_when_lock_not_acquired(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
workflow = factory.create_workflow(
|
||||
db_session_with_containers, app=app, account=account, node_ids=["node-1"], version=Workflow.VERSION_DRAFT
|
||||
)
|
||||
lock = MagicMock()
|
||||
lock.acquire.return_value = False
|
||||
|
||||
with patch("services.trigger.webhook_service.redis_client.lock", return_value=lock):
|
||||
with pytest.raises(RuntimeError, match="Failed to acquire lock"):
|
||||
WebhookService.sync_webhook_relationships(app, workflow)
|
||||
|
||||
def test_sync_webhook_relationships_creates_missing_records_and_deletes_stale_records(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
stale_trigger = factory.create_webhook_trigger(
|
||||
db_session_with_containers,
|
||||
app=app,
|
||||
account=account,
|
||||
node_id="node-stale",
|
||||
webhook_id="stale-webhook-id-000001",
|
||||
)
|
||||
stale_trigger_id = stale_trigger.id
|
||||
workflow = factory.create_workflow(
|
||||
db_session_with_containers,
|
||||
app=app,
|
||||
account=account,
|
||||
node_ids=["node-new"],
|
||||
version=Workflow.VERSION_DRAFT,
|
||||
)
|
||||
|
||||
with patch(
|
||||
"services.trigger.webhook_service.WebhookService.generate_webhook_id", return_value="new-webhook-id-000001"
|
||||
):
|
||||
WebhookService.sync_webhook_relationships(app, workflow)
|
||||
|
||||
db_session_with_containers.expire_all()
|
||||
records = db_session_with_containers.scalars(
|
||||
select(WorkflowWebhookTrigger).where(WorkflowWebhookTrigger.app_id == app.id)
|
||||
).all()
|
||||
|
||||
assert [record.node_id for record in records] == ["node-new"]
|
||||
assert records[0].webhook_id == "new-webhook-id-000001"
|
||||
assert db_session_with_containers.get(WorkflowWebhookTrigger, stale_trigger_id) is None
|
||||
|
||||
def test_sync_webhook_relationships_sets_redis_cache_for_new_record(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
workflow = factory.create_workflow(
|
||||
db_session_with_containers,
|
||||
app=app,
|
||||
account=account,
|
||||
node_ids=["node-cache"],
|
||||
version=Workflow.VERSION_DRAFT,
|
||||
)
|
||||
cache_key = f"{WebhookService.__WEBHOOK_NODE_CACHE_KEY__}:{app.id}:node-cache"
|
||||
|
||||
with patch(
|
||||
"services.trigger.webhook_service.WebhookService.generate_webhook_id", return_value="cache-webhook-id-00001"
|
||||
):
|
||||
WebhookService.sync_webhook_relationships(app, workflow)
|
||||
|
||||
cached_payload = WebhookServiceRelationshipFactory._read_cache(cache_key)
|
||||
assert cached_payload is not None
|
||||
assert cached_payload["node_id"] == "node-cache"
|
||||
assert cached_payload["webhook_id"] == "cache-webhook-id-00001"
|
||||
|
||||
def test_sync_webhook_relationships_logs_when_lock_release_fails(
|
||||
self, db_session_with_containers: Session, flask_app_with_containers
|
||||
):
|
||||
del flask_app_with_containers
|
||||
factory = WebhookServiceRelationshipFactory
|
||||
account, tenant = factory.create_account_and_tenant(db_session_with_containers)
|
||||
app = factory.create_app(db_session_with_containers, tenant, account)
|
||||
workflow = factory.create_workflow(
|
||||
db_session_with_containers, app=app, account=account, node_ids=[], version=Workflow.VERSION_DRAFT
|
||||
)
|
||||
lock = MagicMock()
|
||||
lock.acquire.return_value = True
|
||||
lock.release.side_effect = RuntimeError("release failed")
|
||||
|
||||
with (
|
||||
patch("services.trigger.webhook_service.redis_client.lock", return_value=lock),
|
||||
patch("services.trigger.webhook_service.logger.exception") as mock_logger_exception,
|
||||
):
|
||||
WebhookService.sync_webhook_relationships(app, workflow)
|
||||
|
||||
mock_logger_exception.assert_called_once()
|
||||
|
||||
|
||||
def _read_cache(cache_key: str) -> dict[str, str] | None:
|
||||
from extensions.ext_redis import redis_client
|
||||
|
||||
cached = redis_client.get(cache_key)
|
||||
if not cached:
|
||||
return None
|
||||
if isinstance(cached, bytes):
|
||||
cached = cached.decode("utf-8")
|
||||
return json.loads(cached)
|
||||
|
||||
|
||||
WebhookServiceRelationshipFactory._read_cache = staticmethod(_read_cache)
|
||||
@ -16,7 +16,7 @@ from sqlalchemy import delete, func, select, update
|
||||
|
||||
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
|
||||
from core.rag.index_processor.constant.index_type import IndexStructureType, IndexTechniqueType
|
||||
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
|
||||
from models import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole, TenantStatus
|
||||
from models.dataset import Dataset, Document, DocumentSegment
|
||||
from models.enums import DataSourceType, DocumentCreatedFrom, IndexingStatus, SegmentStatus
|
||||
from tasks.document_indexing_sync_task import document_indexing_sync_task
|
||||
@ -31,12 +31,12 @@ class DocumentIndexingSyncTaskTestDataFactory:
|
||||
email=f"{uuid4()}@example.com",
|
||||
name=f"user-{uuid4()}",
|
||||
interface_language="en-US",
|
||||
status="active",
|
||||
status=AccountStatus.ACTIVE,
|
||||
)
|
||||
db_session_with_containers.add(account)
|
||||
db_session_with_containers.flush()
|
||||
|
||||
tenant = Tenant(name=f"tenant-{account.id}", status="normal")
|
||||
tenant = Tenant(name=f"tenant-{account.id}", status=TenantStatus.NORMAL)
|
||||
db_session_with_containers.add(tenant)
|
||||
db_session_with_containers.flush()
|
||||
|
||||
|
||||
@ -33,7 +33,7 @@ from extensions.ext_storage import storage
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from models import Account
|
||||
from models import WorkflowPause as WorkflowPauseModel
|
||||
from models.account import Tenant, TenantAccountJoin, TenantAccountRole
|
||||
from models.account import AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole, TenantStatus
|
||||
from models.model import UploadFile
|
||||
from models.workflow import Workflow, WorkflowRun
|
||||
from repositories.sqlalchemy_api_workflow_run_repository import (
|
||||
@ -181,7 +181,7 @@ class TestWorkflowPauseIntegration:
|
||||
|
||||
tenant = Tenant(
|
||||
name="Test Tenant",
|
||||
status="normal",
|
||||
status=TenantStatus.NORMAL,
|
||||
)
|
||||
db_session_with_containers.add(tenant)
|
||||
db_session_with_containers.commit()
|
||||
@ -190,7 +190,7 @@ class TestWorkflowPauseIntegration:
|
||||
email="test@example.com",
|
||||
name="Test User",
|
||||
interface_language="en-US",
|
||||
status="active",
|
||||
status=AccountStatus.ACTIVE,
|
||||
)
|
||||
db_session_with_containers.add(account)
|
||||
db_session_with_containers.commit()
|
||||
@ -696,7 +696,7 @@ class TestWorkflowPauseIntegration:
|
||||
|
||||
tenant2 = Tenant(
|
||||
name="Test Tenant 2",
|
||||
status="normal",
|
||||
status=TenantStatus.NORMAL,
|
||||
)
|
||||
self.session.add(tenant2)
|
||||
self.session.commit()
|
||||
@ -705,7 +705,7 @@ class TestWorkflowPauseIntegration:
|
||||
email="test2@example.com",
|
||||
name="Test User 2",
|
||||
interface_language="en-US",
|
||||
status="active",
|
||||
status=AccountStatus.ACTIVE,
|
||||
)
|
||||
self.session.add(account2)
|
||||
self.session.commit()
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import datetime
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import Mock
|
||||
@ -347,3 +348,87 @@ def test_advanced_chat_run_conversation_not_exists(app, monkeypatch: pytest.Monk
|
||||
):
|
||||
with pytest.raises(NotFound):
|
||||
handler(api, app_model=SimpleNamespace(id="app"))
|
||||
|
||||
|
||||
def test_workflow_online_users_filters_inaccessible_workflow(app, monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
app_id_1 = "11111111-1111-1111-1111-111111111111"
|
||||
app_id_2 = "22222222-2222-2222-2222-222222222222"
|
||||
signed_avatar_url = "https://files.example.com/signed/avatar-1"
|
||||
sign_avatar = Mock(return_value=signed_avatar_url)
|
||||
monkeypatch.setattr(workflow_module, "current_account_with_tenant", lambda: (SimpleNamespace(), "tenant-1"))
|
||||
monkeypatch.setattr(
|
||||
workflow_module,
|
||||
"WorkflowService",
|
||||
lambda: SimpleNamespace(get_accessible_app_ids=lambda app_ids, tenant_id: {app_id_1}),
|
||||
)
|
||||
monkeypatch.setattr(workflow_module.file_helpers, "get_signed_file_url", sign_avatar)
|
||||
|
||||
workflow_module.redis_client.hgetall.side_effect = lambda key: (
|
||||
{
|
||||
b"sid-1": json.dumps(
|
||||
{
|
||||
"user_id": "u-1",
|
||||
"username": "Alice",
|
||||
"avatar": "avatar-file-id",
|
||||
"sid": "sid-1",
|
||||
}
|
||||
)
|
||||
}
|
||||
if key == f"{workflow_module.WORKFLOW_ONLINE_USERS_PREFIX}{app_id_1}"
|
||||
else {}
|
||||
)
|
||||
|
||||
api = workflow_module.WorkflowOnlineUsersApi()
|
||||
handler = _unwrap(api.get)
|
||||
|
||||
with app.test_request_context(
|
||||
f"/apps/workflows/online-users?app_ids={app_id_1},{app_id_2}",
|
||||
method="GET",
|
||||
):
|
||||
response = handler(api)
|
||||
|
||||
assert response == {
|
||||
"data": [
|
||||
{
|
||||
"app_id": app_id_1,
|
||||
"users": [
|
||||
{
|
||||
"user_id": "u-1",
|
||||
"username": "Alice",
|
||||
"avatar": signed_avatar_url,
|
||||
"sid": "sid-1",
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
}
|
||||
workflow_module.redis_client.hgetall.assert_called_once_with(
|
||||
f"{workflow_module.WORKFLOW_ONLINE_USERS_PREFIX}{app_id_1}"
|
||||
)
|
||||
sign_avatar.assert_called_once_with("avatar-file-id")
|
||||
|
||||
|
||||
def test_workflow_online_users_rejects_excessive_workflow_ids(app, monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
monkeypatch.setattr(workflow_module, "current_account_with_tenant", lambda: (SimpleNamespace(), "tenant-1"))
|
||||
accessible_app_ids = Mock(return_value=set())
|
||||
monkeypatch.setattr(
|
||||
workflow_module,
|
||||
"WorkflowService",
|
||||
lambda: SimpleNamespace(get_accessible_app_ids=accessible_app_ids),
|
||||
)
|
||||
|
||||
excessive_ids = ",".join(f"wf-{index}" for index in range(workflow_module.MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS + 1))
|
||||
|
||||
api = workflow_module.WorkflowOnlineUsersApi()
|
||||
handler = _unwrap(api.get)
|
||||
|
||||
with app.test_request_context(
|
||||
f"/apps/workflows/online-users?app_ids={excessive_ids}",
|
||||
method="GET",
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc:
|
||||
handler(api)
|
||||
|
||||
assert exc.value.code == 400
|
||||
assert "Maximum" in exc.value.description
|
||||
accessible_app_ids.assert_not_called()
|
||||
|
||||
@ -0,0 +1,201 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import nullcontext
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from werkzeug.exceptions import Forbidden
|
||||
|
||||
from controllers.console import console_ns
|
||||
from controllers.console import wraps as console_wraps
|
||||
from controllers.console.app import workflow_comment as workflow_comment_module
|
||||
from controllers.console.app import wraps as app_wraps
|
||||
from libs import login as login_lib
|
||||
from models.account import Account, AccountStatus, TenantAccountRole
|
||||
|
||||
|
||||
def _make_account(role: TenantAccountRole) -> Account:
|
||||
account = Account(name="tester", email="tester@example.com")
|
||||
account.status = AccountStatus.ACTIVE
|
||||
account.role = role
|
||||
account.id = "account-123" # type: ignore[assignment]
|
||||
account._current_tenant = SimpleNamespace(id="tenant-123") # type: ignore[attr-defined]
|
||||
account._get_current_object = lambda: account # type: ignore[attr-defined]
|
||||
return account
|
||||
|
||||
|
||||
def _make_app() -> SimpleNamespace:
|
||||
return SimpleNamespace(id="app-123", tenant_id="tenant-123", status="normal", mode="workflow")
|
||||
|
||||
|
||||
def _patch_console_guards(monkeypatch: pytest.MonkeyPatch, account: Account, app_model: SimpleNamespace) -> None:
|
||||
monkeypatch.setattr(login_lib.dify_config, "LOGIN_DISABLED", True)
|
||||
monkeypatch.setattr(login_lib, "current_user", account)
|
||||
monkeypatch.setattr(login_lib, "current_account_with_tenant", lambda: (account, account.current_tenant_id))
|
||||
monkeypatch.setattr(login_lib, "check_csrf_token", lambda *_, **__: None)
|
||||
monkeypatch.setattr(console_wraps, "current_account_with_tenant", lambda: (account, account.current_tenant_id))
|
||||
monkeypatch.setattr(console_wraps.dify_config, "EDITION", "CLOUD")
|
||||
monkeypatch.setattr(app_wraps, "current_account_with_tenant", lambda: (account, account.current_tenant_id))
|
||||
monkeypatch.setattr(app_wraps, "_load_app_model", lambda _app_id: app_model)
|
||||
monkeypatch.setattr(workflow_comment_module, "current_user", account)
|
||||
|
||||
|
||||
def _patch_write_services(monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
for method_name in (
|
||||
"create_comment",
|
||||
"update_comment",
|
||||
"delete_comment",
|
||||
"resolve_comment",
|
||||
"validate_comment_access",
|
||||
"create_reply",
|
||||
"update_reply",
|
||||
"delete_reply",
|
||||
):
|
||||
monkeypatch.setattr(workflow_comment_module.WorkflowCommentService, method_name, MagicMock())
|
||||
|
||||
|
||||
def _patch_payload(payload: dict[str, object] | None):
|
||||
if payload is None:
|
||||
return nullcontext()
|
||||
return patch.object(
|
||||
type(console_ns),
|
||||
"payload",
|
||||
new_callable=PropertyMock,
|
||||
return_value=payload,
|
||||
)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class WriteCase:
|
||||
resource_cls: type
|
||||
method_name: str
|
||||
path: str
|
||||
kwargs: dict[str, str]
|
||||
payload: dict[str, object] | None = None
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"case",
|
||||
[
|
||||
WriteCase(
|
||||
resource_cls=workflow_comment_module.WorkflowCommentListApi,
|
||||
method_name="post",
|
||||
path="/console/api/apps/app-123/workflow/comments",
|
||||
kwargs={"app_id": "app-123"},
|
||||
payload={"content": "hello", "position_x": 1.0, "position_y": 2.0, "mentioned_user_ids": []},
|
||||
),
|
||||
WriteCase(
|
||||
resource_cls=workflow_comment_module.WorkflowCommentDetailApi,
|
||||
method_name="put",
|
||||
path="/console/api/apps/app-123/workflow/comments/comment-1",
|
||||
kwargs={"app_id": "app-123", "comment_id": "comment-1"},
|
||||
payload={"content": "hello", "position_x": 1.0, "position_y": 2.0, "mentioned_user_ids": []},
|
||||
),
|
||||
WriteCase(
|
||||
resource_cls=workflow_comment_module.WorkflowCommentDetailApi,
|
||||
method_name="delete",
|
||||
path="/console/api/apps/app-123/workflow/comments/comment-1",
|
||||
kwargs={"app_id": "app-123", "comment_id": "comment-1"},
|
||||
),
|
||||
WriteCase(
|
||||
resource_cls=workflow_comment_module.WorkflowCommentResolveApi,
|
||||
method_name="post",
|
||||
path="/console/api/apps/app-123/workflow/comments/comment-1/resolve",
|
||||
kwargs={"app_id": "app-123", "comment_id": "comment-1"},
|
||||
),
|
||||
WriteCase(
|
||||
resource_cls=workflow_comment_module.WorkflowCommentReplyApi,
|
||||
method_name="post",
|
||||
path="/console/api/apps/app-123/workflow/comments/comment-1/replies",
|
||||
kwargs={"app_id": "app-123", "comment_id": "comment-1"},
|
||||
payload={"content": "reply", "mentioned_user_ids": []},
|
||||
),
|
||||
WriteCase(
|
||||
resource_cls=workflow_comment_module.WorkflowCommentReplyDetailApi,
|
||||
method_name="put",
|
||||
path="/console/api/apps/app-123/workflow/comments/comment-1/replies/reply-1",
|
||||
kwargs={"app_id": "app-123", "comment_id": "comment-1", "reply_id": "reply-1"},
|
||||
payload={"content": "reply", "mentioned_user_ids": []},
|
||||
),
|
||||
WriteCase(
|
||||
resource_cls=workflow_comment_module.WorkflowCommentReplyDetailApi,
|
||||
method_name="delete",
|
||||
path="/console/api/apps/app-123/workflow/comments/comment-1/replies/reply-1",
|
||||
kwargs={"app_id": "app-123", "comment_id": "comment-1", "reply_id": "reply-1"},
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_write_endpoints_require_edit_permission(app: Flask, monkeypatch: pytest.MonkeyPatch, case: WriteCase) -> None:
|
||||
app.config.setdefault("RESTX_MASK_HEADER", "X-Fields")
|
||||
account = _make_account(TenantAccountRole.NORMAL)
|
||||
app_model = _make_app()
|
||||
_patch_console_guards(monkeypatch, account, app_model)
|
||||
_patch_write_services(monkeypatch)
|
||||
|
||||
with app.test_request_context(case.path, method=case.method_name.upper(), json=case.payload):
|
||||
with _patch_payload(case.payload):
|
||||
handler = getattr(case.resource_cls(), case.method_name)
|
||||
with pytest.raises(Forbidden):
|
||||
handler(**case.kwargs)
|
||||
|
||||
|
||||
def test_create_comment_allows_editor(app: Flask, monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
app.config.setdefault("RESTX_MASK_HEADER", "X-Fields")
|
||||
account = _make_account(TenantAccountRole.EDITOR)
|
||||
app_model = _make_app()
|
||||
_patch_console_guards(monkeypatch, account, app_model)
|
||||
|
||||
create_comment_mock = MagicMock(return_value={"id": "comment-1"})
|
||||
monkeypatch.setattr(workflow_comment_module.WorkflowCommentService, "create_comment", create_comment_mock)
|
||||
payload = {"content": "hello", "position_x": 1.0, "position_y": 2.0, "mentioned_user_ids": []}
|
||||
|
||||
with app.test_request_context("/console/api/apps/app-123/workflow/comments", method="POST", json=payload):
|
||||
with _patch_payload(payload):
|
||||
result = workflow_comment_module.WorkflowCommentListApi().post(app_id="app-123")
|
||||
|
||||
if isinstance(result, tuple):
|
||||
response = result[0]
|
||||
else:
|
||||
response = result
|
||||
assert response["id"] == "comment-1"
|
||||
create_comment_mock.assert_called_once_with(
|
||||
tenant_id="tenant-123",
|
||||
app_id="app-123",
|
||||
created_by="account-123",
|
||||
content="hello",
|
||||
position_x=1.0,
|
||||
position_y=2.0,
|
||||
mentioned_user_ids=[],
|
||||
)
|
||||
|
||||
|
||||
def test_update_comment_omits_mentions_when_payload_does_not_include_them(
|
||||
app: Flask, monkeypatch: pytest.MonkeyPatch
|
||||
) -> None:
|
||||
app.config.setdefault("RESTX_MASK_HEADER", "X-Fields")
|
||||
account = _make_account(TenantAccountRole.EDITOR)
|
||||
app_model = _make_app()
|
||||
_patch_console_guards(monkeypatch, account, app_model)
|
||||
|
||||
update_comment_mock = MagicMock(return_value={"id": "comment-1", "updated_at": datetime(2024, 1, 1, 12, 0, 0)})
|
||||
monkeypatch.setattr(workflow_comment_module.WorkflowCommentService, "update_comment", update_comment_mock)
|
||||
payload = {"content": "hello", "position_x": 10.0, "position_y": 20.0}
|
||||
|
||||
with app.test_request_context("/console/api/apps/app-123/workflow/comments/comment-1", method="PUT", json=payload):
|
||||
with _patch_payload(payload):
|
||||
workflow_comment_module.WorkflowCommentDetailApi().put(app_id="app-123", comment_id="comment-1")
|
||||
|
||||
update_comment_mock.assert_called_once_with(
|
||||
tenant_id="tenant-123",
|
||||
app_id="app-123",
|
||||
comment_id="comment-1",
|
||||
user_id="account-123",
|
||||
content="hello",
|
||||
position_x=10.0,
|
||||
position_y=20.0,
|
||||
mentioned_user_ids=None,
|
||||
)
|
||||
@ -94,7 +94,7 @@ class TestTrialAppWorkflowRunApi:
|
||||
|
||||
with app.test_request_context("/"):
|
||||
with pytest.raises(NotWorkflowAppError):
|
||||
method(MagicMock(mode=AppMode.CHAT))
|
||||
method(api, MagicMock(mode=AppMode.CHAT))
|
||||
|
||||
def test_success(self, app, trial_app_workflow, account):
|
||||
api = module.TrialAppWorkflowRunApi()
|
||||
@ -106,7 +106,7 @@ class TestTrialAppWorkflowRunApi:
|
||||
patch.object(module.AppGenerateService, "generate", return_value=MagicMock()),
|
||||
patch.object(module.RecommendedAppService, "add_trial_app_record"),
|
||||
):
|
||||
result = method(trial_app_workflow)
|
||||
result = method(api, trial_app_workflow)
|
||||
|
||||
assert result is not None
|
||||
|
||||
@ -124,7 +124,7 @@ class TestTrialAppWorkflowRunApi:
|
||||
),
|
||||
):
|
||||
with pytest.raises(ProviderNotInitializeError):
|
||||
method(trial_app_workflow)
|
||||
method(api, trial_app_workflow)
|
||||
|
||||
def test_workflow_quota_exceeded(self, app, trial_app_workflow, account):
|
||||
api = module.TrialAppWorkflowRunApi()
|
||||
@ -140,7 +140,7 @@ class TestTrialAppWorkflowRunApi:
|
||||
),
|
||||
):
|
||||
with pytest.raises(ProviderQuotaExceededError):
|
||||
method(trial_app_workflow)
|
||||
method(api, trial_app_workflow)
|
||||
|
||||
def test_workflow_model_not_support(self, app, trial_app_workflow, account):
|
||||
api = module.TrialAppWorkflowRunApi()
|
||||
@ -156,7 +156,7 @@ class TestTrialAppWorkflowRunApi:
|
||||
),
|
||||
):
|
||||
with pytest.raises(ProviderModelCurrentlyNotSupportError):
|
||||
method(trial_app_workflow)
|
||||
method(api, trial_app_workflow)
|
||||
|
||||
def test_workflow_invoke_error(self, app, trial_app_workflow, account):
|
||||
api = module.TrialAppWorkflowRunApi()
|
||||
@ -172,7 +172,7 @@ class TestTrialAppWorkflowRunApi:
|
||||
),
|
||||
):
|
||||
with pytest.raises(CompletionRequestError):
|
||||
method(trial_app_workflow)
|
||||
method(api, trial_app_workflow)
|
||||
|
||||
def test_workflow_rate_limit_error(self, app, trial_app_workflow, account):
|
||||
api = module.TrialAppWorkflowRunApi()
|
||||
@ -188,7 +188,7 @@ class TestTrialAppWorkflowRunApi:
|
||||
),
|
||||
):
|
||||
with pytest.raises(InvokeRateLimitHttpError):
|
||||
method(trial_app_workflow)
|
||||
method(api, trial_app_workflow)
|
||||
|
||||
def test_workflow_value_error(self, app, trial_app_workflow, account):
|
||||
api = module.TrialAppWorkflowRunApi()
|
||||
@ -204,7 +204,7 @@ class TestTrialAppWorkflowRunApi:
|
||||
),
|
||||
):
|
||||
with pytest.raises(ValueError):
|
||||
method(trial_app_workflow)
|
||||
method(api, trial_app_workflow)
|
||||
|
||||
def test_workflow_generic_exception(self, app, trial_app_workflow, account):
|
||||
api = module.TrialAppWorkflowRunApi()
|
||||
@ -220,7 +220,7 @@ class TestTrialAppWorkflowRunApi:
|
||||
),
|
||||
):
|
||||
with pytest.raises(InternalServerError):
|
||||
method(trial_app_workflow)
|
||||
method(api, trial_app_workflow)
|
||||
|
||||
|
||||
class TestTrialChatApi:
|
||||
@ -566,7 +566,7 @@ class TestTrialMessageSuggestedQuestionApi:
|
||||
|
||||
with app.test_request_context("/"):
|
||||
with pytest.raises(NotChatAppError):
|
||||
method(api, MagicMock(mode="completion"), str(uuid4()))
|
||||
method(MagicMock(mode="completion"), str(uuid4()))
|
||||
|
||||
def test_success(self, app, trial_app_chat, account):
|
||||
api = module.TrialMessageSuggestedQuestionApi()
|
||||
@ -581,7 +581,7 @@ class TestTrialMessageSuggestedQuestionApi:
|
||||
return_value=["q1", "q2"],
|
||||
),
|
||||
):
|
||||
result = method(api, trial_app_chat, str(uuid4()))
|
||||
result = method(trial_app_chat, str(uuid4()))
|
||||
|
||||
assert result == {"data": ["q1", "q2"]}
|
||||
|
||||
@ -599,7 +599,7 @@ class TestTrialMessageSuggestedQuestionApi:
|
||||
),
|
||||
):
|
||||
with pytest.raises(NotFound):
|
||||
method(api, trial_app_chat, str(uuid4()))
|
||||
method(trial_app_chat, str(uuid4()))
|
||||
|
||||
|
||||
class TestTrialAppParameterApi:
|
||||
@ -931,7 +931,7 @@ class TestTrialAppWorkflowTaskStopApi:
|
||||
|
||||
with app.test_request_context("/"):
|
||||
with pytest.raises(NotWorkflowAppError):
|
||||
method(trial_app_chat, str(uuid4()))
|
||||
method(api, trial_app_chat, str(uuid4()))
|
||||
|
||||
def test_success(self, app, trial_app_workflow, account):
|
||||
api = module.TrialAppWorkflowTaskStopApi()
|
||||
@ -944,7 +944,7 @@ class TestTrialAppWorkflowTaskStopApi:
|
||||
patch.object(module.AppQueueManager, "set_stop_flag_no_user_check") as mock_set_flag,
|
||||
patch.object(module.GraphEngineManager, "send_stop_command") as mock_send_cmd,
|
||||
):
|
||||
result = method(trial_app_workflow, task_id)
|
||||
result = method(api, trial_app_workflow, task_id)
|
||||
|
||||
assert result == {"result": "success"}
|
||||
mock_set_flag.assert_called_once_with(task_id)
|
||||
|
||||
@ -1,9 +1,11 @@
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import MagicMock, PropertyMock, patch
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from werkzeug.exceptions import Forbidden
|
||||
|
||||
import controllers.console.tag.tags as module
|
||||
from controllers.console import console_ns
|
||||
from controllers.console.tag.tags import (
|
||||
TagBindingCreateApi,
|
||||
@ -83,13 +85,20 @@ class TestTagListApi:
|
||||
),
|
||||
patch(
|
||||
"controllers.console.tag.tags.TagService.get_tags",
|
||||
return_value=[{"id": "1", "name": "tag"}],
|
||||
return_value=[
|
||||
SimpleNamespace(
|
||||
id="1",
|
||||
name="tag",
|
||||
type=TagType.KNOWLEDGE,
|
||||
binding_count=1,
|
||||
)
|
||||
],
|
||||
),
|
||||
):
|
||||
result, status = method(api)
|
||||
|
||||
assert status == 200
|
||||
assert isinstance(result, list)
|
||||
assert result == [{"id": "1", "name": "tag", "type": "knowledge", "binding_count": "1"}]
|
||||
|
||||
def test_post_success(self, app, admin_user, tag, payload_patch):
|
||||
api = TagListApi()
|
||||
@ -113,6 +122,7 @@ class TestTagListApi:
|
||||
|
||||
assert status == 200
|
||||
assert result["name"] == "test-tag"
|
||||
assert result["binding_count"] == "0"
|
||||
|
||||
def test_post_forbidden(self, app, readonly_user, payload_patch):
|
||||
api = TagListApi()
|
||||
@ -158,7 +168,7 @@ class TestTagUpdateDeleteApi:
|
||||
result, status = method(api, "tag-1")
|
||||
|
||||
assert status == 200
|
||||
assert result["binding_count"] == 3
|
||||
assert result["binding_count"] == "3"
|
||||
|
||||
def test_patch_forbidden(self, app, readonly_user, payload_patch):
|
||||
api = TagUpdateDeleteApi()
|
||||
@ -277,3 +287,13 @@ class TestTagBindingDeleteApi:
|
||||
):
|
||||
with pytest.raises(Forbidden):
|
||||
method(api)
|
||||
|
||||
|
||||
class TestTagResponseModel:
|
||||
def test_tag_response_normalizes_enum_type(self):
|
||||
payload = module.TagResponse.model_validate(
|
||||
{"id": "tag-1", "name": "tag", "type": TagType.KNOWLEDGE, "binding_count": 1}
|
||||
).model_dump(mode="json")
|
||||
|
||||
assert payload["type"] == "knowledge"
|
||||
assert payload["binding_count"] == "1"
|
||||
|
||||
@ -11,7 +11,7 @@ from controllers.console.workspace.account import (
|
||||
ChangeEmailSendEmailApi,
|
||||
CheckEmailUnique,
|
||||
)
|
||||
from models import Account
|
||||
from models import Account, AccountStatus
|
||||
from services.account_service import AccountService
|
||||
|
||||
|
||||
@ -33,7 +33,7 @@ def _build_account(email: str, account_id: str = "acc", tenant: object | None =
|
||||
account = Account(name=account_id, email=email)
|
||||
account.email = email
|
||||
account.id = account_id
|
||||
account.status = "active"
|
||||
account.status = AccountStatus.ACTIVE
|
||||
account._current_tenant = tenant_obj
|
||||
return account
|
||||
|
||||
|
||||
@ -18,6 +18,7 @@ from controllers.inner_api.app.dsl import (
|
||||
InnerAppDSLImportPayload,
|
||||
_get_active_account,
|
||||
)
|
||||
from models.account import AccountStatus
|
||||
from services.app_dsl_service import ImportStatus
|
||||
|
||||
|
||||
@ -63,7 +64,7 @@ class TestGetActiveAccount:
|
||||
@patch("controllers.inner_api.app.dsl.db")
|
||||
def test_returns_active_account(self, mock_db):
|
||||
mock_account = MagicMock()
|
||||
mock_account.status = "active"
|
||||
mock_account.status = AccountStatus.ACTIVE
|
||||
mock_db.session.scalar.return_value = mock_account
|
||||
|
||||
result = _get_active_account("user@example.com")
|
||||
@ -74,7 +75,7 @@ class TestGetActiveAccount:
|
||||
@patch("controllers.inner_api.app.dsl.db")
|
||||
def test_returns_none_for_inactive_account(self, mock_db):
|
||||
mock_account = MagicMock()
|
||||
mock_account.status = "banned"
|
||||
mock_account.status = AccountStatus.BANNED
|
||||
mock_db.session.scalar.return_value = mock_account
|
||||
|
||||
result = _get_active_account("banned@example.com")
|
||||
|
||||
@ -20,6 +20,7 @@ from controllers.inner_api.workspace.workspace import (
|
||||
WorkspaceCreatePayload,
|
||||
WorkspaceOwnerlessPayload,
|
||||
)
|
||||
from models.account import TenantStatus
|
||||
|
||||
|
||||
class TestWorkspaceCreatePayload:
|
||||
@ -98,7 +99,7 @@ class TestEnterpriseWorkspace:
|
||||
mock_tenant.id = "tenant-id"
|
||||
mock_tenant.name = "My Workspace"
|
||||
mock_tenant.plan = "sandbox"
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
mock_tenant.created_at = now
|
||||
mock_tenant.updated_at = now
|
||||
mock_tenant_svc.create_tenant.return_value = mock_tenant
|
||||
@ -162,7 +163,7 @@ class TestEnterpriseWorkspaceNoOwnerEmail:
|
||||
mock_tenant.name = "My Workspace"
|
||||
mock_tenant.encrypt_public_key = "pub-key"
|
||||
mock_tenant.plan = "sandbox"
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
mock_tenant.custom_config = None
|
||||
mock_tenant.created_at = now
|
||||
mock_tenant.updated_at = now
|
||||
|
||||
@ -10,6 +10,7 @@ from flask import Flask
|
||||
|
||||
from controllers.service_api.app.app import AppInfoApi, AppMetaApi, AppParameterApi
|
||||
from controllers.service_api.app.error import AppUnavailableError
|
||||
from models.account import TenantStatus
|
||||
from models.model import App, AppMode
|
||||
from tests.unit_tests.conftest import setup_mock_tenant_account_query
|
||||
|
||||
@ -62,7 +63,7 @@ class TestAppParameterApi:
|
||||
mock_validate_token.return_value = mock_api_token
|
||||
|
||||
mock_tenant = Mock()
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
|
||||
# Mock DB queries for app and tenant
|
||||
mock_db.session.get.side_effect = [
|
||||
@ -110,7 +111,7 @@ class TestAppParameterApi:
|
||||
mock_validate_token.return_value = mock_api_token
|
||||
|
||||
mock_tenant = Mock()
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
|
||||
mock_db.session.get.side_effect = [
|
||||
mock_app_model,
|
||||
@ -151,7 +152,7 @@ class TestAppParameterApi:
|
||||
mock_validate_token.return_value = mock_api_token
|
||||
|
||||
mock_tenant = Mock()
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
|
||||
mock_db.session.get.side_effect = [
|
||||
mock_app_model,
|
||||
@ -190,7 +191,7 @@ class TestAppParameterApi:
|
||||
mock_validate_token.return_value = mock_api_token
|
||||
|
||||
mock_tenant = Mock()
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
|
||||
mock_db.session.get.side_effect = [
|
||||
mock_app_model,
|
||||
@ -253,7 +254,7 @@ class TestAppMetaApi:
|
||||
mock_validate_token.return_value = mock_api_token
|
||||
|
||||
mock_tenant = Mock()
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
|
||||
mock_db.session.get.side_effect = [
|
||||
mock_app_model,
|
||||
@ -321,7 +322,7 @@ class TestAppInfoApi:
|
||||
mock_validate_token.return_value = mock_api_token
|
||||
|
||||
mock_tenant = Mock()
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
|
||||
mock_db.session.get.side_effect = [
|
||||
mock_app_model,
|
||||
@ -378,7 +379,7 @@ class TestAppInfoApi:
|
||||
mock_validate_token.return_value = mock_api_token
|
||||
|
||||
mock_tenant = Mock()
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
|
||||
mock_db.session.get.side_effect = [
|
||||
mock_app,
|
||||
@ -424,7 +425,7 @@ class TestAppInfoApi:
|
||||
mock_validate_token.return_value = mock_api_token
|
||||
|
||||
mock_tenant = Mock()
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
|
||||
mock_db.session.get.side_effect = [
|
||||
mock_app,
|
||||
@ -476,7 +477,7 @@ class TestAppInfoApi:
|
||||
mock_validate_token.return_value = mock_api_token
|
||||
|
||||
mock_tenant = Mock()
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
|
||||
mock_db.session.get.side_effect = [
|
||||
mock_app,
|
||||
|
||||
@ -503,6 +503,7 @@ class TestEmailI18nIntegration:
|
||||
EmailType.ACCOUNT_DELETION_VERIFICATION,
|
||||
EmailType.QUEUE_MONITOR_ALERT,
|
||||
EmailType.DOCUMENT_CLEAN_NOTIFY,
|
||||
EmailType.WORKFLOW_COMMENT_MENTION,
|
||||
]
|
||||
|
||||
for email_type in expected_types:
|
||||
|
||||
100
api/tests/unit_tests/models/test_comment_models.py
Normal file
100
api/tests/unit_tests/models/test_comment_models.py
Normal file
@ -0,0 +1,100 @@
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
from models.comment import WorkflowComment, WorkflowCommentMention, WorkflowCommentReply
|
||||
|
||||
|
||||
def test_workflow_comment_account_properties_and_cache() -> None:
|
||||
comment = WorkflowComment(created_by="user-1", resolved_by="user-2", content="hello", position_x=1, position_y=2)
|
||||
created_account = Mock(id="user-1")
|
||||
resolved_account = Mock(id="user-2")
|
||||
|
||||
with patch("models.comment.db.session.get", side_effect=[created_account, resolved_account]) as get_mock:
|
||||
assert comment.created_by_account is created_account
|
||||
assert comment.resolved_by_account is resolved_account
|
||||
assert get_mock.call_count == 2
|
||||
|
||||
comment.cache_created_by_account(created_account)
|
||||
comment.cache_resolved_by_account(resolved_account)
|
||||
with patch("models.comment.db.session.get") as get_mock:
|
||||
assert comment.created_by_account is created_account
|
||||
assert comment.resolved_by_account is resolved_account
|
||||
get_mock.assert_not_called()
|
||||
|
||||
comment_without_resolver = WorkflowComment(
|
||||
created_by="user-1",
|
||||
resolved_by=None,
|
||||
content="hello",
|
||||
position_x=1,
|
||||
position_y=2,
|
||||
)
|
||||
with patch("models.comment.db.session.get") as get_mock:
|
||||
assert comment_without_resolver.resolved_by_account is None
|
||||
get_mock.assert_not_called()
|
||||
|
||||
|
||||
def test_workflow_comment_counts_and_participants() -> None:
|
||||
reply_1 = WorkflowCommentReply(comment_id="comment-1", content="reply-1", created_by="user-2")
|
||||
reply_2 = WorkflowCommentReply(comment_id="comment-1", content="reply-2", created_by="user-2")
|
||||
mention_1 = WorkflowCommentMention(comment_id="comment-1", mentioned_user_id="user-3")
|
||||
mention_2 = WorkflowCommentMention(comment_id="comment-1", mentioned_user_id="user-4")
|
||||
comment = WorkflowComment(created_by="user-1", resolved_by=None, content="hello", position_x=1, position_y=2)
|
||||
comment.replies = [reply_1, reply_2]
|
||||
comment.mentions = [mention_1, mention_2]
|
||||
|
||||
account_1 = Mock(id="user-1")
|
||||
account_2 = Mock(id="user-2")
|
||||
account_3 = Mock(id="user-3")
|
||||
account_map = {
|
||||
"user-1": account_1,
|
||||
"user-2": account_2,
|
||||
"user-3": account_3,
|
||||
"user-4": None,
|
||||
}
|
||||
|
||||
with patch("models.comment.db.session.get", side_effect=lambda _model, user_id: account_map[user_id]) as get_mock:
|
||||
participants = comment.participants
|
||||
|
||||
assert comment.reply_count == 2
|
||||
assert comment.mention_count == 2
|
||||
assert set(participants) == {account_1, account_2, account_3}
|
||||
assert get_mock.call_count == 4
|
||||
|
||||
|
||||
def test_workflow_comment_participants_use_cached_accounts() -> None:
|
||||
reply = WorkflowCommentReply(comment_id="comment-1", content="reply-1", created_by="user-2")
|
||||
mention = WorkflowCommentMention(comment_id="comment-1", mentioned_user_id="user-3")
|
||||
comment = WorkflowComment(created_by="user-1", resolved_by=None, content="hello", position_x=1, position_y=2)
|
||||
comment.replies = [reply]
|
||||
comment.mentions = [mention]
|
||||
|
||||
account_1 = Mock(id="user-1")
|
||||
account_2 = Mock(id="user-2")
|
||||
account_3 = Mock(id="user-3")
|
||||
comment.cache_created_by_account(account_1)
|
||||
reply.cache_created_by_account(account_2)
|
||||
mention.cache_mentioned_user_account(account_3)
|
||||
|
||||
with patch("models.comment.db.session.get") as get_mock:
|
||||
participants = comment.participants
|
||||
|
||||
assert set(participants) == {account_1, account_2, account_3}
|
||||
get_mock.assert_not_called()
|
||||
|
||||
|
||||
def test_reply_and_mention_account_properties_and_cache() -> None:
|
||||
reply = WorkflowCommentReply(comment_id="comment-1", content="reply", created_by="user-1")
|
||||
mention = WorkflowCommentMention(comment_id="comment-1", mentioned_user_id="user-2")
|
||||
reply_account = Mock(id="user-1")
|
||||
mention_account = Mock(id="user-2")
|
||||
|
||||
with patch("models.comment.db.session.get", side_effect=[reply_account, mention_account]) as get_mock:
|
||||
assert reply.created_by_account is reply_account
|
||||
assert mention.mentioned_user_account is mention_account
|
||||
assert get_mock.call_count == 2
|
||||
|
||||
reply.cache_created_by_account(reply_account)
|
||||
mention.cache_mentioned_user_account(mention_account)
|
||||
with patch("models.comment.db.session.get") as get_mock:
|
||||
assert reply.created_by_account is reply_account
|
||||
assert mention.mentioned_user_account is mention_account
|
||||
get_mock.assert_not_called()
|
||||
@ -0,0 +1,121 @@
|
||||
import json
|
||||
from unittest.mock import Mock
|
||||
|
||||
import pytest
|
||||
|
||||
from repositories import workflow_collaboration_repository as repo_module
|
||||
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
|
||||
|
||||
|
||||
class TestWorkflowCollaborationRepository:
|
||||
@pytest.fixture
|
||||
def mock_redis(self, monkeypatch: pytest.MonkeyPatch) -> Mock:
|
||||
mock_redis = Mock()
|
||||
monkeypatch.setattr(repo_module, "redis_client", mock_redis)
|
||||
return mock_redis
|
||||
|
||||
def test_get_sid_mapping_returns_mapping(self, mock_redis: Mock) -> None:
|
||||
# Arrange
|
||||
mock_redis.get.return_value = b'{"workflow_id":"wf-1","user_id":"u-1"}'
|
||||
repository = WorkflowCollaborationRepository()
|
||||
|
||||
# Act
|
||||
result = repository.get_sid_mapping("sid-1")
|
||||
|
||||
# Assert
|
||||
assert result == {"workflow_id": "wf-1", "user_id": "u-1"}
|
||||
|
||||
def test_list_sessions_filters_invalid_entries(self, mock_redis: Mock) -> None:
|
||||
# Arrange
|
||||
mock_redis.hgetall.return_value = {
|
||||
b"sid-1": b'{"user_id":"u-1","username":"Jane","sid":"sid-1","connected_at":2}',
|
||||
b"sid-2": b'{"username":"Missing","sid":"sid-2"}',
|
||||
b"sid-3": b"not-json",
|
||||
}
|
||||
repository = WorkflowCollaborationRepository()
|
||||
|
||||
# Act
|
||||
result = repository.list_sessions("wf-1")
|
||||
|
||||
# Assert
|
||||
assert result == [
|
||||
{
|
||||
"user_id": "u-1",
|
||||
"username": "Jane",
|
||||
"avatar": None,
|
||||
"sid": "sid-1",
|
||||
"connected_at": 2,
|
||||
}
|
||||
]
|
||||
|
||||
def test_set_session_info_persists_payload(self, mock_redis: Mock) -> None:
|
||||
# Arrange
|
||||
mock_redis.exists.return_value = True
|
||||
repository = WorkflowCollaborationRepository()
|
||||
payload = {
|
||||
"user_id": "u-1",
|
||||
"username": "Jane",
|
||||
"avatar": None,
|
||||
"sid": "sid-1",
|
||||
"connected_at": 1,
|
||||
}
|
||||
|
||||
# Act
|
||||
repository.set_session_info("wf-1", payload)
|
||||
|
||||
# Assert
|
||||
assert mock_redis.hset.called
|
||||
workflow_key, sid, session_json = mock_redis.hset.call_args.args
|
||||
assert workflow_key == "workflow_online_users:wf-1"
|
||||
assert sid == "sid-1"
|
||||
assert json.loads(session_json)["user_id"] == "u-1"
|
||||
assert mock_redis.set.called
|
||||
|
||||
def test_refresh_session_state_expires_keys(self, mock_redis: Mock) -> None:
|
||||
# Arrange
|
||||
mock_redis.exists.return_value = True
|
||||
repository = WorkflowCollaborationRepository()
|
||||
|
||||
# Act
|
||||
repository.refresh_session_state("wf-1", "sid-1")
|
||||
|
||||
# Assert
|
||||
assert mock_redis.expire.call_count == 2
|
||||
|
||||
def test_get_current_leader_decodes_bytes(self, mock_redis: Mock) -> None:
|
||||
# Arrange
|
||||
mock_redis.get.return_value = b"sid-1"
|
||||
repository = WorkflowCollaborationRepository()
|
||||
|
||||
# Act
|
||||
result = repository.get_current_leader("wf-1")
|
||||
|
||||
# Assert
|
||||
assert result == "sid-1"
|
||||
|
||||
def test_set_leader_if_absent_uses_nx(self, mock_redis: Mock) -> None:
|
||||
# Arrange
|
||||
mock_redis.set.return_value = True
|
||||
repository = WorkflowCollaborationRepository()
|
||||
|
||||
# Act
|
||||
result = repository.set_leader_if_absent("wf-1", "sid-1")
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
_key, _value = mock_redis.set.call_args.args
|
||||
assert _key == "workflow_leader:wf-1"
|
||||
assert _value == "sid-1"
|
||||
assert mock_redis.set.call_args.kwargs["nx"] is True
|
||||
assert "ex" in mock_redis.set.call_args.kwargs
|
||||
|
||||
def test_get_session_sids_decodes(self, mock_redis: Mock) -> None:
|
||||
# Arrange
|
||||
mock_redis.hkeys.return_value = [b"sid-1", "sid-2"]
|
||||
repository = WorkflowCollaborationRepository()
|
||||
|
||||
# Act
|
||||
result = repository.get_session_sids("wf-1")
|
||||
|
||||
# Assert
|
||||
assert result == ["sid-1", "sid-2"]
|
||||
@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch
|
||||
import pytest
|
||||
|
||||
from configs import dify_config
|
||||
from models.account import Account, AccountStatus
|
||||
from models.account import Account, AccountStatus, TenantStatus
|
||||
from services.account_service import AccountService, RegisterService, TenantService
|
||||
from services.errors.account import (
|
||||
AccountAlreadyInTenantError,
|
||||
@ -1697,7 +1697,7 @@ class TestRegisterService:
|
||||
# Setup test data
|
||||
mock_tenant = MagicMock()
|
||||
mock_tenant.id = "tenant-456"
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
mock_account = TestAccountAssociatedDataFactory.create_account_mock(
|
||||
account_id="user-123", email="test@example.com"
|
||||
)
|
||||
@ -1759,7 +1759,7 @@ class TestRegisterService:
|
||||
# Setup test data
|
||||
mock_tenant = MagicMock()
|
||||
mock_tenant.id = "tenant-456"
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
|
||||
# Mock Redis data
|
||||
invitation_data = {
|
||||
@ -1784,7 +1784,7 @@ class TestRegisterService:
|
||||
# Setup test data
|
||||
mock_tenant = MagicMock()
|
||||
mock_tenant.id = "tenant-456"
|
||||
mock_tenant.status = "normal"
|
||||
mock_tenant.status = TenantStatus.NORMAL
|
||||
mock_account = TestAccountAssociatedDataFactory.create_account_mock(
|
||||
account_id="different-user-456", email="test@example.com"
|
||||
)
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
from types import SimpleNamespace
|
||||
from typing import Any, cast
|
||||
from typing import Any
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
@ -13,11 +13,6 @@ from core.workflow.nodes.trigger_webhook.entities import (
|
||||
WebhookData,
|
||||
WebhookParameter,
|
||||
)
|
||||
from models.enums import AppTriggerStatus
|
||||
from models.model import App
|
||||
from models.trigger import WorkflowWebhookTrigger
|
||||
from models.workflow import Workflow
|
||||
from services.errors.app import QuotaExceededError
|
||||
from services.trigger import webhook_service as service_module
|
||||
from services.trigger.webhook_service import WebhookService
|
||||
|
||||
@ -39,156 +34,13 @@ class _FakeQuery:
|
||||
return self._result
|
||||
|
||||
|
||||
class _SessionContext:
|
||||
def __init__(self, session: Any) -> None:
|
||||
self._session = session
|
||||
|
||||
def __enter__(self) -> Any:
|
||||
return self._session
|
||||
|
||||
def __exit__(self, exc_type: Any, exc: Any, tb: Any) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
class _SessionmakerContext:
|
||||
def __init__(self, session: Any) -> None:
|
||||
self._session = session
|
||||
|
||||
def begin(self) -> "_SessionmakerContext":
|
||||
return self
|
||||
|
||||
def __enter__(self) -> Any:
|
||||
return self._session
|
||||
|
||||
def __exit__(self, exc_type: Any, exc: Any, tb: Any) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def flask_app() -> Flask:
|
||||
return Flask(__name__)
|
||||
|
||||
|
||||
def _patch_session(monkeypatch: pytest.MonkeyPatch, session: Any) -> None:
|
||||
monkeypatch.setattr(service_module, "db", SimpleNamespace(engine=MagicMock(), session=MagicMock()))
|
||||
monkeypatch.setattr(service_module, "Session", lambda *args, **kwargs: _SessionContext(session))
|
||||
monkeypatch.setattr(service_module, "sessionmaker", lambda *args, **kwargs: _SessionmakerContext(session))
|
||||
|
||||
|
||||
def _workflow_trigger(**kwargs: Any) -> WorkflowWebhookTrigger:
|
||||
return cast(WorkflowWebhookTrigger, SimpleNamespace(**kwargs))
|
||||
|
||||
|
||||
def _workflow(**kwargs: Any) -> Workflow:
|
||||
return cast(Workflow, SimpleNamespace(**kwargs))
|
||||
|
||||
|
||||
def _app(**kwargs: Any) -> App:
|
||||
return cast(App, SimpleNamespace(**kwargs))
|
||||
|
||||
|
||||
class TestWebhookServiceLookup:
|
||||
def test_get_webhook_trigger_and_workflow_should_raise_when_webhook_not_found(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
fake_session = MagicMock()
|
||||
fake_session.scalar.return_value = None
|
||||
_patch_session(monkeypatch, fake_session)
|
||||
|
||||
with pytest.raises(ValueError, match="Webhook not found"):
|
||||
WebhookService.get_webhook_trigger_and_workflow("webhook-1")
|
||||
|
||||
def test_get_webhook_trigger_and_workflow_should_raise_when_app_trigger_not_found(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
webhook_trigger = SimpleNamespace(app_id="app-1", node_id="node-1")
|
||||
fake_session = MagicMock()
|
||||
fake_session.scalar.side_effect = [webhook_trigger, None]
|
||||
_patch_session(monkeypatch, fake_session)
|
||||
|
||||
with pytest.raises(ValueError, match="App trigger not found"):
|
||||
WebhookService.get_webhook_trigger_and_workflow("webhook-1")
|
||||
|
||||
def test_get_webhook_trigger_and_workflow_should_raise_when_app_trigger_rate_limited(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
webhook_trigger = SimpleNamespace(app_id="app-1", node_id="node-1")
|
||||
app_trigger = SimpleNamespace(status=AppTriggerStatus.RATE_LIMITED)
|
||||
fake_session = MagicMock()
|
||||
fake_session.scalar.side_effect = [webhook_trigger, app_trigger]
|
||||
_patch_session(monkeypatch, fake_session)
|
||||
|
||||
with pytest.raises(ValueError, match="rate limited"):
|
||||
WebhookService.get_webhook_trigger_and_workflow("webhook-1")
|
||||
|
||||
def test_get_webhook_trigger_and_workflow_should_raise_when_app_trigger_disabled(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
webhook_trigger = SimpleNamespace(app_id="app-1", node_id="node-1")
|
||||
app_trigger = SimpleNamespace(status=AppTriggerStatus.DISABLED)
|
||||
fake_session = MagicMock()
|
||||
fake_session.scalar.side_effect = [webhook_trigger, app_trigger]
|
||||
_patch_session(monkeypatch, fake_session)
|
||||
|
||||
with pytest.raises(ValueError, match="disabled"):
|
||||
WebhookService.get_webhook_trigger_and_workflow("webhook-1")
|
||||
|
||||
def test_get_webhook_trigger_and_workflow_should_raise_when_workflow_not_found(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
webhook_trigger = SimpleNamespace(app_id="app-1", node_id="node-1")
|
||||
app_trigger = SimpleNamespace(status=AppTriggerStatus.ENABLED)
|
||||
fake_session = MagicMock()
|
||||
fake_session.scalar.side_effect = [webhook_trigger, app_trigger, None]
|
||||
_patch_session(monkeypatch, fake_session)
|
||||
|
||||
with pytest.raises(ValueError, match="Workflow not found"):
|
||||
WebhookService.get_webhook_trigger_and_workflow("webhook-1")
|
||||
|
||||
def test_get_webhook_trigger_and_workflow_should_return_values_for_non_debug_mode(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
webhook_trigger = SimpleNamespace(app_id="app-1", node_id="node-1")
|
||||
app_trigger = SimpleNamespace(status=AppTriggerStatus.ENABLED)
|
||||
workflow = MagicMock()
|
||||
workflow.get_node_config_by_id.return_value = {"data": {"key": "value"}}
|
||||
|
||||
fake_session = MagicMock()
|
||||
fake_session.scalar.side_effect = [webhook_trigger, app_trigger, workflow]
|
||||
_patch_session(monkeypatch, fake_session)
|
||||
|
||||
got_trigger, got_workflow, got_node_config = WebhookService.get_webhook_trigger_and_workflow("webhook-1")
|
||||
|
||||
assert got_trigger is webhook_trigger
|
||||
assert got_workflow is workflow
|
||||
assert got_node_config == {"data": {"key": "value"}}
|
||||
|
||||
def test_get_webhook_trigger_and_workflow_should_return_values_for_debug_mode(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
webhook_trigger = SimpleNamespace(app_id="app-1", node_id="node-1")
|
||||
workflow = MagicMock()
|
||||
workflow.get_node_config_by_id.return_value = {"data": {"mode": "debug"}}
|
||||
|
||||
fake_session = MagicMock()
|
||||
fake_session.scalar.side_effect = [webhook_trigger, workflow]
|
||||
_patch_session(monkeypatch, fake_session)
|
||||
|
||||
got_trigger, got_workflow, got_node_config = WebhookService.get_webhook_trigger_and_workflow(
|
||||
"webhook-1",
|
||||
is_debug=True,
|
||||
)
|
||||
|
||||
assert got_trigger is webhook_trigger
|
||||
assert got_workflow is workflow
|
||||
assert got_node_config == {"data": {"mode": "debug"}}
|
||||
def _workflow_trigger(**kwargs: Any) -> Any:
|
||||
return SimpleNamespace(**kwargs)
|
||||
|
||||
|
||||
class TestWebhookServiceExtractionFallbacks:
|
||||
@ -420,237 +272,6 @@ class TestWebhookServiceValidationAndConversion:
|
||||
assert result["webhook_body"] == {"b": 2}
|
||||
|
||||
|
||||
class TestWebhookServiceExecutionAndSync:
|
||||
def test_trigger_workflow_execution_should_trigger_async_workflow_successfully(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
webhook_trigger = _workflow_trigger(
|
||||
app_id="app-1",
|
||||
node_id="node-1",
|
||||
tenant_id="tenant-1",
|
||||
webhook_id="webhook-1",
|
||||
)
|
||||
workflow = _workflow(id="wf-1")
|
||||
webhook_data = {"body": {"x": 1}}
|
||||
|
||||
session = MagicMock()
|
||||
_patch_session(monkeypatch, session)
|
||||
|
||||
end_user = SimpleNamespace(id="end-user-1")
|
||||
monkeypatch.setattr(
|
||||
service_module.EndUserService,
|
||||
"get_or_create_end_user_by_type",
|
||||
MagicMock(return_value=end_user),
|
||||
)
|
||||
quota_type = SimpleNamespace(TRIGGER=SimpleNamespace(consume=MagicMock()))
|
||||
monkeypatch.setattr(service_module, "QuotaType", quota_type)
|
||||
trigger_async_mock = MagicMock()
|
||||
monkeypatch.setattr(service_module.AsyncWorkflowService, "trigger_workflow_async", trigger_async_mock)
|
||||
|
||||
WebhookService.trigger_workflow_execution(webhook_trigger, webhook_data, workflow)
|
||||
|
||||
trigger_async_mock.assert_called_once()
|
||||
|
||||
def test_trigger_workflow_execution_should_mark_tenant_rate_limited_when_quota_exceeded(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
webhook_trigger = _workflow_trigger(
|
||||
app_id="app-1",
|
||||
node_id="node-1",
|
||||
tenant_id="tenant-1",
|
||||
webhook_id="webhook-1",
|
||||
)
|
||||
workflow = _workflow(id="wf-1")
|
||||
|
||||
session = MagicMock()
|
||||
_patch_session(monkeypatch, session)
|
||||
|
||||
monkeypatch.setattr(
|
||||
service_module.EndUserService,
|
||||
"get_or_create_end_user_by_type",
|
||||
MagicMock(return_value=SimpleNamespace(id="end-user-1")),
|
||||
)
|
||||
quota_type = SimpleNamespace(
|
||||
TRIGGER=SimpleNamespace(
|
||||
consume=MagicMock(side_effect=QuotaExceededError(feature="trigger", tenant_id="tenant-1", required=1))
|
||||
)
|
||||
)
|
||||
monkeypatch.setattr(service_module, "QuotaType", quota_type)
|
||||
mark_rate_limited_mock = MagicMock()
|
||||
monkeypatch.setattr(
|
||||
service_module.AppTriggerService, "mark_tenant_triggers_rate_limited", mark_rate_limited_mock
|
||||
)
|
||||
|
||||
with pytest.raises(QuotaExceededError):
|
||||
WebhookService.trigger_workflow_execution(webhook_trigger, {"body": {}}, workflow)
|
||||
|
||||
mark_rate_limited_mock.assert_called_once_with("tenant-1")
|
||||
|
||||
def test_trigger_workflow_execution_should_log_and_reraise_unexpected_errors(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
webhook_trigger = _workflow_trigger(
|
||||
app_id="app-1",
|
||||
node_id="node-1",
|
||||
tenant_id="tenant-1",
|
||||
webhook_id="webhook-1",
|
||||
)
|
||||
workflow = _workflow(id="wf-1")
|
||||
|
||||
session = MagicMock()
|
||||
_patch_session(monkeypatch, session)
|
||||
|
||||
monkeypatch.setattr(
|
||||
service_module.EndUserService,
|
||||
"get_or_create_end_user_by_type",
|
||||
MagicMock(side_effect=RuntimeError("boom")),
|
||||
)
|
||||
logger_exception_mock = MagicMock()
|
||||
monkeypatch.setattr(service_module.logger, "exception", logger_exception_mock)
|
||||
|
||||
with pytest.raises(RuntimeError, match="boom"):
|
||||
WebhookService.trigger_workflow_execution(webhook_trigger, {"body": {}}, workflow)
|
||||
|
||||
logger_exception_mock.assert_called_once()
|
||||
|
||||
def test_sync_webhook_relationships_should_raise_when_workflow_exceeds_node_limit(self) -> None:
|
||||
app = _app(id="app-1", tenant_id="tenant-1", created_by="user-1")
|
||||
workflow = _workflow(
|
||||
walk_nodes=lambda _node_type: [
|
||||
(f"node-{i}", {}) for i in range(WebhookService.MAX_WEBHOOK_NODES_PER_WORKFLOW + 1)
|
||||
]
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError, match="maximum webhook node limit"):
|
||||
WebhookService.sync_webhook_relationships(app, workflow)
|
||||
|
||||
def test_sync_webhook_relationships_should_raise_when_lock_not_acquired(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
app = _app(id="app-1", tenant_id="tenant-1", created_by="user-1")
|
||||
workflow = _workflow(walk_nodes=lambda _node_type: [("node-1", {})])
|
||||
|
||||
lock = MagicMock()
|
||||
lock.acquire.return_value = False
|
||||
monkeypatch.setattr(service_module.redis_client, "get", MagicMock(return_value=None))
|
||||
monkeypatch.setattr(service_module.redis_client, "lock", MagicMock(return_value=lock))
|
||||
|
||||
with pytest.raises(RuntimeError, match="Failed to acquire lock"):
|
||||
WebhookService.sync_webhook_relationships(app, workflow)
|
||||
|
||||
def test_sync_webhook_relationships_should_create_missing_records_and_delete_stale_records(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
app = _app(id="app-1", tenant_id="tenant-1", created_by="user-1")
|
||||
workflow = _workflow(walk_nodes=lambda _node_type: [("node-new", {})])
|
||||
|
||||
class _WorkflowWebhookTrigger:
|
||||
app_id = "app_id"
|
||||
tenant_id = "tenant_id"
|
||||
webhook_id = "webhook_id"
|
||||
node_id = "node_id"
|
||||
|
||||
def __init__(self, app_id: str, tenant_id: str, node_id: str, webhook_id: str, created_by: str) -> None:
|
||||
self.id = None
|
||||
self.app_id = app_id
|
||||
self.tenant_id = tenant_id
|
||||
self.node_id = node_id
|
||||
self.webhook_id = webhook_id
|
||||
self.created_by = created_by
|
||||
|
||||
class _Select:
|
||||
def where(self, *args: Any, **kwargs: Any) -> "_Select":
|
||||
return self
|
||||
|
||||
class _Session:
|
||||
def __init__(self) -> None:
|
||||
self.added: list[Any] = []
|
||||
self.deleted: list[Any] = []
|
||||
self.commit_count = 0
|
||||
self.existing_records = [SimpleNamespace(node_id="node-stale")]
|
||||
|
||||
def scalars(self, _stmt: Any) -> Any:
|
||||
return SimpleNamespace(all=lambda: self.existing_records)
|
||||
|
||||
def add(self, obj: Any) -> None:
|
||||
self.added.append(obj)
|
||||
|
||||
def flush(self) -> None:
|
||||
for idx, obj in enumerate(self.added, start=1):
|
||||
if obj.id is None:
|
||||
obj.id = f"rec-{idx}"
|
||||
|
||||
def commit(self) -> None:
|
||||
self.commit_count += 1
|
||||
|
||||
def delete(self, obj: Any) -> None:
|
||||
self.deleted.append(obj)
|
||||
|
||||
lock = MagicMock()
|
||||
lock.acquire.return_value = True
|
||||
lock.release.return_value = None
|
||||
|
||||
fake_session = _Session()
|
||||
|
||||
monkeypatch.setattr(service_module, "WorkflowWebhookTrigger", _WorkflowWebhookTrigger)
|
||||
monkeypatch.setattr(service_module, "select", MagicMock(return_value=_Select()))
|
||||
monkeypatch.setattr(service_module.redis_client, "get", MagicMock(return_value=None))
|
||||
monkeypatch.setattr(service_module.redis_client, "lock", MagicMock(return_value=lock))
|
||||
redis_set_mock = MagicMock()
|
||||
redis_delete_mock = MagicMock()
|
||||
monkeypatch.setattr(service_module.redis_client, "set", redis_set_mock)
|
||||
monkeypatch.setattr(service_module.redis_client, "delete", redis_delete_mock)
|
||||
monkeypatch.setattr(WebhookService, "generate_webhook_id", MagicMock(return_value="generated-webhook-id"))
|
||||
_patch_session(monkeypatch, fake_session)
|
||||
|
||||
WebhookService.sync_webhook_relationships(app, workflow)
|
||||
|
||||
assert len(fake_session.added) == 1
|
||||
assert len(fake_session.deleted) == 1
|
||||
redis_set_mock.assert_called_once()
|
||||
redis_delete_mock.assert_called_once()
|
||||
lock.release.assert_called_once()
|
||||
|
||||
def test_sync_webhook_relationships_should_log_when_lock_release_fails(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
app = _app(id="app-1", tenant_id="tenant-1", created_by="user-1")
|
||||
workflow = _workflow(walk_nodes=lambda _node_type: [])
|
||||
|
||||
class _Select:
|
||||
def where(self, *args: Any, **kwargs: Any) -> "_Select":
|
||||
return self
|
||||
|
||||
class _Session:
|
||||
def scalars(self, _stmt: Any) -> Any:
|
||||
return SimpleNamespace(all=lambda: [])
|
||||
|
||||
def commit(self) -> None:
|
||||
return None
|
||||
|
||||
lock = MagicMock()
|
||||
lock.acquire.return_value = True
|
||||
lock.release.side_effect = RuntimeError("release failed")
|
||||
|
||||
logger_exception_mock = MagicMock()
|
||||
|
||||
monkeypatch.setattr(service_module, "select", MagicMock(return_value=_Select()))
|
||||
monkeypatch.setattr(service_module.redis_client, "get", MagicMock(return_value=None))
|
||||
monkeypatch.setattr(service_module.redis_client, "lock", MagicMock(return_value=lock))
|
||||
monkeypatch.setattr(service_module.logger, "exception", logger_exception_mock)
|
||||
_patch_session(monkeypatch, _Session())
|
||||
|
||||
WebhookService.sync_webhook_relationships(app, workflow)
|
||||
|
||||
assert logger_exception_mock.call_count == 1
|
||||
|
||||
|
||||
class TestWebhookServiceUtilities:
|
||||
def test_generate_webhook_response_should_fallback_when_response_body_is_not_json(self) -> None:
|
||||
node_config = {"data": {"status_code": 200, "response_body": "{bad-json"}}
|
||||
|
||||
@ -0,0 +1,608 @@
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
|
||||
from services.workflow_collaboration_service import WorkflowCollaborationService
|
||||
|
||||
|
||||
class TestWorkflowCollaborationService:
|
||||
@pytest.fixture
|
||||
def service(self) -> tuple[WorkflowCollaborationService, Mock, Mock]:
|
||||
repository = Mock(spec=WorkflowCollaborationRepository)
|
||||
socketio = Mock()
|
||||
return WorkflowCollaborationService(repository, socketio), repository, socketio
|
||||
|
||||
def test_authorize_and_join_workflow_room_returns_leader_status(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, socketio = service
|
||||
socketio.get_session.return_value = {
|
||||
"user_id": "u-1",
|
||||
"username": "Jane",
|
||||
"avatar": None,
|
||||
"tenant_id": "t-1",
|
||||
}
|
||||
|
||||
with (
|
||||
patch.object(collaboration_service, "_can_access_workflow", return_value=True),
|
||||
patch.object(collaboration_service, "get_or_set_leader", return_value="sid-1"),
|
||||
patch.object(collaboration_service, "broadcast_online_users"),
|
||||
):
|
||||
# Act
|
||||
result = collaboration_service.authorize_and_join_workflow_room("wf-1", "sid-1")
|
||||
|
||||
# Assert
|
||||
assert result == ("u-1", True)
|
||||
repository.set_session_info.assert_called_once()
|
||||
socketio.enter_room.assert_called_once_with("sid-1", "wf-1")
|
||||
socketio.emit.assert_called_once_with("status", {"isLeader": True}, room="sid-1")
|
||||
|
||||
def test_authorize_and_join_workflow_room_returns_none_when_missing_user(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, _repository, socketio = service
|
||||
socketio.get_session.return_value = {}
|
||||
|
||||
# Act
|
||||
result = collaboration_service.authorize_and_join_workflow_room("wf-1", "sid-1")
|
||||
|
||||
# Assert
|
||||
assert result is None
|
||||
|
||||
def test_authorize_and_join_workflow_room_returns_none_when_missing_tenant(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, socketio = service
|
||||
socketio.get_session.return_value = {"user_id": "u-1", "username": "Jane", "avatar": None}
|
||||
|
||||
result = collaboration_service.authorize_and_join_workflow_room("wf-1", "sid-1")
|
||||
|
||||
assert result is None
|
||||
repository.set_session_info.assert_not_called()
|
||||
socketio.enter_room.assert_not_called()
|
||||
socketio.emit.assert_not_called()
|
||||
|
||||
def test_authorize_and_join_workflow_room_returns_none_when_workflow_is_not_accessible(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, socketio = service
|
||||
socketio.get_session.return_value = {
|
||||
"user_id": "u-1",
|
||||
"username": "Jane",
|
||||
"avatar": None,
|
||||
"tenant_id": "t-1",
|
||||
}
|
||||
|
||||
with patch.object(collaboration_service, "_can_access_workflow", return_value=False):
|
||||
result = collaboration_service.authorize_and_join_workflow_room("wf-1", "sid-1")
|
||||
|
||||
assert result is None
|
||||
repository.set_session_info.assert_not_called()
|
||||
socketio.enter_room.assert_not_called()
|
||||
socketio.emit.assert_not_called()
|
||||
|
||||
def test_repr_and_save_socket_identity(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
|
||||
collaboration_service, _repository, socketio = service
|
||||
user = Mock()
|
||||
user.id = "u-1"
|
||||
user.name = "Jane"
|
||||
user.avatar = "avatar.png"
|
||||
user.current_tenant_id = "t-1"
|
||||
|
||||
assert "WorkflowCollaborationService" in repr(collaboration_service)
|
||||
|
||||
collaboration_service.save_socket_identity("sid-1", user)
|
||||
|
||||
socketio.save_session.assert_called_once_with(
|
||||
"sid-1",
|
||||
{"user_id": "u-1", "username": "Jane", "avatar": "avatar.png", "tenant_id": "t-1"},
|
||||
)
|
||||
|
||||
def test_can_access_workflow_uses_session_factory(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, _repository, _socketio = service
|
||||
session = Mock()
|
||||
session.scalar.return_value = "wf-1"
|
||||
session_context = Mock()
|
||||
session_context.__enter__ = Mock(return_value=session)
|
||||
session_context.__exit__ = Mock(return_value=False)
|
||||
|
||||
with patch(
|
||||
"services.workflow_collaboration_service.session_factory.create_session",
|
||||
return_value=session_context,
|
||||
):
|
||||
result = collaboration_service._can_access_workflow("wf-1", "tenant-1")
|
||||
|
||||
assert result is True
|
||||
session.scalar.assert_called_once()
|
||||
|
||||
def test_relay_collaboration_event_unauthorized(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_sid_mapping.return_value = None
|
||||
|
||||
# Act
|
||||
result = collaboration_service.relay_collaboration_event("sid-1", {})
|
||||
|
||||
# Assert
|
||||
assert result == ({"msg": "unauthorized"}, 401)
|
||||
|
||||
def test_relay_collaboration_event_emits_update(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, socketio = service
|
||||
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
|
||||
payload = {"type": "mouse_move", "data": {"x": 1}, "timestamp": 123}
|
||||
|
||||
# Act
|
||||
result = collaboration_service.relay_collaboration_event("sid-1", payload)
|
||||
|
||||
# Assert
|
||||
assert result == ({"msg": "event_broadcasted"}, 200)
|
||||
socketio.emit.assert_called_once_with(
|
||||
"collaboration_update",
|
||||
{"type": "mouse_move", "userId": "u-1", "data": {"x": 1}, "timestamp": 123},
|
||||
room="wf-1",
|
||||
skip_sid="sid-1",
|
||||
)
|
||||
|
||||
def test_relay_collaboration_event_requires_event_type(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
|
||||
|
||||
result = collaboration_service.relay_collaboration_event("sid-1", {"data": {"x": 1}})
|
||||
|
||||
assert result == ({"msg": "invalid event type"}, 400)
|
||||
|
||||
def test_relay_collaboration_event_sync_request_forwards_to_active_leader(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, socketio = service
|
||||
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
|
||||
repository.get_current_leader.return_value = "sid-leader"
|
||||
payload = {"type": "sync_request", "data": {"reason": "join"}, "timestamp": 123}
|
||||
|
||||
with (
|
||||
patch.object(collaboration_service, "refresh_session_state"),
|
||||
patch.object(collaboration_service, "is_session_active", return_value=True),
|
||||
):
|
||||
result = collaboration_service.relay_collaboration_event("sid-1", payload)
|
||||
|
||||
assert result == ({"msg": "sync_request_forwarded"}, 200)
|
||||
socketio.emit.assert_called_once_with(
|
||||
"collaboration_update",
|
||||
{"type": "sync_request", "userId": "u-1", "data": {"reason": "join"}, "timestamp": 123},
|
||||
room="sid-leader",
|
||||
)
|
||||
repository.set_leader.assert_not_called()
|
||||
|
||||
def test_relay_collaboration_event_sync_request_reelects_active_leader(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, socketio = service
|
||||
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
|
||||
repository.get_current_leader.return_value = "sid-old"
|
||||
repository.list_sessions.return_value = [
|
||||
{
|
||||
"user_id": "u-2",
|
||||
"username": "B",
|
||||
"avatar": None,
|
||||
"sid": "sid-2",
|
||||
"connected_at": 1,
|
||||
"graph_active": True,
|
||||
},
|
||||
{
|
||||
"user_id": "u-3",
|
||||
"username": "C",
|
||||
"avatar": None,
|
||||
"sid": "sid-3",
|
||||
"connected_at": 2,
|
||||
"graph_active": True,
|
||||
},
|
||||
]
|
||||
payload = {"type": "sync_request", "data": {"reason": "join"}, "timestamp": 123}
|
||||
|
||||
def _is_session_active(_workflow_id: str, session_sid: str) -> bool:
|
||||
return session_sid != "sid-old"
|
||||
|
||||
with (
|
||||
patch.object(collaboration_service, "refresh_session_state"),
|
||||
patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change,
|
||||
patch.object(collaboration_service, "is_session_active", side_effect=_is_session_active),
|
||||
):
|
||||
result = collaboration_service.relay_collaboration_event("sid-2", payload)
|
||||
|
||||
assert result == ({"msg": "sync_request_forwarded"}, 200)
|
||||
repository.delete_leader.assert_called_once_with("wf-1")
|
||||
repository.set_leader.assert_called_once_with("wf-1", "sid-2")
|
||||
broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")
|
||||
socketio.emit.assert_called_once_with(
|
||||
"collaboration_update",
|
||||
{"type": "sync_request", "userId": "u-1", "data": {"reason": "join"}, "timestamp": 123},
|
||||
room="sid-2",
|
||||
)
|
||||
|
||||
def test_relay_collaboration_event_sync_request_returns_when_no_active_leader(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, socketio = service
|
||||
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
|
||||
repository.get_current_leader.return_value = "sid-old"
|
||||
repository.list_sessions.return_value = []
|
||||
payload = {"type": "sync_request", "data": {"reason": "join"}, "timestamp": 123}
|
||||
|
||||
with (
|
||||
patch.object(collaboration_service, "refresh_session_state"),
|
||||
patch.object(collaboration_service, "is_session_active", return_value=False),
|
||||
):
|
||||
result = collaboration_service.relay_collaboration_event("sid-2", payload)
|
||||
|
||||
assert result == ({"msg": "no_active_leader"}, 200)
|
||||
repository.delete_leader.assert_called_once_with("wf-1")
|
||||
socketio.emit.assert_not_called()
|
||||
|
||||
def test_relay_graph_event_unauthorized(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_sid_mapping.return_value = None
|
||||
|
||||
# Act
|
||||
result = collaboration_service.relay_graph_event("sid-1", {"nodes": []})
|
||||
|
||||
# Assert
|
||||
assert result == ({"msg": "unauthorized"}, 401)
|
||||
|
||||
def test_disconnect_session_no_mapping(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
    """Disconnecting an unknown sid performs no session cleanup."""
    svc, repo, _ = service
    repo.get_sid_mapping.return_value = None

    svc.disconnect_session("sid-1")

    repo.delete_session.assert_not_called()
|
||||
|
||||
def test_disconnect_session_cleans_up(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
|
||||
|
||||
with (
|
||||
patch.object(collaboration_service, "handle_leader_disconnect") as handle_leader_disconnect,
|
||||
patch.object(collaboration_service, "broadcast_online_users") as broadcast_online_users,
|
||||
):
|
||||
# Act
|
||||
collaboration_service.disconnect_session("sid-1")
|
||||
|
||||
# Assert
|
||||
repository.delete_session.assert_called_once_with("wf-1", "sid-1")
|
||||
handle_leader_disconnect.assert_called_once_with("wf-1", "sid-1")
|
||||
broadcast_online_users.assert_called_once_with("wf-1")
|
||||
|
||||
def test_get_or_set_leader_returns_active_leader(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_current_leader.return_value = "sid-1"
|
||||
|
||||
with patch.object(collaboration_service, "is_session_active", return_value=True):
|
||||
# Act
|
||||
result = collaboration_service.get_or_set_leader("wf-1", "sid-2")
|
||||
|
||||
# Assert
|
||||
assert result == "sid-1"
|
||||
repository.set_leader_if_absent.assert_not_called()
|
||||
|
||||
def test_get_or_set_leader_replaces_dead_leader(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_current_leader.return_value = "sid-1"
|
||||
repository.set_leader_if_absent.return_value = True
|
||||
repository.list_sessions.return_value = [
|
||||
{
|
||||
"user_id": "u-2",
|
||||
"username": "B",
|
||||
"avatar": None,
|
||||
"sid": "sid-2",
|
||||
"connected_at": 1,
|
||||
"graph_active": True,
|
||||
}
|
||||
]
|
||||
|
||||
with (
|
||||
patch.object(collaboration_service, "is_session_active", side_effect=lambda _wf, sid: sid != "sid-1"),
|
||||
patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change,
|
||||
):
|
||||
# Act
|
||||
result = collaboration_service.get_or_set_leader("wf-1", "sid-2")
|
||||
|
||||
# Assert
|
||||
assert result == "sid-2"
|
||||
repository.delete_session.assert_called_once_with("wf-1", "sid-1")
|
||||
repository.delete_leader.assert_called_once_with("wf-1")
|
||||
broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")
|
||||
|
||||
def test_get_or_set_leader_falls_back_to_existing(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_current_leader.side_effect = [None, "sid-3"]
|
||||
repository.set_leader_if_absent.return_value = False
|
||||
repository.list_sessions.return_value = [
|
||||
{
|
||||
"user_id": "u-2",
|
||||
"username": "B",
|
||||
"avatar": None,
|
||||
"sid": "sid-2",
|
||||
"connected_at": 1,
|
||||
"graph_active": True,
|
||||
}
|
||||
]
|
||||
|
||||
# Act
|
||||
result = collaboration_service.get_or_set_leader("wf-1", "sid-2")
|
||||
|
||||
# Assert
|
||||
assert result == "sid-3"
|
||||
|
||||
def test_get_or_set_leader_returns_sid_when_leader_still_missing(
    self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
    """When no leader exists before or after the claim attempt, the caller's sid wins."""
    svc, repo, _ = service
    # Leader lookup misses both before and after set_leader_if_absent fails.
    repo.get_current_leader.side_effect = [None, None]
    repo.set_leader_if_absent.return_value = False

    assert svc.get_or_set_leader("wf-1", "sid-2") == "sid-2"
|
||||
|
||||
def test_handle_leader_disconnect_elects_new(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_current_leader.return_value = "sid-1"
|
||||
repository.list_sessions.return_value = [
|
||||
{
|
||||
"user_id": "u-2",
|
||||
"username": "B",
|
||||
"avatar": None,
|
||||
"sid": "sid-2",
|
||||
"connected_at": 1,
|
||||
"graph_active": True,
|
||||
}
|
||||
]
|
||||
|
||||
with (
|
||||
patch.object(collaboration_service, "is_session_active", return_value=True),
|
||||
patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change,
|
||||
):
|
||||
# Act
|
||||
collaboration_service.handle_leader_disconnect("wf-1", "sid-1")
|
||||
|
||||
# Assert
|
||||
repository.set_leader.assert_called_once_with("wf-1", "sid-2")
|
||||
broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")
|
||||
|
||||
def test_handle_leader_disconnect_clears_when_empty(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_current_leader.return_value = "sid-1"
|
||||
repository.list_sessions.return_value = []
|
||||
|
||||
# Act
|
||||
collaboration_service.handle_leader_disconnect("wf-1", "sid-1")
|
||||
|
||||
# Assert
|
||||
repository.delete_leader.assert_called_once_with("wf-1")
|
||||
|
||||
def test_handle_leader_disconnect_ignores_non_leader_or_missing_leader(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, _socketio = service
|
||||
|
||||
repository.get_current_leader.return_value = None
|
||||
collaboration_service.handle_leader_disconnect("wf-1", "sid-1")
|
||||
|
||||
repository.get_current_leader.return_value = "sid-leader"
|
||||
collaboration_service.handle_leader_disconnect("wf-1", "sid-other")
|
||||
|
||||
repository.set_leader.assert_not_called()
|
||||
repository.delete_leader.assert_not_called()
|
||||
|
||||
def test_broadcast_leader_change_logs_emit_errors(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, socketio = service
|
||||
repository.get_session_sids.return_value = ["sid-1", "sid-2"]
|
||||
socketio.emit.side_effect = [RuntimeError("boom"), None]
|
||||
|
||||
with patch("services.workflow_collaboration_service.logging.exception") as exception_mock:
|
||||
collaboration_service.broadcast_leader_change("wf-1", "sid-2")
|
||||
|
||||
assert exception_mock.call_count == 1
|
||||
|
||||
def test_broadcast_online_users_sorts_and_emits(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, socketio = service
|
||||
repository.list_sessions.return_value = [
|
||||
{"user_id": "u-1", "username": "A", "avatar": None, "sid": "sid-1", "connected_at": 3},
|
||||
{"user_id": "u-2", "username": "B", "avatar": None, "sid": "sid-2", "connected_at": 1},
|
||||
]
|
||||
repository.get_current_leader.return_value = "sid-1"
|
||||
|
||||
with patch.object(collaboration_service, "is_session_active", return_value=True):
|
||||
# Act
|
||||
collaboration_service.broadcast_online_users("wf-1")
|
||||
|
||||
# Assert
|
||||
socketio.emit.assert_called_once_with(
|
||||
"online_users",
|
||||
{
|
||||
"workflow_id": "wf-1",
|
||||
"users": [
|
||||
{"user_id": "u-2", "username": "B", "avatar": None, "sid": "sid-2", "connected_at": 1},
|
||||
{"user_id": "u-1", "username": "A", "avatar": None, "sid": "sid-1", "connected_at": 3},
|
||||
],
|
||||
"leader": "sid-1",
|
||||
},
|
||||
room="wf-1",
|
||||
)
|
||||
|
||||
def test_broadcast_online_users_reassigns_missing_leader(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, socketio = service
|
||||
users = [{"user_id": "u-2", "username": "B", "avatar": None, "sid": "sid-2", "connected_at": 1}]
|
||||
repository.get_current_leader.return_value = "sid-old"
|
||||
|
||||
with (
|
||||
patch.object(collaboration_service, "_prune_inactive_sessions", return_value=users),
|
||||
patch.object(collaboration_service, "_select_graph_leader", return_value="sid-2"),
|
||||
patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change,
|
||||
):
|
||||
collaboration_service.broadcast_online_users("wf-1")
|
||||
|
||||
repository.delete_leader.assert_called_once_with("wf-1")
|
||||
repository.set_leader.assert_called_once_with("wf-1", "sid-2")
|
||||
broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")
|
||||
socketio.emit.assert_called_once_with(
|
||||
"online_users",
|
||||
{"workflow_id": "wf-1", "users": users, "leader": "sid-2"},
|
||||
room="wf-1",
|
||||
)
|
||||
|
||||
def test_refresh_session_state_expires_active_leader(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_current_leader.return_value = "sid-1"
|
||||
|
||||
with patch.object(collaboration_service, "is_session_active", return_value=True):
|
||||
# Act
|
||||
collaboration_service.refresh_session_state("wf-1", "sid-1")
|
||||
|
||||
# Assert
|
||||
repository.refresh_session_state.assert_called_once_with("wf-1", "sid-1")
|
||||
repository.expire_leader.assert_called_once_with("wf-1")
|
||||
repository.set_leader.assert_not_called()
|
||||
|
||||
def test_refresh_session_state_sets_leader_when_missing(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_current_leader.return_value = None
|
||||
repository.list_sessions.return_value = [
|
||||
{
|
||||
"user_id": "u-2",
|
||||
"username": "B",
|
||||
"avatar": None,
|
||||
"sid": "sid-2",
|
||||
"connected_at": 1,
|
||||
"graph_active": True,
|
||||
}
|
||||
]
|
||||
|
||||
with (
|
||||
patch.object(collaboration_service, "is_session_active", return_value=True),
|
||||
patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change,
|
||||
):
|
||||
# Act
|
||||
collaboration_service.refresh_session_state("wf-1", "sid-2")
|
||||
|
||||
# Assert
|
||||
repository.set_leader.assert_called_once_with("wf-1", "sid-2")
|
||||
broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")
|
||||
|
||||
def test_refresh_session_state_replaces_inactive_existing_leader(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_current_leader.return_value = "sid-old"
|
||||
|
||||
with (
|
||||
patch.object(collaboration_service, "is_session_active", return_value=False),
|
||||
patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change,
|
||||
):
|
||||
collaboration_service.refresh_session_state("wf-1", "sid-new")
|
||||
|
||||
repository.delete_leader.assert_called_once_with("wf-1")
|
||||
repository.set_leader.assert_called_once_with("wf-1", "sid-new")
|
||||
broadcast_leader_change.assert_called_once_with("wf-1", "sid-new")
|
||||
|
||||
def test_relay_graph_event_emits_update(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, socketio = service
|
||||
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
|
||||
|
||||
# Act
|
||||
result = collaboration_service.relay_graph_event("sid-1", {"nodes": []})
|
||||
|
||||
# Assert
|
||||
assert result == ({"msg": "graph_update_broadcasted"}, 200)
|
||||
repository.refresh_session_state.assert_called_once_with("wf-1", "sid-1")
|
||||
socketio.emit.assert_called_once_with("graph_update", {"nodes": []}, room="wf-1", skip_sid="sid-1")
|
||||
|
||||
def test_prune_inactive_sessions_handles_empty_and_removes_stale(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.list_sessions.return_value = []
|
||||
assert collaboration_service._prune_inactive_sessions("wf-1") == []
|
||||
|
||||
active = {"sid": "sid-1", "user_id": "u-1", "connected_at": 1}
|
||||
stale = {"sid": "sid-2", "user_id": "u-2", "connected_at": 2}
|
||||
repository.list_sessions.return_value = [active, stale]
|
||||
|
||||
with patch.object(
|
||||
collaboration_service,
|
||||
"is_session_active",
|
||||
side_effect=lambda _workflow_id, sid: sid == "sid-1",
|
||||
):
|
||||
users = collaboration_service._prune_inactive_sessions("wf-1")
|
||||
|
||||
assert users == [active]
|
||||
repository.delete_session.assert_called_with("wf-1", "sid-2")
|
||||
|
||||
def test_is_session_active_guard_branches(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
    """Each guard in is_session_active independently forces a False result."""
    svc, repo, sio = service
    # Baseline: every dependency reports "alive".
    sio.manager.is_connected.return_value = True
    repo.session_exists.return_value = True
    repo.sid_mapping_exists.return_value = True

    # Empty sid is rejected outright.
    assert svc.is_session_active("wf-1", "") is False

    # Socket manager says the sid is not connected.
    sio.manager.is_connected.return_value = False
    assert svc.is_session_active("wf-1", "sid-1") is False

    # A missing/raising manager attribute is treated as "not active".
    sio.manager.is_connected.side_effect = AttributeError("missing manager")
    assert svc.is_session_active("wf-1", "sid-1") is False
    sio.manager.is_connected.side_effect = None

    # Repository has no session record for the sid.
    sio.manager.is_connected.return_value = True
    repo.session_exists.return_value = False
    assert svc.is_session_active("wf-1", "sid-1") is False

    # Repository has no sid -> workflow mapping.
    repo.session_exists.return_value = True
    repo.sid_mapping_exists.return_value = False
    assert svc.is_session_active("wf-1", "sid-1") is False
|
||||
--- new file: api/tests/unit_tests/services/test_workflow_comment_service.py (578 lines added, @@ -0,0 +1,578 @@) ---
|
||||
from unittest.mock import MagicMock, Mock, patch
|
||||
|
||||
import pytest
|
||||
from werkzeug.exceptions import Forbidden, NotFound
|
||||
|
||||
from services import workflow_comment_service as service_module
|
||||
from services.workflow_comment_service import WorkflowCommentService
|
||||
|
||||
|
||||
@pytest.fixture
def mock_session(monkeypatch: pytest.MonkeyPatch) -> Mock:
    """Patch the service module's Session/db/email task and yield the inner session mock.

    The returned mock is what ``with Session(...) as session:`` produces inside
    the service, pre-wired so ``session.scalars(...).all()`` returns ``[]``.
    """
    inner_session = Mock()

    # Context manager returned by Session(...): __enter__ hands back inner_session.
    cm = MagicMock()
    cm.__enter__.return_value = inner_session
    cm.__exit__.return_value = False

    fake_db = MagicMock()
    fake_db.engine = Mock()

    # Default scalars() result: an empty list from .all().
    no_rows = Mock()
    no_rows.all.return_value = []
    inner_session.scalars.return_value = no_rows

    monkeypatch.setattr(service_module, "Session", Mock(return_value=cm))
    monkeypatch.setattr(service_module, "db", fake_db)
    monkeypatch.setattr(service_module.send_workflow_comment_mention_email_task, "delay", Mock())
    return inner_session
|
||||
|
||||
|
||||
def _mock_scalars(result_list: list[object]) -> Mock:
|
||||
scalars = Mock()
|
||||
scalars.all.return_value = result_list
|
||||
return scalars
|
||||
|
||||
|
||||
class TestWorkflowCommentService:
|
||||
def test_validate_content_rejects_empty(self) -> None:
    """Whitespace-only content is rejected by the validator."""
    with pytest.raises(ValueError):
        WorkflowCommentService._validate_content(" ")
|
||||
|
||||
def test_validate_content_rejects_too_long(self) -> None:
    """Content over the 1000-character limit is rejected by the validator."""
    with pytest.raises(ValueError):
        WorkflowCommentService._validate_content("a" * 1001)
|
||||
|
||||
def test_filter_valid_mentioned_user_ids_filters_by_tenant_and_preserves_order(self, mock_session: Mock) -> None:
|
||||
tenant_member_1 = "123e4567-e89b-12d3-a456-426614174000"
|
||||
tenant_member_2 = "123e4567-e89b-12d3-a456-426614174002"
|
||||
non_tenant_member = "123e4567-e89b-12d3-a456-426614174001"
|
||||
mock_session.scalars.return_value = _mock_scalars([tenant_member_1, tenant_member_2])
|
||||
|
||||
result = WorkflowCommentService._filter_valid_mentioned_user_ids(
|
||||
[
|
||||
tenant_member_1,
|
||||
"",
|
||||
123, # type: ignore[list-item]
|
||||
tenant_member_1,
|
||||
non_tenant_member,
|
||||
tenant_member_2,
|
||||
],
|
||||
session=mock_session,
|
||||
tenant_id="tenant-1",
|
||||
)
|
||||
|
||||
assert result == [
|
||||
tenant_member_1,
|
||||
tenant_member_2,
|
||||
]
|
||||
|
||||
def test_format_comment_excerpt_handles_short_and_long_limits(self) -> None:
    """Excerpts are stripped, hard-truncated at tiny limits, and ellipsized otherwise."""
    # Fits within the limit: only surrounding whitespace is removed.
    assert WorkflowCommentService._format_comment_excerpt(" hello ", max_length=10) == "hello"
    # Limit too small for an ellipsis: plain truncation.
    assert WorkflowCommentService._format_comment_excerpt("abcdefghijk", max_length=3) == "abc"
    # Normal truncation: trailing "..." counts toward the limit.
    assert WorkflowCommentService._format_comment_excerpt(" abcdefghijk ", max_length=8) == "abcde..."
|
||||
|
||||
def test_build_mention_email_payloads_returns_empty_for_no_candidates(self, mock_session: Mock) -> None:
|
||||
assert (
|
||||
WorkflowCommentService._build_mention_email_payloads(
|
||||
session=mock_session,
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
mentioner_id="user-1",
|
||||
mentioned_user_ids=[],
|
||||
content="hello",
|
||||
)
|
||||
== []
|
||||
)
|
||||
assert (
|
||||
WorkflowCommentService._build_mention_email_payloads(
|
||||
session=mock_session,
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
mentioner_id="user-1",
|
||||
mentioned_user_ids=["user-1"],
|
||||
content="hello",
|
||||
)
|
||||
== []
|
||||
)
|
||||
|
||||
def test_dispatch_mention_emails_enqueues_each_payload(self) -> None:
|
||||
delay_mock = Mock()
|
||||
with patch.object(service_module.send_workflow_comment_mention_email_task, "delay", delay_mock):
|
||||
WorkflowCommentService._dispatch_mention_emails(
|
||||
[
|
||||
{"to": "a@example.com"},
|
||||
{"to": "b@example.com"},
|
||||
]
|
||||
)
|
||||
|
||||
assert delay_mock.call_count == 2
|
||||
|
||||
def test_build_mention_email_payloads_skips_accounts_without_email(self, mock_session: Mock) -> None:
|
||||
account_without_email = Mock()
|
||||
account_without_email.email = None
|
||||
account_without_email.name = "No Email"
|
||||
account_without_email.interface_language = "en-US"
|
||||
|
||||
account_with_email = Mock()
|
||||
account_with_email.email = "user@example.com"
|
||||
account_with_email.name = ""
|
||||
account_with_email.interface_language = None
|
||||
|
||||
mock_session.scalar.side_effect = ["My App", "Commenter"]
|
||||
mock_session.scalars.return_value = _mock_scalars([account_without_email, account_with_email])
|
||||
|
||||
payloads = WorkflowCommentService._build_mention_email_payloads(
|
||||
session=mock_session,
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
mentioner_id="user-1",
|
||||
mentioned_user_ids=["user-2"],
|
||||
content="hello",
|
||||
)
|
||||
expected_app_url = f"{service_module.dify_config.CONSOLE_WEB_URL.rstrip('/')}/app/app-1/workflow"
|
||||
|
||||
assert payloads == [
|
||||
{
|
||||
"language": "en-US",
|
||||
"to": "user@example.com",
|
||||
"mentioned_name": "user@example.com",
|
||||
"commenter_name": "Commenter",
|
||||
"app_name": "My App",
|
||||
"comment_content": "hello",
|
||||
"app_url": expected_app_url,
|
||||
}
|
||||
]
|
||||
|
||||
def test_create_comment_creates_mentions(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.id = "comment-1"
|
||||
comment.created_at = "ts"
|
||||
|
||||
with (
|
||||
patch.object(service_module, "WorkflowComment", return_value=comment),
|
||||
patch.object(service_module, "WorkflowCommentMention", return_value=Mock()),
|
||||
patch.object(WorkflowCommentService, "_filter_valid_mentioned_user_ids", return_value=["user-2"]),
|
||||
):
|
||||
result = WorkflowCommentService.create_comment(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
created_by="user-1",
|
||||
content="hello",
|
||||
position_x=1.0,
|
||||
position_y=2.0,
|
||||
mentioned_user_ids=["user-2", "bad-id"],
|
||||
)
|
||||
|
||||
assert result == {"id": "comment-1", "created_at": "ts"}
|
||||
assert mock_session.add.call_args_list[0].args[0] is comment
|
||||
assert mock_session.add.call_count == 2
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
def test_update_comment_raises_not_found(self, mock_session: Mock) -> None:
|
||||
mock_session.scalar.return_value = None
|
||||
|
||||
with pytest.raises(NotFound):
|
||||
WorkflowCommentService.update_comment(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
user_id="user-1",
|
||||
content="hello",
|
||||
)
|
||||
|
||||
def test_update_comment_raises_forbidden(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.created_by = "owner"
|
||||
mock_session.scalar.return_value = comment
|
||||
|
||||
with pytest.raises(Forbidden):
|
||||
WorkflowCommentService.update_comment(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
user_id="intruder",
|
||||
content="hello",
|
||||
)
|
||||
|
||||
def test_update_comment_replaces_mentions(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.id = "comment-1"
|
||||
comment.created_by = "owner"
|
||||
mock_session.scalar.return_value = comment
|
||||
|
||||
existing_mentions = [Mock(), Mock()]
|
||||
mock_session.scalars.return_value = _mock_scalars(existing_mentions)
|
||||
|
||||
with patch.object(WorkflowCommentService, "_filter_valid_mentioned_user_ids", return_value=["user-2"]):
|
||||
result = WorkflowCommentService.update_comment(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
user_id="owner",
|
||||
content="updated",
|
||||
mentioned_user_ids=["user-2", "bad-id"],
|
||||
)
|
||||
|
||||
assert result == {"id": "comment-1", "updated_at": comment.updated_at}
|
||||
assert mock_session.delete.call_count == 2
|
||||
assert mock_session.add.call_count == 1
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
def test_update_comment_preserves_mentions_when_mentioned_user_ids_omitted(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.id = "comment-1"
|
||||
comment.created_by = "owner"
|
||||
mock_session.scalar.return_value = comment
|
||||
|
||||
with (
|
||||
patch.object(WorkflowCommentService, "_filter_valid_mentioned_user_ids") as filter_mentions_mock,
|
||||
patch.object(WorkflowCommentService, "_build_mention_email_payloads") as build_payloads_mock,
|
||||
patch.object(WorkflowCommentService, "_dispatch_mention_emails") as dispatch_mock,
|
||||
):
|
||||
result = WorkflowCommentService.update_comment(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
user_id="owner",
|
||||
content="updated",
|
||||
)
|
||||
|
||||
assert result == {"id": "comment-1", "updated_at": comment.updated_at}
|
||||
mock_session.delete.assert_not_called()
|
||||
mock_session.add.assert_not_called()
|
||||
filter_mentions_mock.assert_not_called()
|
||||
build_payloads_mock.assert_not_called()
|
||||
dispatch_mock.assert_called_once_with([])
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
def test_update_comment_clears_mentions_when_empty_list_provided(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.id = "comment-1"
|
||||
comment.created_by = "owner"
|
||||
mock_session.scalar.return_value = comment
|
||||
|
||||
existing_mentions = [Mock(), Mock()]
|
||||
mock_session.scalars.return_value = _mock_scalars(existing_mentions)
|
||||
|
||||
with patch.object(WorkflowCommentService, "_filter_valid_mentioned_user_ids", return_value=[]):
|
||||
result = WorkflowCommentService.update_comment(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
user_id="owner",
|
||||
content="updated",
|
||||
mentioned_user_ids=[],
|
||||
)
|
||||
|
||||
assert result == {"id": "comment-1", "updated_at": comment.updated_at}
|
||||
assert mock_session.delete.call_count == 2
|
||||
mock_session.add.assert_not_called()
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
def test_update_comment_notifies_only_new_mentions(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.id = "comment-1"
|
||||
comment.created_by = "owner"
|
||||
mock_session.scalar.return_value = comment
|
||||
|
||||
existing_mention = Mock()
|
||||
existing_mention.mentioned_user_id = "user-2"
|
||||
mock_session.scalars.return_value = _mock_scalars([existing_mention])
|
||||
|
||||
with (
|
||||
patch.object(
|
||||
WorkflowCommentService,
|
||||
"_filter_valid_mentioned_user_ids",
|
||||
return_value=["user-2", "user-3"],
|
||||
),
|
||||
patch.object(
|
||||
WorkflowCommentService,
|
||||
"_build_mention_email_payloads",
|
||||
return_value=[],
|
||||
) as build_payloads_mock,
|
||||
patch.object(WorkflowCommentService, "_dispatch_mention_emails") as dispatch_mock,
|
||||
):
|
||||
WorkflowCommentService.update_comment(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
user_id="owner",
|
||||
content="updated",
|
||||
mentioned_user_ids=["user-2", "user-3"],
|
||||
)
|
||||
|
||||
assert build_payloads_mock.call_args.kwargs["mentioned_user_ids"] == ["user-3"]
|
||||
dispatch_mock.assert_called_once_with([])
|
||||
|
||||
def test_get_comments_preloads_related_accounts(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.created_by = "user-1"
|
||||
comment.resolved_by = "user-2"
|
||||
reply = Mock()
|
||||
reply.created_by = "user-3"
|
||||
mention = Mock()
|
||||
mention.mentioned_user_id = "user-4"
|
||||
comment.replies = [reply]
|
||||
comment.mentions = [mention]
|
||||
comment.cache_created_by_account = Mock()
|
||||
comment.cache_resolved_by_account = Mock()
|
||||
reply.cache_created_by_account = Mock()
|
||||
mention.cache_mentioned_user_account = Mock()
|
||||
|
||||
account_1 = Mock()
|
||||
account_1.id = "user-1"
|
||||
account_2 = Mock()
|
||||
account_2.id = "user-2"
|
||||
account_3 = Mock()
|
||||
account_3.id = "user-3"
|
||||
account_4 = Mock()
|
||||
account_4.id = "user-4"
|
||||
|
||||
mock_session.scalars.side_effect = [
|
||||
_mock_scalars([comment]),
|
||||
_mock_scalars([account_1, account_2, account_3, account_4]),
|
||||
]
|
||||
|
||||
result = WorkflowCommentService.get_comments("tenant-1", "app-1")
|
||||
|
||||
assert result == [comment]
|
||||
comment.cache_created_by_account.assert_called_once_with(account_1)
|
||||
comment.cache_resolved_by_account.assert_called_once_with(account_2)
|
||||
reply.cache_created_by_account.assert_called_once_with(account_3)
|
||||
mention.cache_mentioned_user_account.assert_called_once_with(account_4)
|
||||
|
||||
def test_preload_accounts_returns_early_for_empty_comments(self, mock_session: Mock) -> None:
|
||||
WorkflowCommentService._preload_accounts(mock_session, [])
|
||||
|
||||
mock_session.scalars.assert_not_called()
|
||||
|
||||
def test_get_comment_raises_not_found_with_provided_session(self) -> None:
|
||||
session = Mock()
|
||||
session.scalar.return_value = None
|
||||
|
||||
with pytest.raises(NotFound):
|
||||
WorkflowCommentService.get_comment("tenant-1", "app-1", "comment-1", session=session)
|
||||
|
||||
def test_get_comment_uses_context_manager_when_session_not_provided(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.created_by = "user-1"
|
||||
comment.resolved_by = None
|
||||
comment.replies = []
|
||||
comment.mentions = []
|
||||
comment.cache_created_by_account = Mock()
|
||||
comment.cache_resolved_by_account = Mock()
|
||||
mock_session.scalar.return_value = comment
|
||||
mock_session.scalars.return_value = _mock_scalars([])
|
||||
|
||||
result = WorkflowCommentService.get_comment("tenant-1", "app-1", "comment-1")
|
||||
|
||||
assert result is comment
|
||||
comment.cache_created_by_account.assert_called_once()
|
||||
comment.cache_resolved_by_account.assert_called_once_with(None)
|
||||
|
||||
def test_delete_comment_raises_forbidden(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.created_by = "owner"
|
||||
|
||||
with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
|
||||
with pytest.raises(Forbidden):
|
||||
WorkflowCommentService.delete_comment("tenant-1", "app-1", "comment-1", "intruder")
|
||||
|
||||
def test_delete_comment_removes_related_entities(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.created_by = "owner"
|
||||
|
||||
mentions = [Mock(), Mock()]
|
||||
replies = [Mock()]
|
||||
mock_session.scalars.side_effect = [_mock_scalars(mentions), _mock_scalars(replies)]
|
||||
|
||||
with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
|
||||
WorkflowCommentService.delete_comment("tenant-1", "app-1", "comment-1", "owner")
|
||||
|
||||
assert mock_session.delete.call_count == 4
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
def test_resolve_comment_sets_fields(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.resolved = False
|
||||
comment.resolved_at = None
|
||||
comment.resolved_by = None
|
||||
|
||||
with (
|
||||
patch.object(WorkflowCommentService, "get_comment", return_value=comment),
|
||||
patch.object(service_module, "naive_utc_now", return_value="now"),
|
||||
):
|
||||
result = WorkflowCommentService.resolve_comment("tenant-1", "app-1", "comment-1", "user-1")
|
||||
|
||||
assert result is comment
|
||||
assert comment.resolved is True
|
||||
assert comment.resolved_at == "now"
|
||||
assert comment.resolved_by == "user-1"
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
def test_resolve_comment_noop_when_already_resolved(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.resolved = True
|
||||
|
||||
with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
|
||||
result = WorkflowCommentService.resolve_comment("tenant-1", "app-1", "comment-1", "user-1")
|
||||
|
||||
assert result is comment
|
||||
mock_session.commit.assert_not_called()
|
||||
|
||||
def test_create_reply_requires_comment(self, mock_session: Mock) -> None:
|
||||
mock_session.get.return_value = None
|
||||
|
||||
with pytest.raises(NotFound):
|
||||
WorkflowCommentService.create_reply("comment-1", "hello", "user-1")
|
||||
|
||||
def test_create_reply_creates_mentions(self, mock_session: Mock) -> None:
|
||||
mock_session.get.return_value = Mock()
|
||||
reply = Mock()
|
||||
reply.id = "reply-1"
|
||||
reply.created_at = "ts"
|
||||
|
||||
with (
|
||||
patch.object(service_module, "WorkflowCommentReply", return_value=reply),
|
||||
patch.object(service_module, "WorkflowCommentMention", return_value=Mock()),
|
||||
patch.object(WorkflowCommentService, "_filter_valid_mentioned_user_ids", return_value=["user-2"]),
|
||||
):
|
||||
result = WorkflowCommentService.create_reply(
|
||||
comment_id="comment-1",
|
||||
content="hello",
|
||||
created_by="user-1",
|
||||
mentioned_user_ids=["user-2", "bad-id"],
|
||||
)
|
||||
|
||||
assert result == {"id": "reply-1", "created_at": "ts"}
|
||||
assert mock_session.add.call_count == 2
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
def test_update_reply_raises_not_found(self, mock_session: Mock) -> None:
|
||||
mock_session.scalar.return_value = None
|
||||
|
||||
with pytest.raises(NotFound):
|
||||
WorkflowCommentService.update_reply(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
reply_id="reply-1",
|
||||
user_id="user-1",
|
||||
content="hello",
|
||||
)
|
||||
|
||||
def test_update_reply_raises_forbidden(self, mock_session: Mock) -> None:
|
||||
reply = Mock()
|
||||
reply.created_by = "owner"
|
||||
mock_session.scalar.return_value = reply
|
||||
|
||||
with pytest.raises(Forbidden):
|
||||
WorkflowCommentService.update_reply(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
reply_id="reply-1",
|
||||
user_id="intruder",
|
||||
content="hello",
|
||||
)
|
||||
|
||||
def test_update_reply_replaces_mentions(self, mock_session: Mock) -> None:
|
||||
reply = Mock()
|
||||
reply.id = "reply-1"
|
||||
reply.comment_id = "comment-1"
|
||||
reply.created_by = "owner"
|
||||
reply.updated_at = "updated"
|
||||
mock_session.scalar.return_value = reply
|
||||
mock_session.scalars.return_value = _mock_scalars([Mock()])
|
||||
comment = Mock()
|
||||
comment.tenant_id = "tenant-1"
|
||||
comment.app_id = "app-1"
|
||||
mock_session.get.return_value = comment
|
||||
|
||||
with patch.object(WorkflowCommentService, "_filter_valid_mentioned_user_ids", return_value=["user-2"]):
|
||||
result = WorkflowCommentService.update_reply(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
reply_id="reply-1",
|
||||
user_id="owner",
|
||||
content="new",
|
||||
mentioned_user_ids=["user-2", "bad-id"],
|
||||
)
|
||||
|
||||
assert result == {"id": "reply-1", "updated_at": "updated"}
|
||||
assert mock_session.delete.call_count == 1
|
||||
assert mock_session.add.call_count == 1
|
||||
mock_session.commit.assert_called_once()
|
||||
mock_session.refresh.assert_called_once_with(reply)
|
||||
|
||||
def test_update_comment_updates_position_coordinates_when_provided(self, mock_session: Mock) -> None:
|
||||
comment = Mock()
|
||||
comment.id = "comment-1"
|
||||
comment.created_by = "owner"
|
||||
comment.position_x = 1.0
|
||||
comment.position_y = 2.0
|
||||
mock_session.scalar.return_value = comment
|
||||
mock_session.scalars.return_value = _mock_scalars([])
|
||||
|
||||
WorkflowCommentService.update_comment(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
user_id="owner",
|
||||
content="updated",
|
||||
position_x=10.5,
|
||||
position_y=20.5,
|
||||
mentioned_user_ids=[],
|
||||
)
|
||||
|
||||
assert comment.position_x == 10.5
|
||||
assert comment.position_y == 20.5
|
||||
|
||||
def test_delete_reply_raises_forbidden(self, mock_session: Mock) -> None:
|
||||
reply = Mock()
|
||||
reply.created_by = "owner"
|
||||
mock_session.scalar.return_value = reply
|
||||
|
||||
with pytest.raises(Forbidden):
|
||||
WorkflowCommentService.delete_reply(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
reply_id="reply-1",
|
||||
user_id="intruder",
|
||||
)
|
||||
|
||||
def test_delete_reply_raises_not_found(self, mock_session: Mock) -> None:
|
||||
mock_session.scalar.return_value = None
|
||||
|
||||
with pytest.raises(NotFound):
|
||||
WorkflowCommentService.delete_reply(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
reply_id="reply-1",
|
||||
user_id="owner",
|
||||
)
|
||||
|
||||
def test_delete_reply_removes_mentions(self, mock_session: Mock) -> None:
|
||||
reply = Mock()
|
||||
reply.created_by = "owner"
|
||||
mock_session.scalar.return_value = reply
|
||||
mock_session.scalars.return_value = _mock_scalars([Mock(), Mock()])
|
||||
|
||||
WorkflowCommentService.delete_reply(
|
||||
tenant_id="tenant-1",
|
||||
app_id="app-1",
|
||||
comment_id="comment-1",
|
||||
reply_id="reply-1",
|
||||
user_id="owner",
|
||||
)
|
||||
|
||||
assert mock_session.delete.call_count == 3
|
||||
mock_session.commit.assert_called_once()
|
||||
|
||||
def test_validate_comment_access_delegates_to_get_comment(self) -> None:
|
||||
comment = Mock()
|
||||
with patch.object(WorkflowCommentService, "get_comment", return_value=comment) as get_comment_mock:
|
||||
result = WorkflowCommentService.validate_comment_access("comment-1", "tenant-1", "app-1")
|
||||
|
||||
assert result is comment
|
||||
get_comment_mock.assert_called_once_with("tenant-1", "app-1", "comment-1")
|
||||
@ -12,7 +12,7 @@ This test suite covers:
|
||||
import json
|
||||
import uuid
|
||||
from typing import Any, cast
|
||||
from unittest.mock import ANY, MagicMock, patch
|
||||
from unittest.mock import ANY, MagicMock, Mock, patch
|
||||
|
||||
import pytest
|
||||
from graphon.entities import WorkflowNodeExecution
|
||||
@ -713,6 +713,79 @@ class TestWorkflowService:
|
||||
with pytest.raises(ValueError, match="Invalid app mode"):
|
||||
workflow_service.validate_features_structure(app, features)
|
||||
|
||||
# ==================== Draft Workflow Variable Update Tests ====================
|
||||
# These tests verify updating draft workflow environment/conversation variables
|
||||
|
||||
def test_update_draft_workflow_environment_variables_updates_workflow(self, workflow_service, mock_db_session):
|
||||
"""Test update_draft_workflow_environment_variables updates draft fields."""
|
||||
app = TestWorkflowAssociatedDataFactory.create_app_mock()
|
||||
account = TestWorkflowAssociatedDataFactory.create_account_mock()
|
||||
workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock()
|
||||
variables = [Mock()]
|
||||
|
||||
with (
|
||||
patch.object(workflow_service, "get_draft_workflow", return_value=workflow),
|
||||
patch("services.workflow_service.naive_utc_now", return_value="now"),
|
||||
):
|
||||
workflow_service.update_draft_workflow_environment_variables(
|
||||
app_model=app,
|
||||
environment_variables=variables,
|
||||
account=account,
|
||||
)
|
||||
|
||||
assert workflow.environment_variables == variables
|
||||
assert workflow.updated_by == account.id
|
||||
assert workflow.updated_at == "now"
|
||||
mock_db_session.session.commit.assert_called_once()
|
||||
|
||||
def test_update_draft_workflow_environment_variables_raises_when_missing(self, workflow_service):
|
||||
"""Test update_draft_workflow_environment_variables raises when draft missing."""
|
||||
app = TestWorkflowAssociatedDataFactory.create_app_mock()
|
||||
account = TestWorkflowAssociatedDataFactory.create_account_mock()
|
||||
|
||||
with patch.object(workflow_service, "get_draft_workflow", return_value=None):
|
||||
with pytest.raises(ValueError, match="No draft workflow found."):
|
||||
workflow_service.update_draft_workflow_environment_variables(
|
||||
app_model=app,
|
||||
environment_variables=[],
|
||||
account=account,
|
||||
)
|
||||
|
||||
def test_update_draft_workflow_conversation_variables_updates_workflow(self, workflow_service, mock_db_session):
|
||||
"""Test update_draft_workflow_conversation_variables updates draft fields."""
|
||||
app = TestWorkflowAssociatedDataFactory.create_app_mock()
|
||||
account = TestWorkflowAssociatedDataFactory.create_account_mock()
|
||||
workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock()
|
||||
variables = [Mock()]
|
||||
|
||||
with (
|
||||
patch.object(workflow_service, "get_draft_workflow", return_value=workflow),
|
||||
patch("services.workflow_service.naive_utc_now", return_value="now"),
|
||||
):
|
||||
workflow_service.update_draft_workflow_conversation_variables(
|
||||
app_model=app,
|
||||
conversation_variables=variables,
|
||||
account=account,
|
||||
)
|
||||
|
||||
assert workflow.conversation_variables == variables
|
||||
assert workflow.updated_by == account.id
|
||||
assert workflow.updated_at == "now"
|
||||
mock_db_session.session.commit.assert_called_once()
|
||||
|
||||
def test_update_draft_workflow_conversation_variables_raises_when_missing(self, workflow_service):
|
||||
"""Test update_draft_workflow_conversation_variables raises when draft missing."""
|
||||
app = TestWorkflowAssociatedDataFactory.create_app_mock()
|
||||
account = TestWorkflowAssociatedDataFactory.create_account_mock()
|
||||
|
||||
with patch.object(workflow_service, "get_draft_workflow", return_value=None):
|
||||
with pytest.raises(ValueError, match="No draft workflow found."):
|
||||
workflow_service.update_draft_workflow_conversation_variables(
|
||||
app_model=app,
|
||||
conversation_variables=[],
|
||||
account=account,
|
||||
)
|
||||
|
||||
# ==================== Publish Workflow Tests ====================
|
||||
# These tests verify creating published versions from draft workflows
|
||||
|
||||
|
||||
420
api/uv.lock
generated
420
api/uv.lock
generated
@ -537,6 +537,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bidict"
|
||||
version = "0.23.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093, upload-time = "2024-02-18T19:09:05.748Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764, upload-time = "2024-02-18T19:09:04.156Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "billiard"
|
||||
version = "4.2.3"
|
||||
@ -643,24 +652,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/57/b7/f4a051cefaf76930c77558b31646bcce7e9b3fbdcbc89e4073783e961519/botocore_stubs-1.41.3-py3-none-any.whl", hash = "sha256:6ab911bd9f7256f1dcea2e24a4af7ae0f9f07e83d0a760bba37f028f4a2e5589", size = 66749, upload-time = "2025-11-24T20:29:26.142Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bottleneck"
|
||||
version = "1.6.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "numpy" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/14/d8/6d641573e210768816023a64966d66463f2ce9fc9945fa03290c8a18f87c/bottleneck-1.6.0.tar.gz", hash = "sha256:028d46ee4b025ad9ab4d79924113816f825f62b17b87c9e1d0d8ce144a4a0e31", size = 104311, upload-time = "2025-09-08T16:30:38.617Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/72/7e3593a2a3dd69ec831a9981a7b1443647acb66a5aec34c1620a5f7f8498/bottleneck-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3bb16a16a86a655fdbb34df672109a8a227bb5f9c9cf5bb8ae400a639bc52fa3", size = 100515, upload-time = "2025-09-08T16:29:55.141Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/d4/e7bbea08f4c0f0bab819d38c1a613da5f194fba7b19aae3e2b3a27e78886/bottleneck-1.6.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0fbf5d0787af9aee6cef4db9cdd14975ce24bd02e0cc30155a51411ebe2ff35f", size = 377451, upload-time = "2025-09-08T16:29:56.718Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/80/a6da430e3b1a12fd85f9fe90d3ad8fe9a527ecb046644c37b4b3f4baacfc/bottleneck-1.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d08966f4a22384862258940346a72087a6f7cebb19038fbf3a3f6690ee7fd39f", size = 368303, upload-time = "2025-09-08T16:29:57.834Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/11/abd30a49f3251f4538430e5f876df96f2b39dabf49e05c5836820d2c31fe/bottleneck-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:604f0b898b43b7bc631c564630e936a8759d2d952641c8b02f71e31dbcd9deaa", size = 361232, upload-time = "2025-09-08T16:29:59.104Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/ac/1c0e09d8d92b9951f675bd42463ce76c3c3657b31c5bf53ca1f6dd9eccff/bottleneck-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d33720bad761e642abc18eda5f188ff2841191c9f63f9d0c052245decc0faeb9", size = 373234, upload-time = "2025-09-08T16:30:00.488Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/ea/382c572ae3057ba885d484726bb63629d1f63abedf91c6cd23974eb35a9b/bottleneck-1.6.0-cp312-cp312-win32.whl", hash = "sha256:a1e5907ec2714efbe7075d9207b58c22ab6984a59102e4ecd78dced80dab8374", size = 108020, upload-time = "2025-09-08T16:30:01.773Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/ad/d71da675eef85ac153eef5111ca0caa924548c9591da00939bcabba8de8e/bottleneck-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:81e3822499f057a917b7d3972ebc631ac63c6bbcc79ad3542a66c4c40634e3a6", size = 113493, upload-time = "2025-09-08T16:30:02.872Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "brotli"
|
||||
version = "1.2.0"
|
||||
@ -695,18 +686,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/07/6b/6e92009df3b8b7272f85a0992b306b61c34b7ea1c4776643746e61c380ac/brotlicffi-1.2.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:f139a7cdfe4ae7859513067b736eb44d19fae1186f9e99370092f6915216451b", size = 378586, upload-time = "2025-11-21T18:17:50.531Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bs4"
|
||||
version = "0.0.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "beautifulsoup4" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c9/aa/4acaf814ff901145da37332e05bb510452ebed97bc9602695059dd46ef39/bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925", size = 698, upload-time = "2024-01-17T18:15:47.371Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/51/bb/bf7aab772a159614954d84aa832c129624ba6c32faa559dfb200a534e50b/bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc", size = 1189, upload-time = "2024-01-17T18:15:48.613Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "build"
|
||||
version = "1.3.0"
|
||||
@ -1306,91 +1285,49 @@ version = "1.13.3"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "aliyun-log-python-sdk" },
|
||||
{ name = "apscheduler" },
|
||||
{ name = "arize-phoenix-otel" },
|
||||
{ name = "azure-identity" },
|
||||
{ name = "beautifulsoup4" },
|
||||
{ name = "bleach" },
|
||||
{ name = "boto3" },
|
||||
{ name = "bs4" },
|
||||
{ name = "cachetools" },
|
||||
{ name = "celery" },
|
||||
{ name = "charset-normalizer" },
|
||||
{ name = "croniter" },
|
||||
{ name = "fastopenapi", extra = ["flask"] },
|
||||
{ name = "flask" },
|
||||
{ name = "flask-compress" },
|
||||
{ name = "flask-cors" },
|
||||
{ name = "flask-login" },
|
||||
{ name = "flask-migrate" },
|
||||
{ name = "flask-orjson" },
|
||||
{ name = "flask-restx" },
|
||||
{ name = "flask-sqlalchemy" },
|
||||
{ name = "gevent" },
|
||||
{ name = "gevent-websocket" },
|
||||
{ name = "gmpy2" },
|
||||
{ name = "google-api-core" },
|
||||
{ name = "google-api-python-client" },
|
||||
{ name = "google-auth" },
|
||||
{ name = "google-auth-httplib2" },
|
||||
{ name = "google-cloud-aiplatform" },
|
||||
{ name = "googleapis-common-protos" },
|
||||
{ name = "graphon" },
|
||||
{ name = "gunicorn" },
|
||||
{ name = "httpx", extra = ["socks"] },
|
||||
{ name = "httpx-sse" },
|
||||
{ name = "jieba" },
|
||||
{ name = "json-repair" },
|
||||
{ name = "langfuse" },
|
||||
{ name = "langsmith" },
|
||||
{ name = "litellm" },
|
||||
{ name = "markdown" },
|
||||
{ name = "mlflow-skinny" },
|
||||
{ name = "numpy" },
|
||||
{ name = "openpyxl" },
|
||||
{ name = "opentelemetry-api" },
|
||||
{ name = "opentelemetry-distro" },
|
||||
{ name = "opentelemetry-exporter-otlp" },
|
||||
{ name = "opentelemetry-exporter-otlp-proto-common" },
|
||||
{ name = "opentelemetry-exporter-otlp-proto-grpc" },
|
||||
{ name = "opentelemetry-exporter-otlp-proto-http" },
|
||||
{ name = "opentelemetry-instrumentation" },
|
||||
{ name = "opentelemetry-instrumentation-celery" },
|
||||
{ name = "opentelemetry-instrumentation-flask" },
|
||||
{ name = "opentelemetry-instrumentation-httpx" },
|
||||
{ name = "opentelemetry-instrumentation-redis" },
|
||||
{ name = "opentelemetry-instrumentation-sqlalchemy" },
|
||||
{ name = "opentelemetry-propagator-b3" },
|
||||
{ name = "opentelemetry-proto" },
|
||||
{ name = "opentelemetry-sdk" },
|
||||
{ name = "opentelemetry-semantic-conventions" },
|
||||
{ name = "opentelemetry-util-http" },
|
||||
{ name = "opik" },
|
||||
{ name = "packaging" },
|
||||
{ name = "pandas", extra = ["excel", "output-formatting", "performance"] },
|
||||
{ name = "psycogreen" },
|
||||
{ name = "psycopg2-binary" },
|
||||
{ name = "pycryptodome" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "pydantic-settings" },
|
||||
{ name = "pyjwt" },
|
||||
{ name = "pypandoc" },
|
||||
{ name = "pypdfium2" },
|
||||
{ name = "python-docx" },
|
||||
{ name = "python-dotenv" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "python-socketio" },
|
||||
{ name = "readabilipy" },
|
||||
{ name = "redis", extra = ["hiredis"] },
|
||||
{ name = "resend" },
|
||||
{ name = "sendgrid" },
|
||||
{ name = "sentry-sdk", extra = ["flask"] },
|
||||
{ name = "sqlalchemy" },
|
||||
{ name = "sseclient-py" },
|
||||
{ name = "starlette" },
|
||||
{ name = "tiktoken" },
|
||||
{ name = "transformers" },
|
||||
{ name = "unstructured", extra = ["docx", "epub", "md", "ppt", "pptx"] },
|
||||
{ name = "weave" },
|
||||
{ name = "yarl" },
|
||||
]
|
||||
|
||||
[package.dev-dependencies]
|
||||
@ -1416,7 +1353,6 @@ dev = [
|
||||
{ name = "pytest-xdist" },
|
||||
{ name = "ruff" },
|
||||
{ name = "scipy-stubs" },
|
||||
{ name = "sseclient-py" },
|
||||
{ name = "testcontainers" },
|
||||
{ name = "types-aiofiles" },
|
||||
{ name = "types-beautifulsoup4" },
|
||||
@ -1602,174 +1538,131 @@ vdb-xinference = [
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "aliyun-log-python-sdk", specifier = "~=0.9.44" },
|
||||
{ name = "apscheduler", specifier = ">=3.11.2" },
|
||||
{ name = "aliyun-log-python-sdk", specifier = ">=0.9.44,<1.0.0" },
|
||||
{ name = "arize-phoenix-otel", specifier = "~=0.15.0" },
|
||||
{ name = "azure-identity", specifier = "==1.25.3" },
|
||||
{ name = "beautifulsoup4", specifier = "==4.14.3" },
|
||||
{ name = "bleach", specifier = "~=6.3.0" },
|
||||
{ name = "boto3", specifier = "==1.42.88" },
|
||||
{ name = "bs4", specifier = "~=0.0.1" },
|
||||
{ name = "cachetools", specifier = "~=7.0.5" },
|
||||
{ name = "celery", specifier = "~=5.6.3" },
|
||||
{ name = "charset-normalizer", specifier = ">=3.4.7" },
|
||||
{ name = "azure-identity", specifier = ">=1.25.3,<2.0.0" },
|
||||
{ name = "bleach", specifier = ">=6.3.0" },
|
||||
{ name = "boto3", specifier = ">=1.42.88" },
|
||||
{ name = "celery", specifier = ">=5.6.3" },
|
||||
{ name = "croniter", specifier = ">=6.2.2" },
|
||||
{ name = "fastopenapi", extras = ["flask"], specifier = ">=0.7.0" },
|
||||
{ name = "flask", specifier = "~=3.1.3" },
|
||||
{ name = "flask-compress", specifier = ">=1.24,<1.25" },
|
||||
{ name = "flask-cors", specifier = "~=6.0.2" },
|
||||
{ name = "flask-login", specifier = "~=0.6.3" },
|
||||
{ name = "flask-migrate", specifier = "~=4.1.0" },
|
||||
{ name = "flask-orjson", specifier = "~=2.0.0" },
|
||||
{ name = "flask-restx", specifier = "~=1.3.2" },
|
||||
{ name = "flask-sqlalchemy", specifier = "~=3.1.1" },
|
||||
{ name = "gevent", specifier = "~=26.4.0" },
|
||||
{ name = "gmpy2", specifier = "~=2.3.0" },
|
||||
{ name = "google-api-core", specifier = ">=2.30.3" },
|
||||
{ name = "google-api-python-client", specifier = "==2.194.0" },
|
||||
{ name = "google-auth", specifier = ">=2.49.2" },
|
||||
{ name = "google-auth-httplib2", specifier = "==0.3.1" },
|
||||
{ name = "google-cloud-aiplatform", specifier = ">=1.147.0" },
|
||||
{ name = "googleapis-common-protos", specifier = ">=1.74.0" },
|
||||
{ name = "graphon", specifier = ">=0.1.2" },
|
||||
{ name = "gunicorn", specifier = "~=25.3.0" },
|
||||
{ name = "httpx", extras = ["socks"], specifier = "~=0.28.1" },
|
||||
{ name = "fastopenapi", extras = ["flask"], specifier = "~=0.7.0" },
|
||||
{ name = "flask-compress", specifier = ">=1.24,<2.0.0" },
|
||||
{ name = "flask-cors", specifier = ">=6.0.2" },
|
||||
{ name = "flask-login", specifier = ">=0.6.3,<1.0.0" },
|
||||
{ name = "flask-migrate", specifier = ">=4.1.0,<5.0.0" },
|
||||
{ name = "flask-orjson", specifier = ">=2.0.0,<3.0.0" },
|
||||
{ name = "flask-restx", specifier = ">=1.3.2,<2.0.0" },
|
||||
{ name = "gevent", specifier = ">=26.4.0" },
|
||||
{ name = "gevent-websocket", specifier = ">=0.10.1" },
|
||||
{ name = "gmpy2", specifier = ">=2.3.0" },
|
||||
{ name = "google-api-python-client", specifier = ">=2.194.0" },
|
||||
{ name = "google-cloud-aiplatform", specifier = ">=1.147.0,<2.0.0" },
|
||||
{ name = "graphon", specifier = "~=0.1.2" },
|
||||
{ name = "gunicorn", specifier = ">=25.3.0" },
|
||||
{ name = "httpx", extras = ["socks"], specifier = ">=0.28.1,<1.0.0" },
|
||||
{ name = "httpx-sse", specifier = "~=0.4.0" },
|
||||
{ name = "jieba", specifier = "==0.42.1" },
|
||||
{ name = "json-repair", specifier = ">=0.59.2" },
|
||||
{ name = "json-repair", specifier = "~=0.59.2" },
|
||||
{ name = "langfuse", specifier = ">=4.2.0,<5.0.0" },
|
||||
{ name = "langsmith", specifier = "~=0.7.30" },
|
||||
{ name = "litellm", specifier = "==1.83.0" },
|
||||
{ name = "markdown", specifier = "~=3.10.2" },
|
||||
{ name = "mlflow-skinny", specifier = ">=3.11.1" },
|
||||
{ name = "numpy", specifier = "~=2.4.4" },
|
||||
{ name = "openpyxl", specifier = "~=3.1.5" },
|
||||
{ name = "opentelemetry-api", specifier = "==1.41.0" },
|
||||
{ name = "opentelemetry-distro", specifier = "==0.62b0" },
|
||||
{ name = "opentelemetry-exporter-otlp", specifier = "==1.41.0" },
|
||||
{ name = "opentelemetry-exporter-otlp-proto-common", specifier = "==1.41.0" },
|
||||
{ name = "opentelemetry-exporter-otlp-proto-grpc", specifier = "==1.41.0" },
|
||||
{ name = "opentelemetry-exporter-otlp-proto-http", specifier = "==1.41.0" },
|
||||
{ name = "opentelemetry-instrumentation", specifier = "==0.62b0" },
|
||||
{ name = "opentelemetry-instrumentation-celery", specifier = "==0.62b0" },
|
||||
{ name = "opentelemetry-instrumentation-flask", specifier = "==0.62b0" },
|
||||
{ name = "opentelemetry-instrumentation-httpx", specifier = "==0.62b0" },
|
||||
{ name = "opentelemetry-instrumentation-redis", specifier = "==0.62b0" },
|
||||
{ name = "opentelemetry-instrumentation-sqlalchemy", specifier = "==0.62b0" },
|
||||
{ name = "opentelemetry-propagator-b3", specifier = "==1.41.0" },
|
||||
{ name = "opentelemetry-proto", specifier = "==1.41.0" },
|
||||
{ name = "opentelemetry-sdk", specifier = "==1.41.0" },
|
||||
{ name = "opentelemetry-semantic-conventions", specifier = "==0.62b0" },
|
||||
{ name = "opentelemetry-util-http", specifier = "==0.62b0" },
|
||||
{ name = "langsmith", specifier = ">=0.7.30,<1.0.0" },
|
||||
{ name = "mlflow-skinny", specifier = ">=3.11.1,<4.0.0" },
|
||||
{ name = "opentelemetry-distro", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-instrumentation-celery", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-instrumentation-flask", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-instrumentation-httpx", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-instrumentation-redis", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-instrumentation-sqlalchemy", specifier = ">=0.62b0,<1.0.0" },
|
||||
{ name = "opentelemetry-propagator-b3", specifier = ">=1.41.0,<2.0.0" },
|
||||
{ name = "opik", specifier = "~=1.11.2" },
|
||||
{ name = "packaging", specifier = "~=26.0" },
|
||||
{ name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=3.0.2" },
|
||||
{ name = "psycogreen", specifier = "~=1.0.2" },
|
||||
{ name = "psycopg2-binary", specifier = "~=2.9.11" },
|
||||
{ name = "pycryptodome", specifier = "==3.23.0" },
|
||||
{ name = "pydantic", specifier = "~=2.12.5" },
|
||||
{ name = "pydantic-settings", specifier = "~=2.13.1" },
|
||||
{ name = "pyjwt", specifier = "~=2.12.1" },
|
||||
{ name = "pypandoc", specifier = "~=1.13" },
|
||||
{ name = "pypdfium2", specifier = "==5.6.0" },
|
||||
{ name = "python-docx", specifier = "~=1.2.0" },
|
||||
{ name = "python-dotenv", specifier = "==1.2.2" },
|
||||
{ name = "pyyaml", specifier = "~=6.0.1" },
|
||||
{ name = "readabilipy", specifier = "~=0.3.0" },
|
||||
{ name = "redis", extras = ["hiredis"], specifier = "~=7.4.0" },
|
||||
{ name = "resend", specifier = "~=2.27.0" },
|
||||
{ name = "sendgrid", specifier = "~=6.12.5" },
|
||||
{ name = "sentry-sdk", extras = ["flask"], specifier = "~=2.57.0" },
|
||||
{ name = "sqlalchemy", specifier = "~=2.0.49" },
|
||||
{ name = "sseclient-py", specifier = "~=1.9.0" },
|
||||
{ name = "starlette", specifier = "==1.0.0" },
|
||||
{ name = "tiktoken", specifier = "~=0.12.0" },
|
||||
{ name = "transformers", specifier = "~=5.3.0" },
|
||||
{ name = "unstructured", extras = ["docx", "epub", "md", "ppt", "pptx"], specifier = "~=0.21.5" },
|
||||
{ name = "weave", specifier = ">=0.52.36" },
|
||||
{ name = "yarl", specifier = "~=1.23.0" },
|
||||
{ name = "psycogreen", specifier = ">=1.0.2" },
|
||||
{ name = "psycopg2-binary", specifier = ">=2.9.11" },
|
||||
{ name = "python-socketio", specifier = ">=5.13.0" },
|
||||
{ name = "readabilipy", specifier = ">=0.3.0,<1.0.0" },
|
||||
{ name = "redis", extras = ["hiredis"], specifier = ">=7.4.0" },
|
||||
{ name = "resend", specifier = ">=2.27.0,<3.0.0" },
|
||||
{ name = "sendgrid", specifier = ">=6.12.5" },
|
||||
{ name = "sseclient-py", specifier = ">=1.8.0" },
|
||||
{ name = "weave", specifier = ">=0.52.36,<1.0.0" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "basedpyright", specifier = "~=1.39.0" },
|
||||
{ name = "basedpyright", specifier = ">=1.39.0" },
|
||||
{ name = "boto3-stubs", specifier = ">=1.42.88" },
|
||||
{ name = "celery-types", specifier = ">=0.23.0" },
|
||||
{ name = "coverage", specifier = "~=7.13.4" },
|
||||
{ name = "dotenv-linter", specifier = "~=0.7.0" },
|
||||
{ name = "faker", specifier = "~=40.13.0" },
|
||||
{ name = "coverage", specifier = ">=7.13.4" },
|
||||
{ name = "dotenv-linter", specifier = ">=0.7.0" },
|
||||
{ name = "faker", specifier = ">=20.1.0" },
|
||||
{ name = "hypothesis", specifier = ">=6.151.12" },
|
||||
{ name = "import-linter", specifier = ">=2.3" },
|
||||
{ name = "lxml-stubs", specifier = "~=0.5.1" },
|
||||
{ name = "mypy", specifier = "~=1.20.1" },
|
||||
{ name = "pandas-stubs", specifier = "~=3.0.0" },
|
||||
{ name = "lxml-stubs", specifier = ">=0.5.1" },
|
||||
{ name = "mypy", specifier = ">=1.20.1" },
|
||||
{ name = "pandas-stubs", specifier = ">=3.0.0" },
|
||||
{ name = "pyrefly", specifier = ">=0.60.0" },
|
||||
{ name = "pytest", specifier = "~=9.0.3" },
|
||||
{ name = "pytest-benchmark", specifier = "~=5.2.3" },
|
||||
{ name = "pytest-cov", specifier = "~=7.1.0" },
|
||||
{ name = "pytest-env", specifier = "~=1.6.0" },
|
||||
{ name = "pytest-mock", specifier = "~=3.15.1" },
|
||||
{ name = "pytest", specifier = ">=9.0.3" },
|
||||
{ name = "pytest-benchmark", specifier = ">=5.2.3" },
|
||||
{ name = "pytest-cov", specifier = ">=7.1.0" },
|
||||
{ name = "pytest-env", specifier = ">=1.6.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.15.1" },
|
||||
{ name = "pytest-timeout", specifier = ">=2.4.0" },
|
||||
{ name = "pytest-xdist", specifier = ">=3.8.0" },
|
||||
{ name = "ruff", specifier = "~=0.15.10" },
|
||||
{ name = "ruff", specifier = ">=0.15.10" },
|
||||
{ name = "scipy-stubs", specifier = ">=1.15.3.0" },
|
||||
{ name = "sseclient-py", specifier = ">=1.8.0" },
|
||||
{ name = "testcontainers", specifier = "~=4.14.2" },
|
||||
{ name = "types-aiofiles", specifier = "~=25.1.0" },
|
||||
{ name = "types-beautifulsoup4", specifier = "~=4.12.0" },
|
||||
{ name = "types-cachetools", specifier = "~=6.2.0" },
|
||||
{ name = "testcontainers", specifier = ">=4.14.2" },
|
||||
{ name = "types-aiofiles", specifier = ">=25.1.0" },
|
||||
{ name = "types-beautifulsoup4", specifier = ">=4.12.0" },
|
||||
{ name = "types-cachetools", specifier = ">=6.2.0" },
|
||||
{ name = "types-cffi", specifier = ">=2.0.0.20260408" },
|
||||
{ name = "types-colorama", specifier = "~=0.4.15" },
|
||||
{ name = "types-defusedxml", specifier = "~=0.7.0" },
|
||||
{ name = "types-deprecated", specifier = "~=1.3.1" },
|
||||
{ name = "types-docutils", specifier = "~=0.22.3" },
|
||||
{ name = "types-flask-cors", specifier = "~=6.0.0" },
|
||||
{ name = "types-flask-migrate", specifier = "~=4.1.0" },
|
||||
{ name = "types-gevent", specifier = "~=26.4.0" },
|
||||
{ name = "types-greenlet", specifier = "~=3.4.0" },
|
||||
{ name = "types-html5lib", specifier = "~=1.1.11" },
|
||||
{ name = "types-colorama", specifier = ">=0.4.15" },
|
||||
{ name = "types-defusedxml", specifier = ">=0.7.0" },
|
||||
{ name = "types-deprecated", specifier = ">=1.3.1" },
|
||||
{ name = "types-docutils", specifier = ">=0.22.3" },
|
||||
{ name = "types-flask-cors", specifier = ">=6.0.0" },
|
||||
{ name = "types-flask-migrate", specifier = ">=4.1.0" },
|
||||
{ name = "types-gevent", specifier = ">=26.4.0" },
|
||||
{ name = "types-greenlet", specifier = ">=3.4.0" },
|
||||
{ name = "types-html5lib", specifier = ">=1.1.11" },
|
||||
{ name = "types-jmespath", specifier = ">=1.1.0.20260408" },
|
||||
{ name = "types-markdown", specifier = "~=3.10.2" },
|
||||
{ name = "types-oauthlib", specifier = "~=3.3.0" },
|
||||
{ name = "types-objgraph", specifier = "~=3.6.0" },
|
||||
{ name = "types-olefile", specifier = "~=0.47.0" },
|
||||
{ name = "types-openpyxl", specifier = "~=3.1.5" },
|
||||
{ name = "types-pexpect", specifier = "~=4.9.0" },
|
||||
{ name = "types-protobuf", specifier = "~=7.34.1" },
|
||||
{ name = "types-psutil", specifier = "~=7.2.2" },
|
||||
{ name = "types-psycopg2", specifier = "~=2.9.21" },
|
||||
{ name = "types-pygments", specifier = "~=2.20.0" },
|
||||
{ name = "types-pymysql", specifier = "~=1.1.0" },
|
||||
{ name = "types-markdown", specifier = ">=3.10.2" },
|
||||
{ name = "types-oauthlib", specifier = ">=3.3.0" },
|
||||
{ name = "types-objgraph", specifier = ">=3.6.0" },
|
||||
{ name = "types-olefile", specifier = ">=0.47.0" },
|
||||
{ name = "types-openpyxl", specifier = ">=3.1.5" },
|
||||
{ name = "types-pexpect", specifier = ">=4.9.0" },
|
||||
{ name = "types-protobuf", specifier = ">=7.34.1" },
|
||||
{ name = "types-psutil", specifier = ">=7.2.2" },
|
||||
{ name = "types-psycopg2", specifier = ">=2.9.21" },
|
||||
{ name = "types-pygments", specifier = ">=2.20.0" },
|
||||
{ name = "types-pymysql", specifier = ">=1.1.0" },
|
||||
{ name = "types-pyopenssl", specifier = ">=24.1.0" },
|
||||
{ name = "types-python-dateutil", specifier = "~=2.9.0" },
|
||||
{ name = "types-python-dateutil", specifier = ">=2.9.0" },
|
||||
{ name = "types-python-http-client", specifier = ">=3.3.7.20260408" },
|
||||
{ name = "types-pywin32", specifier = "~=311.0.0" },
|
||||
{ name = "types-pyyaml", specifier = "~=6.0.12" },
|
||||
{ name = "types-pywin32", specifier = ">=311.0.0" },
|
||||
{ name = "types-pyyaml", specifier = ">=6.0.12" },
|
||||
{ name = "types-redis", specifier = ">=4.6.0.20241004" },
|
||||
{ name = "types-regex", specifier = "~=2026.4.4" },
|
||||
{ name = "types-regex", specifier = ">=2026.4.4" },
|
||||
{ name = "types-setuptools", specifier = ">=82.0.0.20260408" },
|
||||
{ name = "types-shapely", specifier = "~=2.1.0" },
|
||||
{ name = "types-shapely", specifier = ">=2.1.0" },
|
||||
{ name = "types-simplejson", specifier = ">=3.20.0.20260408" },
|
||||
{ name = "types-six", specifier = ">=1.17.0.20260408" },
|
||||
{ name = "types-tensorflow", specifier = ">=2.18.0.20260408" },
|
||||
{ name = "types-tqdm", specifier = ">=4.67.3.20260408" },
|
||||
{ name = "types-ujson", specifier = ">=5.10.0" },
|
||||
{ name = "xinference-client", specifier = "~=2.4.0" },
|
||||
{ name = "xinference-client", specifier = ">=2.4.0" },
|
||||
]
|
||||
storage = [
|
||||
{ name = "azure-storage-blob", specifier = "==12.28.0" },
|
||||
{ name = "bce-python-sdk", specifier = "~=0.9.69" },
|
||||
{ name = "cos-python-sdk-v5", specifier = "==1.9.41" },
|
||||
{ name = "esdk-obs-python", specifier = "==3.26.2" },
|
||||
{ name = "azure-storage-blob", specifier = ">=12.28.0" },
|
||||
{ name = "bce-python-sdk", specifier = ">=0.9.69" },
|
||||
{ name = "cos-python-sdk-v5", specifier = ">=1.9.41" },
|
||||
{ name = "esdk-obs-python", specifier = ">=3.22.2" },
|
||||
{ name = "google-cloud-storage", specifier = ">=3.10.1" },
|
||||
{ name = "opendal", specifier = "~=0.46.0" },
|
||||
{ name = "oss2", specifier = "==2.19.1" },
|
||||
{ name = "supabase", specifier = "~=2.18.1" },
|
||||
{ name = "tos", specifier = "~=2.9.0" },
|
||||
{ name = "opendal", specifier = ">=0.46.0" },
|
||||
{ name = "oss2", specifier = ">=2.19.1" },
|
||||
{ name = "supabase", specifier = ">=2.18.1" },
|
||||
{ name = "tos", specifier = ">=2.9.0" },
|
||||
]
|
||||
tools = [
|
||||
{ name = "cloudscraper", specifier = "~=1.2.71" },
|
||||
{ name = "nltk", specifier = "~=3.9.1" },
|
||||
{ name = "cloudscraper", specifier = ">=1.2.71" },
|
||||
{ name = "nltk", specifier = ">=3.9.1" },
|
||||
]
|
||||
vdb-alibabacloud-mysql = [{ name = "dify-vdb-alibabacloud-mysql", editable = "providers/vdb/vdb-alibabacloud-mysql" }]
|
||||
vdb-all = [
|
||||
@ -1833,7 +1726,7 @@ vdb-upstash = [{ name = "dify-vdb-upstash", editable = "providers/vdb/vdb-upstas
|
||||
vdb-vastbase = [{ name = "dify-vdb-vastbase", editable = "providers/vdb/vdb-vastbase" }]
|
||||
vdb-vikingdb = [{ name = "dify-vdb-vikingdb", editable = "providers/vdb/vdb-vikingdb" }]
|
||||
vdb-weaviate = [{ name = "dify-vdb-weaviate", editable = "providers/vdb/vdb-weaviate" }]
|
||||
vdb-xinference = [{ name = "xinference-client", specifier = "~=2.4.0" }]
|
||||
vdb-xinference = [{ name = "xinference-client", specifier = ">=2.4.0" }]
|
||||
|
||||
[[package]]
|
||||
name = "dify-vdb-alibabacloud-mysql"
|
||||
@ -2584,6 +2477,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/df/7875e08b06a95f4577b71708ec470d029fadf873a66eb813a2861d79dfb5/gevent-26.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1c737e6ac6ce1398df0e3f41c58d982e397c993cbe73ac05b7edbe39e128c9cb", size = 1680530, upload-time = "2026-04-08T23:15:38.714Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gevent-websocket"
|
||||
version = "0.10.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "gevent" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/98/d2/6fa19239ff1ab072af40ebf339acd91fb97f34617c2ee625b8e34bf42393/gevent-websocket-0.10.1.tar.gz", hash = "sha256:7eaef32968290c9121f7c35b973e2cc302ffb076d018c9068d2f5ca8b2d85fb0", size = 18366, upload-time = "2017-03-12T22:46:05.68Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/84/2dc373eb6493e00c884cc11e6c059ec97abae2678d42f06bf780570b0193/gevent_websocket-0.10.1-py3-none-any.whl", hash = "sha256:17b67d91282f8f4c973eba0551183fc84f56f1c90c8f6b6b30256f31f66f5242", size = 22987, upload-time = "2017-03-12T22:46:03.611Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gitdb"
|
||||
version = "4.0.12"
|
||||
@ -3996,25 +3901,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/86/db87a5393f1b1fabef53ac3ba4e6b938bb27e40a04ad7cc512098fcae032/numba-0.65.0-cp312-cp312-win_amd64.whl", hash = "sha256:59bb9f2bb9f1238dfd8e927ba50645c18ae769fef4f3d58ea0ea22a2683b91f5", size = 2749979, upload-time = "2026-04-01T03:51:37.88Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "numexpr"
|
||||
version = "2.14.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "numpy" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/cb/2f/fdba158c9dbe5caca9c3eca3eaffffb251f2fb8674bf8e2d0aed5f38d319/numexpr-2.14.1.tar.gz", hash = "sha256:4be00b1086c7b7a5c32e31558122b7b80243fe098579b170967da83f3152b48b", size = 119400, upload-time = "2025-10-13T16:17:27.351Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/20/c473fc04a371f5e2f8c5749e04505c13e7a8ede27c09e9f099b2ad6f43d6/numexpr-2.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ebae0ab18c799b0e6b8c5a8d11e1fa3848eb4011271d99848b297468a39430", size = 162790, upload-time = "2025-10-13T16:16:34.903Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/93/b6760dd1904c2a498e5f43d1bb436f59383c3ddea3815f1461dfaa259373/numexpr-2.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47041f2f7b9e69498fb311af672ba914a60e6e6d804011caacb17d66f639e659", size = 152196, upload-time = "2025-10-13T16:16:36.593Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/94/cc921e35593b820521e464cbbeaf8212bbdb07f16dc79fe283168df38195/numexpr-2.14.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d686dfb2c1382d9e6e0ee0b7647f943c1886dba3adbf606c625479f35f1956c1", size = 452468, upload-time = "2025-10-13T16:13:29.531Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/43/560e9ba23c02c904b5934496486d061bcb14cd3ebba2e3cf0e2dccb6c22b/numexpr-2.14.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee6d4fbbbc368e6cdd0772734d6249128d957b3b8ad47a100789009f4de7083", size = 443631, upload-time = "2025-10-13T16:15:02.473Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/6c/78f83b6219f61c2c22d71ab6e6c2d4e5d7381334c6c29b77204e59edb039/numexpr-2.14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3a2839efa25f3c8d4133252ea7342d8f81226c7c4dda81f97a57e090b9d87a48", size = 1417670, upload-time = "2025-10-13T16:13:33.464Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/bb/1ccc9dcaf46281568ce769888bf16294c40e98a5158e4b16c241de31d0d3/numexpr-2.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9f9137f1351b310436662b5dc6f4082a245efa8950c3b0d9008028df92fefb9b", size = 1466212, upload-time = "2025-10-13T16:15:12.828Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/31/9f/203d82b9e39dadd91d64bca55b3c8ca432e981b822468dcef41a4418626b/numexpr-2.14.1-cp312-cp312-win32.whl", hash = "sha256:36f8d5c1bd1355df93b43d766790f9046cccfc1e32b7c6163f75bcde682cda07", size = 166996, upload-time = "2025-10-13T16:17:10.369Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/67/ffe750b5452eb66de788c34e7d21ec6d886abb4d7c43ad1dc88ceb3d998f/numexpr-2.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:fdd886f4b7dbaf167633ee396478f0d0aa58ea2f9e7ccc3c6431019623e8d68f", size = 160187, upload-time = "2025-10-13T16:17:11.974Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "numpy"
|
||||
version = "2.4.4"
|
||||
@ -4629,15 +4515,6 @@ excel = [
|
||||
{ name = "xlrd" },
|
||||
{ name = "xlsxwriter" },
|
||||
]
|
||||
output-formatting = [
|
||||
{ name = "jinja2" },
|
||||
{ name = "tabulate" },
|
||||
]
|
||||
performance = [
|
||||
{ name = "bottleneck" },
|
||||
{ name = "numba" },
|
||||
{ name = "numexpr" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pandas-stubs"
|
||||
@ -5461,6 +5338,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-engineio"
|
||||
version = "4.13.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "simple-websocket" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/34/12/bdef9dbeedbe2cdeba2a2056ad27b1fb081557d34b69a97f574843462cae/python_engineio-4.13.1.tar.gz", hash = "sha256:0a853fcef52f5b345425d8c2b921ac85023a04dfcf75d7b74696c61e940fd066", size = 92348, upload-time = "2026-02-06T23:38:06.12Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/54/0cce26da03a981f949bb8449c9778537f75f5917c172e1d2992ff25cb57d/python_engineio-4.13.1-py3-none-any.whl", hash = "sha256:f32ad10589859c11053ad7d9bb3c9695cdf862113bfb0d20bc4d890198287399", size = 59847, upload-time = "2026-02-06T23:38:04.861Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-http-client"
|
||||
version = "3.3.7"
|
||||
@ -5517,6 +5406,19 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/4f/00be2196329ebbff56ce564aa94efb0fbc828d00de250b1980de1a34ab49/python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba", size = 472788, upload-time = "2024-08-07T17:33:28.192Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-socketio"
|
||||
version = "5.16.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "bidict" },
|
||||
{ name = "python-engineio" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/59/81/cf8284f45e32efa18d3848ed82cdd4dcc1b657b082458fbe01ad3e1f2f8d/python_socketio-5.16.1.tar.gz", hash = "sha256:f863f98eacce81ceea2e742f6388e10ca3cdd0764be21d30d5196470edf5ea89", size = 128508, upload-time = "2026-02-06T23:42:07Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/07/c7/deb8c5e604404dbf10a3808a858946ca3547692ff6316b698945bb72177e/python_socketio-5.16.1-py3-none-any.whl", hash = "sha256:a3eb1702e92aa2f2b5d3ba00261b61f062cce51f1cfb6900bf3ab4d1934d2d35", size = 82054, upload-time = "2026-02-06T23:42:05.772Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytz"
|
||||
version = "2025.2"
|
||||
@ -5879,13 +5781,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/64/982e07b93219cb52e1cca5d272cb579e2f3eb001956c9e7a9a6d106c9473/sentry_sdk-2.57.0-py2.py3-none-any.whl", hash = "sha256:812c8bf5ff3d2f0e89c82f5ce80ab3a6423e102729c4706af7413fd1eb480585", size = 456489, upload-time = "2026-03-31T09:39:27.524Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
flask = [
|
||||
{ name = "blinker" },
|
||||
{ name = "flask" },
|
||||
{ name = "markupsafe" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "setuptools"
|
||||
version = "80.9.0"
|
||||
@ -5904,6 +5799,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "simple-websocket"
|
||||
version = "1.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "wsproto" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b0/d4/bfa032f961103eba93de583b161f0e6a5b63cebb8f2c7d0c6e6efe1e3d2e/simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4", size = 17300, upload-time = "2024-10-10T22:39:31.412Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/52/59/0782e51887ac6b07ffd1570e0364cf901ebc36345fea669969d2084baebb/simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c", size = 13842, upload-time = "2024-10-10T22:39:29.645Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "six"
|
||||
version = "1.17.0"
|
||||
@ -6200,15 +6107,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/45/3f/48af1e72e59d60481724b326317bd311615bdedc31f8f81f9508fb84cda6/tablestore-6.4.4-py3-none-any.whl", hash = "sha256:984f086fa7acabaa3558da93205ad6df562b266b85fd249bc5891f2dd1d65814", size = 5118758, upload-time = "2026-04-09T09:40:17.209Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tabulate"
|
||||
version = "0.9.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tcvdb-text"
|
||||
version = "1.1.2"
|
||||
@ -7349,6 +7247,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/21/abdedb4cdf6ff41ebf01a74087740a709e2edb146490e4d9beea054b0b7a/wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", size = 23362, upload-time = "2023-11-09T06:33:28.271Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wsproto"
|
||||
version = "1.3.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c7/79/12135bdf8b9c9367b8701c2c19a14c913c120b882d50b014ca0d38083c2c/wsproto-1.3.2.tar.gz", hash = "sha256:b86885dcf294e15204919950f666e06ffc6c7c114ca900b060d6e16293528294", size = 50116, upload-time = "2025-11-20T18:18:01.871Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/f5/10b68b7b1544245097b2a1b8238f66f2fc6dcaeb24ba5d917f52bd2eed4f/wsproto-1.3.2-py3-none-any.whl", hash = "sha256:61eea322cdf56e8cc904bd3ad7573359a242ba65688716b0710a5eb12beab584", size = 24405, upload-time = "2025-11-20T18:18:00.454Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xinference-client"
|
||||
version = "2.4.0"
|
||||
|
||||
@ -132,6 +132,10 @@ MIGRATION_ENABLED=true
|
||||
# The default value is 300 seconds.
|
||||
FILES_ACCESS_TIMEOUT=300
|
||||
|
||||
# Collaboration mode toggle
|
||||
# To open collaboration features, you also need to set SERVER_WORKER_CLASS=geventwebsocket.gunicorn.workers.GeventWebSocketWorker
|
||||
ENABLE_COLLABORATION_MODE=false
|
||||
|
||||
# Access token expiration time in minutes
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES=60
|
||||
|
||||
@ -167,6 +171,7 @@ SERVER_WORKER_AMOUNT=1
|
||||
# Modifying it may also decrease throughput.
|
||||
#
|
||||
# It is strongly discouraged to change this parameter.
|
||||
# If enable collaboration mode, it must be set to geventwebsocket.gunicorn.workers.GeventWebSocketWorker
|
||||
SERVER_WORKER_CLASS=gevent
|
||||
|
||||
# Default number of worker connections, the default is 10.
|
||||
@ -428,6 +433,8 @@ CONSOLE_CORS_ALLOW_ORIGINS=*
|
||||
COOKIE_DOMAIN=
|
||||
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
|
||||
NEXT_PUBLIC_COOKIE_DOMAIN=
|
||||
# WebSocket server URL.
|
||||
NEXT_PUBLIC_SOCKET_URL=ws://localhost
|
||||
NEXT_PUBLIC_BATCH_CONCURRENCY=5
|
||||
|
||||
# ------------------------------
|
||||
|
||||
@ -159,6 +159,7 @@ services:
|
||||
APP_API_URL: ${APP_API_URL:-}
|
||||
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
|
||||
NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
|
||||
NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
|
||||
SENTRY_DSN: ${WEB_SENTRY_DSN:-}
|
||||
NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
|
||||
EXPERIMENTAL_ENABLE_VINEXT: ${EXPERIMENTAL_ENABLE_VINEXT:-false}
|
||||
|
||||
@ -34,6 +34,7 @@ x-shared-env: &shared-api-worker-env
|
||||
OPENAI_API_BASE: ${OPENAI_API_BASE:-https://api.openai.com/v1}
|
||||
MIGRATION_ENABLED: ${MIGRATION_ENABLED:-true}
|
||||
FILES_ACCESS_TIMEOUT: ${FILES_ACCESS_TIMEOUT:-300}
|
||||
ENABLE_COLLABORATION_MODE: ${ENABLE_COLLABORATION_MODE:-false}
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60}
|
||||
REFRESH_TOKEN_EXPIRE_DAYS: ${REFRESH_TOKEN_EXPIRE_DAYS:-30}
|
||||
APP_DEFAULT_ACTIVE_REQUESTS: ${APP_DEFAULT_ACTIVE_REQUESTS:-0}
|
||||
@ -119,6 +120,7 @@ x-shared-env: &shared-api-worker-env
|
||||
CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
|
||||
COOKIE_DOMAIN: ${COOKIE_DOMAIN:-}
|
||||
NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
|
||||
NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
|
||||
NEXT_PUBLIC_BATCH_CONCURRENCY: ${NEXT_PUBLIC_BATCH_CONCURRENCY:-5}
|
||||
STORAGE_TYPE: ${STORAGE_TYPE:-opendal}
|
||||
OPENDAL_SCHEME: ${OPENDAL_SCHEME:-fs}
|
||||
@ -878,6 +880,7 @@ services:
|
||||
APP_API_URL: ${APP_API_URL:-}
|
||||
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
|
||||
NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
|
||||
NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
|
||||
SENTRY_DSN: ${WEB_SENTRY_DSN:-}
|
||||
NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
|
||||
EXPERIMENTAL_ENABLE_VINEXT: ${EXPERIMENTAL_ENABLE_VINEXT:-false}
|
||||
|
||||
@ -14,6 +14,14 @@ server {
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
location /socket.io/ {
|
||||
proxy_pass http://api:5001;
|
||||
include proxy.conf;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
}
|
||||
|
||||
location /v1 {
|
||||
proxy_pass http://api:5001;
|
||||
include proxy.conf;
|
||||
|
||||
27
packages/dify-ui/AGENTS.md
Normal file
27
packages/dify-ui/AGENTS.md
Normal file
@ -0,0 +1,27 @@
|
||||
# @langgenius/dify-ui
|
||||
|
||||
This package provides shared design tokens (colors, shadows, typography), the `cn()` utility, and a Tailwind CSS preset consumed by `web/`.
|
||||
|
||||
## Border Radius: Figma Token → Tailwind Class Mapping
|
||||
|
||||
The Figma design system uses `--radius/*` tokens whose scale is **offset by one step** from Tailwind CSS v4 defaults. When translating Figma specs to code, always use this mapping — never use `radius-*` as a CSS class, and never extend `borderRadius` in the preset.
|
||||
|
||||
| Figma Token | Value | Tailwind Class |
|
||||
|---|---|---|
|
||||
| `--radius/2xs` | 2px | `rounded-xs` |
|
||||
| `--radius/xs` | 4px | `rounded-sm` |
|
||||
| `--radius/sm` | 6px | `rounded-md` |
|
||||
| `--radius/md` | 8px | `rounded-lg` |
|
||||
| `--radius/lg` | 10px | `rounded-[10px]` |
|
||||
| `--radius/xl` | 12px | `rounded-xl` |
|
||||
| `--radius/2xl` | 16px | `rounded-2xl` |
|
||||
| `--radius/3xl` | 20px | `rounded-[20px]` |
|
||||
| `--radius/6xl` | 28px | `rounded-[28px]` |
|
||||
| `--radius/full` | 999px | `rounded-full` |
|
||||
|
||||
### Rules
|
||||
|
||||
- **Do not** add custom `borderRadius` values to `tailwind-preset.ts`. We use Tailwind v4 defaults and arbitrary values (`rounded-[Npx]`) for sizes without a standard equivalent.
|
||||
- **Do not** use `radius-*` as CSS class names. The old `@utility radius-*` definitions have been removed.
|
||||
- When the Figma MCP returns `rounded-[var(--radius/sm, 6px)]`, convert it to the standard Tailwind class from the table above (e.g. `rounded-md`).
|
||||
- For values without a standard Tailwind equivalent (10px, 20px, 28px), use arbitrary values like `rounded-[10px]`.
|
||||
24
packages/dify-ui/package.json
Normal file
24
packages/dify-ui/package.json
Normal file
@ -0,0 +1,24 @@
|
||||
{
|
||||
"name": "@langgenius/dify-ui",
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"exports": {
|
||||
"./styles.css": "./src/styles/styles.css",
|
||||
"./tailwind-preset": {
|
||||
"import": "./src/tailwind-preset.ts",
|
||||
"types": "./src/tailwind-preset.ts"
|
||||
},
|
||||
"./cn": {
|
||||
"import": "./src/cn.ts",
|
||||
"types": "./src/cn.ts"
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"clsx": "catalog:",
|
||||
"tailwind-merge": "catalog:"
|
||||
},
|
||||
"devDependencies": {
|
||||
"tailwindcss": "catalog:"
|
||||
}
|
||||
}
|
||||
3
packages/dify-ui/src/styles/styles.css
Normal file
3
packages/dify-ui/src/styles/styles.css
Normal file
@ -0,0 +1,3 @@
|
||||
@import '../themes/light.css' layer(base);
|
||||
@import '../themes/dark.css' layer(base);
|
||||
@import './utilities.css';
|
||||
272
packages/dify-ui/src/styles/utilities.css
Normal file
272
packages/dify-ui/src/styles/utilities.css
Normal file
@ -0,0 +1,272 @@
|
||||
@utility system-kbd {
|
||||
font-size: 12px;
|
||||
font-weight: 500;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-2xs-regular-uppercase {
|
||||
font-size: 10px;
|
||||
font-weight: 400;
|
||||
text-transform: uppercase;
|
||||
line-height: 12px;
|
||||
}
|
||||
|
||||
@utility system-2xs-regular {
|
||||
font-size: 10px;
|
||||
font-weight: 400;
|
||||
line-height: 12px;
|
||||
}
|
||||
|
||||
@utility system-2xs-medium {
|
||||
font-size: 10px;
|
||||
font-weight: 500;
|
||||
line-height: 12px;
|
||||
}
|
||||
|
||||
@utility system-2xs-medium-uppercase {
|
||||
font-size: 10px;
|
||||
font-weight: 500;
|
||||
text-transform: uppercase;
|
||||
line-height: 12px;
|
||||
}
|
||||
|
||||
@utility system-2xs-semibold-uppercase {
|
||||
font-size: 10px;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
line-height: 12px;
|
||||
}
|
||||
|
||||
@utility system-xs-regular {
|
||||
font-size: 12px;
|
||||
font-weight: 400;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-xs-regular-uppercase {
|
||||
font-size: 12px;
|
||||
font-weight: 400;
|
||||
text-transform: uppercase;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-xs-medium {
|
||||
font-size: 12px;
|
||||
font-weight: 500;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-xs-medium-uppercase {
|
||||
font-size: 12px;
|
||||
font-weight: 500;
|
||||
text-transform: uppercase;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-xs-semibold {
|
||||
font-size: 12px;
|
||||
font-weight: 600;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-xs-semibold-uppercase {
|
||||
font-size: 12px;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-sm-regular {
|
||||
font-size: 13px;
|
||||
font-weight: 400;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-sm-medium {
|
||||
font-size: 13px;
|
||||
font-weight: 500;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-sm-medium-uppercase {
|
||||
font-size: 13px;
|
||||
font-weight: 500;
|
||||
text-transform: uppercase;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-sm-semibold {
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-sm-semibold-uppercase {
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility system-md-regular {
|
||||
font-size: 14px;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
@utility system-md-medium {
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
@utility system-md-semibold {
|
||||
font-size: 14px;
|
||||
font-weight: 600;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
@utility system-md-semibold-uppercase {
|
||||
font-size: 14px;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
@utility system-xl-medium {
|
||||
font-size: 16px;
|
||||
font-weight: 500;
|
||||
line-height: 24px;
|
||||
}
|
||||
|
||||
@utility system-xl-semibold {
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
line-height: 24px;
|
||||
}
|
||||
|
||||
@utility code-xs-regular {
|
||||
font-size: 12px;
|
||||
font-weight: 400;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
@utility code-sm-regular {
|
||||
font-size: 13px;
|
||||
font-weight: 400;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
@utility code-sm-semibold {
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
@utility body-xs-regular {
|
||||
font-size: 12px;
|
||||
font-weight: 400;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility body-xs-medium {
|
||||
font-size: 12px;
|
||||
font-weight: 500;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility body-sm-regular {
|
||||
font-size: 13px;
|
||||
font-weight: 400;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility body-sm-medium {
|
||||
font-size: 13px;
|
||||
font-weight: 500;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility body-md-regular {
|
||||
font-size: 14px;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
@utility body-md-medium {
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
@utility body-lg-regular {
|
||||
font-size: 15px;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
@utility body-2xl-regular {
|
||||
font-size: 18px;
|
||||
font-weight: 400;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
@utility title-xs-semi-bold {
|
||||
font-size: 12px;
|
||||
font-weight: 600;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility title-sm-semi-bold {
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
@utility title-md-semi-bold {
|
||||
font-size: 14px;
|
||||
font-weight: 600;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
@utility title-lg-bold {
|
||||
font-size: 15px;
|
||||
font-weight: 700;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
@utility title-xl-semi-bold {
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
@utility title-2xl-semi-bold {
|
||||
font-size: 18px;
|
||||
font-weight: 600;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
@utility title-3xl-semi-bold {
|
||||
font-size: 20px;
|
||||
font-weight: 600;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
@utility title-3xl-bold {
|
||||
font-size: 20px;
|
||||
font-weight: 700;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
@utility title-4xl-semi-bold {
|
||||
font-size: 24px;
|
||||
font-weight: 600;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
@utility title-5xl-bold {
|
||||
font-size: 30px;
|
||||
font-weight: 700;
|
||||
line-height: 1.2;
|
||||
}
|
||||
87
packages/dify-ui/src/tailwind-preset.ts
Normal file
87
packages/dify-ui/src/tailwind-preset.ts
Normal file
@ -0,0 +1,87 @@
|
||||
import tailwindThemeVarDefine from './themes/tailwind-theme-var-define'
|
||||
|
||||
const difyUIPreset = {
|
||||
theme: {
|
||||
extend: {
|
||||
colors: {
|
||||
gray: {
|
||||
25: '#fcfcfd',
|
||||
50: '#f9fafb',
|
||||
100: '#f2f4f7',
|
||||
200: '#eaecf0',
|
||||
300: '#d0d5dd',
|
||||
400: '#98a2b3',
|
||||
500: '#667085',
|
||||
600: '#344054',
|
||||
700: '#475467',
|
||||
800: '#1d2939',
|
||||
900: '#101828',
|
||||
},
|
||||
primary: {
|
||||
25: '#f5f8ff',
|
||||
50: '#eff4ff',
|
||||
100: '#d1e0ff',
|
||||
200: '#b2ccff',
|
||||
300: '#84adff',
|
||||
400: '#528bff',
|
||||
500: '#2970ff',
|
||||
600: '#155eef',
|
||||
700: '#004eeb',
|
||||
800: '#0040c1',
|
||||
900: '#00359e',
|
||||
},
|
||||
blue: {
|
||||
500: '#E1EFFE',
|
||||
},
|
||||
green: {
|
||||
50: '#F3FAF7',
|
||||
100: '#DEF7EC',
|
||||
800: '#03543F',
|
||||
},
|
||||
yellow: {
|
||||
100: '#FDF6B2',
|
||||
800: '#723B13',
|
||||
},
|
||||
purple: {
|
||||
50: '#F6F5FF',
|
||||
200: '#DCD7FE',
|
||||
},
|
||||
indigo: {
|
||||
25: '#F5F8FF',
|
||||
50: '#EEF4FF',
|
||||
100: '#E0EAFF',
|
||||
300: '#A4BCFD',
|
||||
400: '#8098F9',
|
||||
600: '#444CE7',
|
||||
800: '#2D31A6',
|
||||
},
|
||||
...tailwindThemeVarDefine,
|
||||
},
|
||||
boxShadow: {
|
||||
'xs': '0px 1px 2px 0px rgba(16, 24, 40, 0.05)',
|
||||
'sm': '0px 1px 2px 0px rgba(16, 24, 40, 0.06), 0px 1px 3px 0px rgba(16, 24, 40, 0.10)',
|
||||
'sm-no-bottom': '0px -1px 2px 0px rgba(16, 24, 40, 0.06), 0px -1px 3px 0px rgba(16, 24, 40, 0.10)',
|
||||
'md': '0px 2px 4px -2px rgba(16, 24, 40, 0.06), 0px 4px 8px -2px rgba(16, 24, 40, 0.10)',
|
||||
'lg': '0px 4px 6px -2px rgba(16, 24, 40, 0.03), 0px 12px 16px -4px rgba(16, 24, 40, 0.08)',
|
||||
'xl': '0px 8px 8px -4px rgba(16, 24, 40, 0.03), 0px 20px 24px -4px rgba(16, 24, 40, 0.08)',
|
||||
'2xl': '0px 24px 48px -12px rgba(16, 24, 40, 0.18)',
|
||||
'3xl': '0px 32px 64px -12px rgba(16, 24, 40, 0.14)',
|
||||
'status-indicator-green-shadow': '0px 2px 6px 0px var(--color-components-badge-status-light-success-halo), 0px 0px 0px 1px var(--color-components-badge-status-light-border-outer)',
|
||||
'status-indicator-warning-shadow': '0px 2px 6px 0px var(--color-components-badge-status-light-warning-halo), 0px 0px 0px 1px var(--color-components-badge-status-light-border-outer)',
|
||||
'status-indicator-red-shadow': '0px 2px 6px 0px var(--color-components-badge-status-light-error-halo), 0px 0px 0px 1px var(--color-components-badge-status-light-border-outer)',
|
||||
'status-indicator-blue-shadow': '0px 2px 6px 0px var(--color-components-badge-status-light-normal-halo), 0px 0px 0px 1px var(--color-components-badge-status-light-border-outer)',
|
||||
'status-indicator-gray-shadow': '0px 1px 2px 0px var(--color-components-badge-status-light-disabled-halo), 0px 0px 0px 1px var(--color-components-badge-status-light-border-outer)',
|
||||
},
|
||||
opacity: {
|
||||
2: '0.02',
|
||||
8: '0.08',
|
||||
},
|
||||
fontSize: {
|
||||
'2xs': '0.625rem',
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: [],
|
||||
}
|
||||
|
||||
export default difyUIPreset
|
||||
18
packages/dify-ui/tsconfig.json
Normal file
18
packages/dify-ui/tsconfig.json
Normal file
@ -0,0 +1,18 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true,
|
||||
"outDir": "dist",
|
||||
"rootDir": "src",
|
||||
"isolatedModules": true,
|
||||
"verbatimModuleSyntax": true
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
@ -0,0 +1,4 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M0 4C0 1.79086 1.79086 0 4 0H12C14.2091 0 16 1.79086 16 4V12C16 14.2091 14.2091 16 12 16H4C1.79086 16 0 14.2091 0 12V4Z" fill="white" fill-opacity="0.12"/>
|
||||
<path d="M3.42756 8.7358V7.62784H10.8764C11.2003 7.62784 11.4957 7.5483 11.7628 7.3892C12.0298 7.23011 12.2415 7.01705 12.3977 6.75C12.5568 6.48295 12.6364 6.1875 12.6364 5.86364C12.6364 5.53977 12.5568 5.24574 12.3977 4.98153C12.2386 4.71449 12.0256 4.50142 11.7585 4.34233C11.4943 4.18324 11.2003 4.10369 10.8764 4.10369H10.3991V3H10.8764C11.4048 3 11.8849 3.12926 12.3168 3.38778C12.7486 3.64631 13.0938 3.99148 13.3523 4.4233C13.6108 4.85511 13.7401 5.33523 13.7401 5.86364C13.7401 6.25852 13.6648 6.62926 13.5142 6.97585C13.3665 7.32244 13.1619 7.62784 12.9006 7.89205C12.6392 8.15625 12.3352 8.36364 11.9886 8.5142C11.642 8.66193 11.2713 8.7358 10.8764 8.7358H3.42756ZM6.16761 12.0554L2.29403 8.18182L6.16761 4.30824L6.9304 5.07102L3.81534 8.18182L6.9304 11.2926L6.16761 12.0554Z" fill="white"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.0 KiB |
@ -0,0 +1,3 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="14" height="12" viewBox="0 0 14 12" fill="none">
|
||||
<path d="M12.3334 4C12.3334 2.52725 11.1395 1.33333 9.66671 1.33333H4.33337C2.86062 1.33333 1.66671 2.52724 1.66671 4V10.6667H9.66671C11.1395 10.6667 12.3334 9.47274 12.3334 8V4ZM7.66671 6.66667V8H4.33337V6.66667H7.66671ZM9.66671 4V5.33333H4.33337V4H9.66671ZM13.6667 8C13.6667 10.2091 11.8758 12 9.66671 12H0.333374V4C0.333374 1.79086 2.12424 0 4.33337 0H9.66671C11.8758 0 13.6667 1.79086 13.6667 4V8Z" fill="currentColor"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 528 B |
174
pnpm-lock.yaml
generated
174
pnpm-lock.yaml
generated
@ -384,6 +384,9 @@ catalogs:
|
||||
lexical:
|
||||
specifier: 0.43.0
|
||||
version: 0.43.0
|
||||
loro-crdt:
|
||||
specifier: 1.10.8
|
||||
version: 1.10.8
|
||||
mermaid:
|
||||
specifier: 11.14.0
|
||||
version: 11.14.0
|
||||
@ -471,6 +474,9 @@ catalogs:
|
||||
shiki:
|
||||
specifier: 4.0.2
|
||||
version: 4.0.2
|
||||
socket.io-client:
|
||||
specifier: 4.8.3
|
||||
version: 4.8.3
|
||||
sortablejs:
|
||||
specifier: 1.15.7
|
||||
version: 1.15.7
|
||||
@ -599,6 +605,19 @@ importers:
|
||||
specifier: 'catalog:'
|
||||
version: 0.1.16(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.6.0)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(happy-dom@20.9.0)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)
|
||||
|
||||
packages/dify-ui:
|
||||
dependencies:
|
||||
clsx:
|
||||
specifier: 'catalog:'
|
||||
version: 2.1.1
|
||||
tailwind-merge:
|
||||
specifier: 'catalog:'
|
||||
version: 3.5.0
|
||||
devDependencies:
|
||||
tailwindcss:
|
||||
specifier: 'catalog:'
|
||||
version: 4.2.2
|
||||
|
||||
packages/iconify-collections:
|
||||
devDependencies:
|
||||
iconify-import-svg:
|
||||
@ -739,9 +758,6 @@ importers:
|
||||
client-only:
|
||||
specifier: 'catalog:'
|
||||
version: 0.0.1
|
||||
clsx:
|
||||
specifier: 'catalog:'
|
||||
version: 2.1.1
|
||||
cmdk:
|
||||
specifier: 'catalog:'
|
||||
version: 1.1.1(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.5(react@19.2.5))(react@19.2.5)
|
||||
@ -829,6 +845,9 @@ importers:
|
||||
lexical:
|
||||
specifier: 'catalog:'
|
||||
version: 0.43.0
|
||||
loro-crdt:
|
||||
specifier: 'catalog:'
|
||||
version: 1.10.8
|
||||
mermaid:
|
||||
specifier: 'catalog:'
|
||||
version: 11.14.0
|
||||
@ -910,6 +929,9 @@ importers:
|
||||
shiki:
|
||||
specifier: 'catalog:'
|
||||
version: 4.0.2
|
||||
socket.io-client:
|
||||
specifier: 'catalog:'
|
||||
version: 4.8.3
|
||||
sortablejs:
|
||||
specifier: 'catalog:'
|
||||
version: 1.15.7
|
||||
@ -922,9 +944,6 @@ importers:
|
||||
string-ts:
|
||||
specifier: 'catalog:'
|
||||
version: 2.3.1
|
||||
tailwind-merge:
|
||||
specifier: 'catalog:'
|
||||
version: 3.5.0
|
||||
tldts:
|
||||
specifier: 'catalog:'
|
||||
version: 7.0.28
|
||||
@ -971,6 +990,9 @@ importers:
|
||||
'@iconify-json/ri':
|
||||
specifier: 'catalog:'
|
||||
version: 1.2.10
|
||||
'@langgenius/dify-ui':
|
||||
specifier: workspace:*
|
||||
version: link:../packages/dify-ui
|
||||
'@mdx-js/loader':
|
||||
specifier: 'catalog:'
|
||||
version: 3.1.1(webpack@5.105.4(uglify-js@3.19.3))
|
||||
@ -1126,7 +1148,7 @@ importers:
|
||||
version: 4.12.12
|
||||
knip:
|
||||
specifier: 'catalog:'
|
||||
version: 6.4.1(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
|
||||
version: 6.4.1(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)
|
||||
postcss:
|
||||
specifier: 'catalog:'
|
||||
version: 8.5.9
|
||||
@ -1553,14 +1575,17 @@ packages:
|
||||
peerDependencies:
|
||||
tailwindcss: '*'
|
||||
|
||||
'@emnapi/core@1.9.1':
|
||||
resolution: {integrity: sha512-mukuNALVsoix/w1BJwFzwXBN/dHeejQtuVzcDsfOEsdpCumXb/E9j8w11h5S54tT1xhifGfbbSm/ICrObRb3KA==}
|
||||
'@emnapi/core@1.9.2':
|
||||
resolution: {integrity: sha512-UC+ZhH3XtczQYfOlu3lNEkdW/p4dsJ1r/bP7H8+rhao3TTTMO1ATq/4DdIi23XuGoFY+Cz0JmCbdVl0hz9jZcA==}
|
||||
|
||||
'@emnapi/runtime@1.9.1':
|
||||
resolution: {integrity: sha512-VYi5+ZVLhpgK4hQ0TAjiQiZ6ol0oe4mBx7mVv7IflsiEp0OWoVsp/+f9Vc1hOhE0TtkORVrI1GvzyreqpgWtkA==}
|
||||
|
||||
'@emnapi/wasi-threads@1.2.0':
|
||||
resolution: {integrity: sha512-N10dEJNSsUx41Z6pZsXU8FjPjpBEplgH24sfkmITrBED1/U2Esum9F3lfLrMjKHHjmi557zQn7kR9R+XWXu5Rg==}
|
||||
'@emnapi/runtime@1.9.2':
|
||||
resolution: {integrity: sha512-3U4+MIWHImeyu1wnmVygh5WlgfYDtyf0k8AbLhMFxOipihf6nrWC4syIm/SwEeec0mNSafiiNnMJwbza/Is6Lw==}
|
||||
|
||||
'@emnapi/wasi-threads@1.2.1':
|
||||
resolution: {integrity: sha512-uTII7OYF+/Mes/MrcIOYp5yOtSMLBWSIoLPpcgwipoiKbli6k322tcoFsxoIIxPDqW01SQGAgko4EzZi2BNv2w==}
|
||||
|
||||
'@emoji-mart/data@1.2.1':
|
||||
resolution: {integrity: sha512-no2pQMWiBy6gpBEiqGeU77/bFejDqUTRY7KX+0+iur13op3bqUsXdnwoZs6Xb1zbv0gAj5VvS1PWoUUckSr5Dw==}
|
||||
@ -3531,6 +3556,9 @@ packages:
|
||||
resolution: {integrity: sha512-TeheYy0ILzBEI/CO55CP6zJCSdSWeRtGnHy8U8dWSUH4I68iqTsy7HkMktR4xakThc9jotkPQUXT4ITdbV7cHA==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
'@socket.io/component-emitter@3.1.2':
|
||||
resolution: {integrity: sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==}
|
||||
|
||||
'@solid-primitives/event-listener@2.4.5':
|
||||
resolution: {integrity: sha512-nwRV558mIabl4yVAhZKY8cb6G+O1F0M6Z75ttTu5hk+SxdOnKSGj+eetDIu7Oax1P138ZdUU01qnBPR8rnxaEA==}
|
||||
peerDependencies:
|
||||
@ -5487,6 +5515,13 @@ packages:
|
||||
end-of-stream@1.4.5:
|
||||
resolution: {integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==}
|
||||
|
||||
engine.io-client@6.6.4:
|
||||
resolution: {integrity: sha512-+kjUJnZGwzewFDw951CDWcwj35vMNf2fcj7xQWOctq1F2i1jkDdVvdFG9kM/BEChymCH36KgjnW0NsL58JYRxw==}
|
||||
|
||||
engine.io-parser@5.2.3:
|
||||
resolution: {integrity: sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==}
|
||||
engines: {node: '>=10.0.0'}
|
||||
|
||||
enhanced-resolve@5.20.1:
|
||||
resolution: {integrity: sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==}
|
||||
engines: {node: '>=10.13.0'}
|
||||
@ -6611,6 +6646,9 @@ packages:
|
||||
resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==}
|
||||
hasBin: true
|
||||
|
||||
loro-crdt@1.10.8:
|
||||
resolution: {integrity: sha512-GvH8fSJST1VDHRGzlQml80pBYoFbIP4ULeV1S8fD4ffmA8m+icoPORyVUW2AkJBY3dxKIcMMn0WqaJmpCmnbkQ==}
|
||||
|
||||
loupe@3.2.1:
|
||||
resolution: {integrity: sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==}
|
||||
|
||||
@ -7750,6 +7788,14 @@ packages:
|
||||
resolution: {integrity: sha512-dWUG8F5sIIARXih1DTaQAX4SsiTXhInKf1buxdY9DIg4ZYPZK5nGM1VRIYmEbDbsHt7USo99xSLFu5Q1IqTmsg==}
|
||||
engines: {node: '>= 18'}
|
||||
|
||||
socket.io-client@4.8.3:
|
||||
resolution: {integrity: sha512-uP0bpjWrjQmUt5DTHq9RuoCBdFJF10cdX9X+a368j/Ft0wmaVgxlrjvK3kjvgCODOMMOz9lcaRzxmso0bTWZ/g==}
|
||||
engines: {node: '>=10.0.0'}
|
||||
|
||||
socket.io-parser@4.2.6:
|
||||
resolution: {integrity: sha512-asJqbVBDsBCJx0pTqw3WfesSY0iRX+2xzWEWzrpcH7L6fLzrhyF8WPI8UaeM4YCuDfpwA/cgsdugMsmtz8EJeg==}
|
||||
engines: {node: '>=10.0.0'}
|
||||
|
||||
solid-js@1.9.11:
|
||||
resolution: {integrity: sha512-WEJtcc5mkh/BnHA6Yrg4whlF8g6QwpmXXRg4P2ztPmcKeHHlH4+djYecBLhSpecZY2RRECXYUwIc/C2r3yzQ4Q==}
|
||||
|
||||
@ -8007,6 +8053,10 @@ packages:
|
||||
resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==}
|
||||
engines: {node: '>=12.0.0'}
|
||||
|
||||
tinyglobby@0.2.16:
|
||||
resolution: {integrity: sha512-pn99VhoACYR8nFHhxqix+uvsbXineAasWm5ojXoN8xEwK5Kd3/TrhNn1wByuD52UxWRLy8pu+kRMniEi6Eq9Zg==}
|
||||
engines: {node: '>=12.0.0'}
|
||||
|
||||
tinypool@2.1.0:
|
||||
resolution: {integrity: sha512-Pugqs6M0m7Lv1I7FtxN4aoyToKg1C4tu+/381vH35y8oENM/Ai7f7C4StcoK4/+BSw9ebcS8jRiVrORFKCALLw==}
|
||||
engines: {node: ^20.0.0 || >=22.0.0}
|
||||
@ -8473,6 +8523,18 @@ packages:
|
||||
wrappy@1.0.2:
|
||||
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
|
||||
|
||||
ws@8.18.3:
|
||||
resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==}
|
||||
engines: {node: '>=10.0.0'}
|
||||
peerDependencies:
|
||||
bufferutil: ^4.0.1
|
||||
utf-8-validate: '>=5.0.2'
|
||||
peerDependenciesMeta:
|
||||
bufferutil:
|
||||
optional: true
|
||||
utf-8-validate:
|
||||
optional: true
|
||||
|
||||
ws@8.20.0:
|
||||
resolution: {integrity: sha512-sAt8BhgNbzCtgGbt2OxmpuryO63ZoDk/sqaB/znQm94T4fCEsy/yV+7CdC1kJhOU9lboAEU7R3kquuycDoibVA==}
|
||||
engines: {node: '>=10.0.0'}
|
||||
@ -8501,6 +8563,10 @@ packages:
|
||||
resolution: {integrity: sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==}
|
||||
engines: {node: '>=8.0'}
|
||||
|
||||
xmlhttprequest-ssl@2.1.2:
|
||||
resolution: {integrity: sha512-TEU+nJVUUnA4CYJFLvK5X9AOeH4KvDvhIfm0vV1GaQRtchnG0hgK5p8hw/xjv8cunWYCsiPCSDzObPyhEwq3KQ==}
|
||||
engines: {node: '>=0.4.0'}
|
||||
|
||||
yallist@3.1.1:
|
||||
resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==}
|
||||
|
||||
@ -9153,9 +9219,9 @@ snapshots:
|
||||
'@iconify/utils': 3.1.0
|
||||
tailwindcss: 4.2.2
|
||||
|
||||
'@emnapi/core@1.9.1':
|
||||
'@emnapi/core@1.9.2':
|
||||
dependencies:
|
||||
'@emnapi/wasi-threads': 1.2.0
|
||||
'@emnapi/wasi-threads': 1.2.1
|
||||
tslib: 2.8.1
|
||||
optional: true
|
||||
|
||||
@ -9164,7 +9230,12 @@ snapshots:
|
||||
tslib: 2.8.1
|
||||
optional: true
|
||||
|
||||
'@emnapi/wasi-threads@1.2.0':
|
||||
'@emnapi/runtime@1.9.2':
|
||||
dependencies:
|
||||
tslib: 2.8.1
|
||||
optional: true
|
||||
|
||||
'@emnapi/wasi-threads@1.2.1':
|
||||
dependencies:
|
||||
tslib: 2.8.1
|
||||
optional: true
|
||||
@ -9944,10 +10015,10 @@ snapshots:
|
||||
react: 19.2.5
|
||||
react-dom: 19.2.5(react@19.2.5)
|
||||
|
||||
'@napi-rs/wasm-runtime@1.1.2(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)':
|
||||
'@napi-rs/wasm-runtime@1.1.2(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)':
|
||||
dependencies:
|
||||
'@emnapi/core': 1.9.1
|
||||
'@emnapi/runtime': 1.9.1
|
||||
'@emnapi/core': 1.9.2
|
||||
'@emnapi/runtime': 1.9.2
|
||||
'@tybys/wasm-util': 0.10.1
|
||||
optional: true
|
||||
|
||||
@ -10120,9 +10191,9 @@ snapshots:
|
||||
'@oxc-parser/binding-openharmony-arm64@0.121.0':
|
||||
optional: true
|
||||
|
||||
'@oxc-parser/binding-wasm32-wasi@0.121.0(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)':
|
||||
'@oxc-parser/binding-wasm32-wasi@0.121.0(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)':
|
||||
dependencies:
|
||||
'@napi-rs/wasm-runtime': 1.1.2(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
|
||||
'@napi-rs/wasm-runtime': 1.1.2(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)
|
||||
transitivePeerDependencies:
|
||||
- '@emnapi/core'
|
||||
- '@emnapi/runtime'
|
||||
@ -10191,9 +10262,9 @@ snapshots:
|
||||
'@oxc-resolver/binding-openharmony-arm64@11.19.1':
|
||||
optional: true
|
||||
|
||||
'@oxc-resolver/binding-wasm32-wasi@11.19.1(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)':
|
||||
'@oxc-resolver/binding-wasm32-wasi@11.19.1(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)':
|
||||
dependencies:
|
||||
'@napi-rs/wasm-runtime': 1.1.2(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
|
||||
'@napi-rs/wasm-runtime': 1.1.2(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)
|
||||
transitivePeerDependencies:
|
||||
- '@emnapi/core'
|
||||
- '@emnapi/runtime'
|
||||
@ -10880,6 +10951,8 @@ snapshots:
|
||||
|
||||
'@sindresorhus/base62@1.0.0': {}
|
||||
|
||||
'@socket.io/component-emitter@3.1.2': {}
|
||||
|
||||
'@solid-primitives/event-listener@2.4.5(solid-js@1.9.11)':
|
||||
dependencies:
|
||||
'@solid-primitives/utils': 6.4.0(solid-js@1.9.11)
|
||||
@ -11758,7 +11831,7 @@ snapshots:
|
||||
debug: 4.4.3(supports-color@8.1.1)
|
||||
minimatch: 10.2.4
|
||||
semver: 7.7.4
|
||||
tinyglobby: 0.2.15
|
||||
tinyglobby: 0.2.16
|
||||
ts-api-utils: 2.5.0(typescript@6.0.2)
|
||||
typescript: 6.0.2
|
||||
transitivePeerDependencies:
|
||||
@ -12944,6 +13017,20 @@ snapshots:
|
||||
dependencies:
|
||||
once: 1.4.0
|
||||
|
||||
engine.io-client@6.6.4:
|
||||
dependencies:
|
||||
'@socket.io/component-emitter': 3.1.2
|
||||
debug: 4.4.3(supports-color@8.1.1)
|
||||
engine.io-parser: 5.2.3
|
||||
ws: 8.18.3
|
||||
xmlhttprequest-ssl: 2.1.2
|
||||
transitivePeerDependencies:
|
||||
- bufferutil
|
||||
- supports-color
|
||||
- utf-8-validate
|
||||
|
||||
engine.io-parser@5.2.3: {}
|
||||
|
||||
enhanced-resolve@5.20.1:
|
||||
dependencies:
|
||||
graceful-fs: 4.2.11
|
||||
@ -14140,7 +14227,7 @@ snapshots:
|
||||
|
||||
khroma@2.1.0: {}
|
||||
|
||||
knip@6.4.1(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1):
|
||||
knip@6.4.1(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2):
|
||||
dependencies:
|
||||
'@nodelib/fs.walk': 1.2.8
|
||||
fast-glob: 3.3.3
|
||||
@ -14148,8 +14235,8 @@ snapshots:
|
||||
get-tsconfig: 4.13.7
|
||||
jiti: 2.6.1
|
||||
minimist: 1.2.8
|
||||
oxc-parser: 0.121.0(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
|
||||
oxc-resolver: 11.19.1(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
|
||||
oxc-parser: 0.121.0(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)
|
||||
oxc-resolver: 11.19.1(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)
|
||||
picocolors: 1.1.1
|
||||
picomatch: 4.0.4
|
||||
smol-toml: 1.6.1
|
||||
@ -14288,6 +14375,8 @@ snapshots:
|
||||
dependencies:
|
||||
js-tokens: 4.0.0
|
||||
|
||||
loro-crdt@1.10.8: {}
|
||||
|
||||
loupe@3.2.1: {}
|
||||
|
||||
lower-case@2.0.2:
|
||||
@ -15056,7 +15145,7 @@ snapshots:
|
||||
type-check: 0.4.0
|
||||
word-wrap: 1.2.5
|
||||
|
||||
oxc-parser@0.121.0(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1):
|
||||
oxc-parser@0.121.0(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2):
|
||||
dependencies:
|
||||
'@oxc-project/types': 0.121.0
|
||||
optionalDependencies:
|
||||
@ -15076,7 +15165,7 @@ snapshots:
|
||||
'@oxc-parser/binding-linux-x64-gnu': 0.121.0
|
||||
'@oxc-parser/binding-linux-x64-musl': 0.121.0
|
||||
'@oxc-parser/binding-openharmony-arm64': 0.121.0
|
||||
'@oxc-parser/binding-wasm32-wasi': 0.121.0(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
|
||||
'@oxc-parser/binding-wasm32-wasi': 0.121.0(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)
|
||||
'@oxc-parser/binding-win32-arm64-msvc': 0.121.0
|
||||
'@oxc-parser/binding-win32-ia32-msvc': 0.121.0
|
||||
'@oxc-parser/binding-win32-x64-msvc': 0.121.0
|
||||
@ -15084,7 +15173,7 @@ snapshots:
|
||||
- '@emnapi/core'
|
||||
- '@emnapi/runtime'
|
||||
|
||||
oxc-resolver@11.19.1(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1):
|
||||
oxc-resolver@11.19.1(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2):
|
||||
optionalDependencies:
|
||||
'@oxc-resolver/binding-android-arm-eabi': 11.19.1
|
||||
'@oxc-resolver/binding-android-arm64': 11.19.1
|
||||
@ -15102,7 +15191,7 @@ snapshots:
|
||||
'@oxc-resolver/binding-linux-x64-gnu': 11.19.1
|
||||
'@oxc-resolver/binding-linux-x64-musl': 11.19.1
|
||||
'@oxc-resolver/binding-openharmony-arm64': 11.19.1
|
||||
'@oxc-resolver/binding-wasm32-wasi': 11.19.1(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
|
||||
'@oxc-resolver/binding-wasm32-wasi': 11.19.1(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)
|
||||
'@oxc-resolver/binding-win32-arm64-msvc': 11.19.1
|
||||
'@oxc-resolver/binding-win32-ia32-msvc': 11.19.1
|
||||
'@oxc-resolver/binding-win32-x64-msvc': 11.19.1
|
||||
@ -15980,6 +16069,24 @@ snapshots:
|
||||
|
||||
smol-toml@1.6.1: {}
|
||||
|
||||
socket.io-client@4.8.3:
|
||||
dependencies:
|
||||
'@socket.io/component-emitter': 3.1.2
|
||||
debug: 4.4.3(supports-color@8.1.1)
|
||||
engine.io-client: 6.6.4
|
||||
socket.io-parser: 4.2.6
|
||||
transitivePeerDependencies:
|
||||
- bufferutil
|
||||
- supports-color
|
||||
- utf-8-validate
|
||||
|
||||
socket.io-parser@4.2.6:
|
||||
dependencies:
|
||||
'@socket.io/component-emitter': 3.1.2
|
||||
debug: 4.4.3(supports-color@8.1.1)
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
solid-js@1.9.11:
|
||||
dependencies:
|
||||
csstype: 3.2.3
|
||||
@ -16241,6 +16348,11 @@ snapshots:
|
||||
fdir: 6.5.0(picomatch@4.0.4)
|
||||
picomatch: 4.0.4
|
||||
|
||||
tinyglobby@0.2.16:
|
||||
dependencies:
|
||||
fdir: 6.5.0(picomatch@4.0.4)
|
||||
picomatch: 4.0.4
|
||||
|
||||
tinypool@2.1.0: {}
|
||||
|
||||
tinyrainbow@2.0.0: {}
|
||||
@ -16749,6 +16861,8 @@ snapshots:
|
||||
|
||||
wrappy@1.0.2: {}
|
||||
|
||||
ws@8.18.3: {}
|
||||
|
||||
ws@8.20.0: {}
|
||||
|
||||
wsl-utils@0.1.0:
|
||||
@ -16764,6 +16878,8 @@ snapshots:
|
||||
|
||||
xmlbuilder@15.1.1: {}
|
||||
|
||||
xmlhttprequest-ssl@2.1.2: {}
|
||||
|
||||
yallist@3.1.1: {}
|
||||
|
||||
yallist@5.0.0: {}
|
||||
|
||||
@ -1,57 +1,22 @@
|
||||
catalogMode: prefer
|
||||
trustPolicy: no-downgrade
|
||||
trustPolicyExclude:
|
||||
- chokidar@4.0.3
|
||||
- reselect@5.1.1
|
||||
- semver@6.3.1
|
||||
blockExoticSubdeps: true
|
||||
strictDepBuilds: true
|
||||
allowBuilds:
|
||||
"@parcel/watcher": false
|
||||
canvas: false
|
||||
esbuild: false
|
||||
sharp: false
|
||||
packages:
|
||||
- web
|
||||
- e2e
|
||||
- sdks/nodejs-client
|
||||
- packages/*
|
||||
overrides:
|
||||
"@lexical/code": npm:lexical-code-no-prism@0.41.0
|
||||
"@monaco-editor/loader": 1.7.0
|
||||
brace-expansion@>=2.0.0 <2.0.3: 2.0.3
|
||||
canvas: ^3.2.2
|
||||
dompurify@>=3.1.3 <=3.3.1: 3.3.2
|
||||
esbuild@<0.27.2: 0.27.2
|
||||
flatted@<=3.4.1: 3.4.2
|
||||
glob@>=10.2.0 <10.5.0: 11.1.0
|
||||
is-core-module: npm:@nolyfill/is-core-module@^1.0.39
|
||||
lodash@>=4.0.0 <= 4.17.23: 4.18.0
|
||||
lodash-es@>=4.0.0 <= 4.17.23: 4.18.0
|
||||
picomatch@<2.3.2: 2.3.2
|
||||
picomatch@>=4.0.0 <4.0.4: 4.0.4
|
||||
rollup@>=4.0.0 <4.59.0: 4.59.0
|
||||
safe-buffer: ^5.2.1
|
||||
safer-buffer: npm:@nolyfill/safer-buffer@^1.0.44
|
||||
side-channel: npm:@nolyfill/side-channel@^1.0.44
|
||||
smol-toml@<1.6.1: 1.6.1
|
||||
solid-js: 1.9.11
|
||||
string-width: ~8.2.0
|
||||
svgo@>=3.0.0 <3.3.3: 3.3.3
|
||||
tar@<=7.5.10: 7.5.11
|
||||
undici@>=7.0.0 <7.24.0: 7.24.0
|
||||
vite: npm:@voidzero-dev/vite-plus-core@0.1.16
|
||||
vitest: npm:@voidzero-dev/vite-plus-test@0.1.16
|
||||
yaml@>=2.0.0 <2.8.3: 2.8.3
|
||||
yauzl@<3.2.1: 3.2.1
|
||||
allowBuilds:
|
||||
"@parcel/watcher": false
|
||||
canvas: false
|
||||
esbuild: false
|
||||
sharp: false
|
||||
blockExoticSubdeps: true
|
||||
catalog:
|
||||
"@amplitude/analytics-browser": 2.39.0
|
||||
"@amplitude/plugin-session-replay-browser": 1.27.7
|
||||
"@antfu/eslint-config": 8.2.0
|
||||
"@base-ui/react": 1.4.0
|
||||
"@date-fns/tz": 1.4.1
|
||||
"@chromatic-com/storybook": 5.1.2
|
||||
"@cucumber/cucumber": 12.8.0
|
||||
"@date-fns/tz": 1.4.1
|
||||
"@egoist/tailwindcss-icons": 1.9.2
|
||||
"@emoji-mart/data": 1.2.1
|
||||
"@eslint-react/eslint-plugin": 3.0.0
|
||||
@ -107,6 +72,7 @@ catalog:
|
||||
"@testing-library/jest-dom": 6.9.1
|
||||
"@testing-library/react": 16.3.2
|
||||
"@testing-library/user-event": 14.6.1
|
||||
'@tsdown/css': 0.21.8
|
||||
"@tsslint/cli": 3.0.3
|
||||
"@tsslint/compat-eslint": 3.0.3
|
||||
"@tsslint/config": 3.0.3
|
||||
@ -176,6 +142,7 @@ catalog:
|
||||
ky: 2.0.0
|
||||
lamejs: 1.2.1
|
||||
lexical: 0.43.0
|
||||
loro-crdt: 1.10.8
|
||||
mermaid: 11.14.0
|
||||
mime: 4.1.0
|
||||
mitt: 3.0.1
|
||||
@ -206,6 +173,7 @@ catalog:
|
||||
scheduler: 0.27.0
|
||||
sharp: 0.34.5
|
||||
shiki: 4.0.2
|
||||
socket.io-client: 4.8.3
|
||||
sortablejs: 1.15.7
|
||||
std-semver: 1.0.8
|
||||
storybook: 10.3.5
|
||||
@ -230,3 +198,38 @@ catalog:
|
||||
zod: 4.3.6
|
||||
zundo: 2.3.0
|
||||
zustand: 5.0.12
|
||||
catalogMode: prefer
|
||||
overrides:
|
||||
"@lexical/code": npm:lexical-code-no-prism@0.41.0
|
||||
"@monaco-editor/loader": 1.7.0
|
||||
brace-expansion@>=2.0.0 <2.0.3: 2.0.3
|
||||
canvas: ^3.2.2
|
||||
dompurify@>=3.1.3 <=3.3.1: 3.3.2
|
||||
esbuild@<0.27.2: 0.27.2
|
||||
flatted@<=3.4.1: 3.4.2
|
||||
glob@>=10.2.0 <10.5.0: 11.1.0
|
||||
is-core-module: npm:@nolyfill/is-core-module@^1.0.39
|
||||
lodash-es@>=4.0.0 <= 4.17.23: 4.18.0
|
||||
lodash@>=4.0.0 <= 4.17.23: 4.18.0
|
||||
picomatch@<2.3.2: 2.3.2
|
||||
picomatch@>=4.0.0 <4.0.4: 4.0.4
|
||||
rollup@>=4.0.0 <4.59.0: 4.59.0
|
||||
safe-buffer: ^5.2.1
|
||||
safer-buffer: npm:@nolyfill/safer-buffer@^1.0.44
|
||||
side-channel: npm:@nolyfill/side-channel@^1.0.44
|
||||
smol-toml@<1.6.1: 1.6.1
|
||||
solid-js: 1.9.11
|
||||
string-width: ~8.2.0
|
||||
svgo@>=3.0.0 <3.3.3: 3.3.3
|
||||
tar@<=7.5.10: 7.5.11
|
||||
undici@>=7.0.0 <7.24.0: 7.24.0
|
||||
vite: npm:@voidzero-dev/vite-plus-core@0.1.16
|
||||
vitest: npm:@voidzero-dev/vite-plus-test@0.1.16
|
||||
yaml@>=2.0.0 <2.8.3: 2.8.3
|
||||
yauzl@<3.2.1: 3.2.1
|
||||
strictDepBuilds: true
|
||||
trustPolicy: no-downgrade
|
||||
trustPolicyExclude:
|
||||
- chokidar@4.0.3
|
||||
- reselect@5.1.1
|
||||
- semver@6.3.1
|
||||
|
||||
@ -14,6 +14,8 @@ NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
|
||||
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
|
||||
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
|
||||
NEXT_PUBLIC_COOKIE_DOMAIN=
|
||||
# WebSocket server URL.
|
||||
NEXT_PUBLIC_SOCKET_URL=ws://localhost:5001
|
||||
|
||||
# Dev-only Hono proxy targets.
|
||||
# The frontend keeps requesting http://localhost:5001 directly,
|
||||
|
||||
@ -12,6 +12,10 @@
|
||||
|
||||
- `frontend-query-mutation` is the source of truth for Dify frontend contracts, query and mutation call-site patterns, conditional queries, invalidation, and mutation error handling.
|
||||
|
||||
## Design Token Mapping
|
||||
|
||||
- When translating Figma designs to code, read `../packages/dify-ui/AGENTS.md` for the Figma `--radius/*` token to Tailwind `rounded-*` class mapping. The two scales are offset by one step.
|
||||
|
||||
## Automated Test Generation
|
||||
|
||||
- Use `./docs/test.md` as the canonical instruction set for generating frontend automated tests.
|
||||
|
||||
@ -30,9 +30,8 @@ COPY packages /app/packages
|
||||
# Use packageManager from package.json
|
||||
RUN corepack install
|
||||
|
||||
# Install only the web workspace to keep image builds from pulling in
|
||||
# unrelated workspace dependencies such as e2e tooling.
|
||||
RUN VITE_GIT_HOOKS=0 pnpm install --filter ./web... --frozen-lockfile
|
||||
# Install workspace dependencies for the Docker build environment.
|
||||
RUN VITE_GIT_HOOKS=0 pnpm install --frozen-lockfile
|
||||
|
||||
# build resources
|
||||
FROM base AS builder
|
||||
|
||||
@ -136,7 +136,6 @@ pnpm -C web test
|
||||
|
||||
If you are not familiar with writing tests, refer to:
|
||||
|
||||
- [classnames.spec.ts] - Utility function test example
|
||||
- [index.spec.tsx] - Component test example
|
||||
|
||||
### Analyze Component Complexity
|
||||
@ -166,7 +165,6 @@ The Dify community can be found on [Discord community], where you can ask questi
|
||||
[Storybook]: https://storybook.js.org
|
||||
[Vite+]: https://viteplus.dev
|
||||
[Vitest]: https://vitest.dev
|
||||
[classnames.spec.ts]: ./utils/classnames.spec.ts
|
||||
[index.spec.tsx]: ./app/components/base/button/index.spec.tsx
|
||||
[pnpm]: https://pnpm.io
|
||||
[vinext]: https://github.com/cloudflare/vinext
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user