Merge branch 'main' into tp

This commit is contained in:
JzoNg 2026-05-06 15:39:10 +08:00
commit 21a9c8d59c
736 changed files with 69536 additions and 14530 deletions

3
.github/CODEOWNERS vendored
View File

@ -6,6 +6,9 @@
* @crazywoola @laipz8200 @Yeuoly
# ESLint suppression file is maintained by autofix.ci pruning.
/eslint-suppressions.json
# CODEOWNERS file
/.github/CODEOWNERS @laipz8200 @crazywoola

View File

@ -4,7 +4,7 @@ runs:
using: composite
steps:
- name: Setup Vite+
uses: voidzero-dev/setup-vp@20553a7a7429c429a74894104a2835d7fed28a72 # v1.3.0
uses: voidzero-dev/setup-vp@4f5aa3e38c781f1b01e78fb9255527cee8a6efa6 # v1.8.0
with:
node-version-file: .nvmrc
cache: true

1
.github/labeler.yml vendored
View File

@ -6,5 +6,4 @@ web:
- 'package.json'
- 'pnpm-lock.yaml'
- 'pnpm-workspace.yaml'
- '.npmrc'
- '.nvmrc'

View File

@ -43,7 +43,6 @@ jobs:
package.json
pnpm-lock.yaml
pnpm-workspace.yaml
.npmrc
.nvmrc
- name: Check api inputs
if: github.event_name != 'merge_group'
@ -114,7 +113,7 @@ jobs:
find . -name "*.py.bak" -type f -delete
- name: Setup web environment
if: github.event_name != 'merge_group' && steps.web-changes.outputs.any_changed == 'true'
if: github.event_name != 'merge_group'
uses: ./.github/actions/setup-web
- name: ESLint autofix

View File

@ -74,7 +74,7 @@ jobs:
password: ${{ env.DOCKERHUB_TOKEN }}
- name: Set up Depot CLI
uses: depot/setup-action@v1
uses: depot/setup-action@15c09a5f77a0840ad4bce955686522a257853461 # v1.7.1
- name: Extract metadata for Docker
id: meta
@ -84,7 +84,7 @@ jobs:
- name: Build Docker image
id: build
uses: depot/build-push-action@v1
uses: depot/build-push-action@5f3b3c2e5a00f0093de47f657aeaefcedff27d18 # v1.17.0
with:
project: ${{ vars.DEPOT_PROJECT_ID }}
context: ${{ matrix.build_context }}
@ -124,10 +124,10 @@ jobs:
file: "web/Dockerfile"
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@98e3b2c9eab4f4f98a95c0c0a3ea5e5e672fd2a8 # v3.10.0
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
- name: Validate Docker image
uses: docker/build-push-action@5cd29d66b4a8d8e6f4d5dfe2e9329f0b1d446289 # v6.18.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
push: false
context: ${{ matrix.build_context }}

View File

@ -110,6 +110,28 @@ jobs:
sed -i 's/DB_PORT=5432/DB_PORT=3306/' .env
sed -i 's/DB_USERNAME=postgres/DB_USERNAME=root/' .env
# hoverkraft-tech/compose-action@v2.6.0 only waits for `docker compose up -d`
# to return (container processes started); it does not wait on healthcheck
# status. mysql:8.0's first-time init takes 15-30s, so without an explicit
# wait the migration runs while InnoDB is still initialising and gets
# killed with "Lost connection during query". Poll a real SELECT until it
# succeeds.
- name: Wait for MySQL to accept queries
run: |
set +e
for i in $(seq 1 60); do
if docker run --rm --network host mysql:8.0 \
mysql -h 127.0.0.1 -P 3306 -uroot -pdifyai123456 \
-e 'SELECT 1' >/dev/null 2>&1; then
echo "MySQL ready after ${i}s"
exit 0
fi
sleep 1
done
echo "MySQL not ready after 60s; dumping container logs:"
docker compose -f docker/docker-compose.middleware.yaml --profile mysql logs --tail=200 db_mysql
exit 1
- name: Run DB Migration
env:
DEBUG: true

View File

@ -44,10 +44,10 @@ jobs:
file: "web/Dockerfile"
steps:
- name: Set up Depot CLI
uses: depot/setup-action@v1
uses: depot/setup-action@15c09a5f77a0840ad4bce955686522a257853461 # v1.7.1
- name: Build Docker Image
uses: depot/build-push-action@v1
uses: depot/build-push-action@5f3b3c2e5a00f0093de47f657aeaefcedff27d18 # v1.17.0
with:
project: ${{ vars.DEPOT_PROJECT_ID }}
push: false
@ -71,10 +71,10 @@ jobs:
file: "web/Dockerfile"
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@98e3b2c9eab4f4f98a95c0c0a3ea5e5e672fd2a8 # v3.10.0
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
- name: Build Docker Image
uses: docker/build-push-action@5cd29d66b4a8d8e6f4d5dfe2e9329f0b1d446289 # v6.18.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
push: false
context: ${{ matrix.context }}

View File

@ -69,7 +69,6 @@ jobs:
- 'package.json'
- 'pnpm-lock.yaml'
- 'pnpm-workspace.yaml'
- '.npmrc'
- '.nvmrc'
- '.github/workflows/web-tests.yml'
- '.github/actions/setup-web/**'
@ -83,7 +82,6 @@ jobs:
- 'package.json'
- 'pnpm-lock.yaml'
- 'pnpm-workspace.yaml'
- '.npmrc'
- '.nvmrc'
- 'docker/docker-compose.middleware.yaml'
- 'docker/middleware.env.example'

View File

@ -83,7 +83,6 @@ jobs:
package.json
pnpm-lock.yaml
pnpm-workspace.yaml
.npmrc
.nvmrc
.github/workflows/style.yml
.github/actions/setup-web/**
@ -110,8 +109,6 @@ jobs:
- name: Web tsslint
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
env:
NODE_OPTIONS: --max-old-space-size=4096
run: vp run lint:tss
- name: Web type check

View File

@ -9,7 +9,6 @@ on:
- package.json
- pnpm-lock.yaml
- pnpm-workspace.yaml
- .npmrc
concurrency:
group: sdk-tests-${{ github.head_ref || github.run_id }}

View File

@ -158,7 +158,7 @@ jobs:
- name: Run Claude Code for Translation Sync
if: steps.context.outputs.CHANGED_FILES != ''
uses: anthropics/claude-code-action@567fe954a4527e81f132d87d1bdbcc94f7737434 # v1.0.107
uses: anthropics/claude-code-action@fefa07e9c665b7320f08c3b525980457f22f58aa # v1.0.111
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
github_token: ${{ secrets.GITHUB_TOKEN }}

View File

@ -13,7 +13,7 @@ concurrency:
jobs:
test:
name: Web Full-Stack E2E
runs-on: depot-ubuntu-24.04
runs-on: depot-ubuntu-24.04-4
defaults:
run:
shell: bash

3
.gitignore vendored
View File

@ -219,6 +219,9 @@ node_modules
# plugin migrate
plugins.jsonl
# generated API OpenAPI specs
packages/contracts/openapi/
# mise
mise.toml

1
.npmrc
View File

@ -1 +0,0 @@
save-exact=true

View File

@ -113,8 +113,18 @@ def create_tenant(email: str, language: str | None = None, name: str | None = No
# Validates name encoding for non-Latin characters.
name = name.strip().encode("utf-8").decode("utf-8") if name else None
# generate random password
new_password = secrets.token_urlsafe(16)
# Generate a random password that satisfies the password policy.
# The iteration limit guards against infinite loops caused by unexpected bugs in valid_password.
for _ in range(100):
new_password = secrets.token_urlsafe(16)
try:
valid_password(new_password)
break
except Exception:
continue
else:
click.echo(click.style("Failed to generate a valid password. Please try again.", fg="red"))
return
# register account
account = RegisterService.register(

View File

@ -41,7 +41,8 @@ def guess_file_info_from_response(response: httpx.Response):
# Try to extract filename from URL
parsed_url = urllib.parse.urlparse(url)
url_path = parsed_url.path
filename = os.path.basename(url_path)
# Decode percent-encoded characters in the path segment
filename = urllib.parse.unquote(os.path.basename(url_path))
# If filename couldn't be extracted, use Content-Disposition header
if not filename:

View File

@ -1,4 +1,5 @@
import logging
import re
import uuid
from datetime import datetime
from typing import Any, Literal
@ -8,6 +9,7 @@ from flask_restx import Resource
from pydantic import AliasChoices, BaseModel, Field, computed_field, field_validator
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import BadRequest
from controllers.common.helpers import FileInfo
@ -57,6 +59,7 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co
register_enum_models(console_ns, IconType)
_logger = logging.getLogger(__name__)
_TAG_IDS_BRACKET_PATTERN = re.compile(r"^tag_ids\[(\d+)\]$")
class AppListQuery(BaseModel):
@ -66,22 +69,19 @@ class AppListQuery(BaseModel):
default="all", description="App mode filter"
)
name: str | None = Field(default=None, description="Filter by app name")
tag_ids: list[str] | None = Field(default=None, description="Comma-separated tag IDs")
tag_ids: list[str] | None = Field(default=None, description="Filter by tag IDs")
is_created_by_me: bool | None = Field(default=None, description="Filter by creator")
@field_validator("tag_ids", mode="before")
@classmethod
def validate_tag_ids(cls, value: str | list[str] | None) -> list[str] | None:
def validate_tag_ids(cls, value: list[str] | None) -> list[str] | None:
if not value:
return None
if isinstance(value, str):
items = [item.strip() for item in value.split(",") if item.strip()]
elif isinstance(value, list):
items = [str(item).strip() for item in value if item and str(item).strip()]
else:
raise TypeError("Unsupported tag_ids type.")
if not isinstance(value, list):
raise ValueError("Unsupported tag_ids type.")
items = [str(item).strip() for item in value if item and str(item).strip()]
if not items:
return None
@ -91,6 +91,26 @@ class AppListQuery(BaseModel):
raise ValueError("Invalid UUID format in tag_ids.") from exc
def _normalize_app_list_query_args(query_args: MultiDict[str, str]) -> dict[str, str | list[str]]:
normalized: dict[str, str | list[str]] = {}
indexed_tag_ids: list[tuple[int, str]] = []
for key in query_args:
match = _TAG_IDS_BRACKET_PATTERN.fullmatch(key)
if match:
indexed_tag_ids.extend((int(match.group(1)), value) for value in query_args.getlist(key))
continue
value = query_args.get(key)
if value is not None:
normalized[key] = value
if indexed_tag_ids:
normalized["tag_ids"] = [value for _, value in sorted(indexed_tag_ids)]
return normalized
class CreateAppPayload(BaseModel):
name: str = Field(..., min_length=1, description="App name")
description: str | None = Field(default=None, description="App description (max 400 chars)", max_length=400)
@ -455,7 +475,7 @@ class AppListApi(Resource):
"""Get app list"""
current_user, current_tenant_id = current_account_with_tenant()
args = AppListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
args = AppListQuery.model_validate(_normalize_app_list_query_args(request.args))
args_dict = args.model_dump()
# get app list

View File

@ -60,7 +60,8 @@ _file_access_controller = DatabaseFileAccessController()
LISTENING_RETRY_IN = 2000
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
RESTORE_SOURCE_WORKFLOW_MUST_BE_PUBLISHED_MESSAGE = "source workflow must be published"
MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS = 50
MAX_WORKFLOW_ONLINE_USERS_REQUEST_IDS = 1000
WORKFLOW_ONLINE_USERS_REDIS_BATCH_SIZE = 50
# Register models for flask_restx to avoid dict type issues in Swagger
# Register in dependency order: base models first, then dependent models
@ -158,8 +159,13 @@ class WorkflowFeaturesPayload(BaseModel):
features: dict[str, Any] = Field(..., description="Workflow feature configuration")
class WorkflowOnlineUsersQuery(BaseModel):
app_ids: str = Field(..., description="Comma-separated app IDs")
class WorkflowOnlineUsersPayload(BaseModel):
app_ids: list[str] = Field(default_factory=list, description="App IDs")
@field_validator("app_ids")
@classmethod
def normalize_app_ids(cls, app_ids: list[str]) -> list[str]:
return list(dict.fromkeys(app_id.strip() for app_id in app_ids if app_id.strip()))
class DraftWorkflowTriggerRunPayload(BaseModel):
@ -186,7 +192,7 @@ reg(ConvertToWorkflowPayload)
reg(WorkflowListQuery)
reg(WorkflowUpdatePayload)
reg(WorkflowFeaturesPayload)
reg(WorkflowOnlineUsersQuery)
reg(WorkflowOnlineUsersPayload)
reg(DraftWorkflowTriggerRunPayload)
reg(DraftWorkflowTriggerRunAllPayload)
@ -1384,19 +1390,19 @@ class DraftWorkflowTriggerRunAllApi(Resource):
@console_ns.route("/apps/workflows/online-users")
class WorkflowOnlineUsersApi(Resource):
@console_ns.expect(console_ns.models[WorkflowOnlineUsersQuery.__name__])
@console_ns.expect(console_ns.models[WorkflowOnlineUsersPayload.__name__])
@console_ns.doc("get_workflow_online_users")
@console_ns.doc(description="Get workflow online users")
@setup_required
@login_required
@account_initialization_required
@marshal_with(online_user_list_fields)
def get(self):
args = WorkflowOnlineUsersQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
def post(self):
args = WorkflowOnlineUsersPayload.model_validate(console_ns.payload or {})
app_ids = list(dict.fromkeys(app_id.strip() for app_id in args.app_ids.split(",") if app_id.strip()))
if len(app_ids) > MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS:
raise BadRequest(f"Maximum {MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS} app_ids are allowed per request.")
app_ids = args.app_ids
if len(app_ids) > MAX_WORKFLOW_ONLINE_USERS_REQUEST_IDS:
raise BadRequest(f"Maximum {MAX_WORKFLOW_ONLINE_USERS_REQUEST_IDS} app_ids are allowed per request.")
if not app_ids:
return {"data": []}
@ -1404,13 +1410,24 @@ class WorkflowOnlineUsersApi(Resource):
_, current_tenant_id = current_account_with_tenant()
workflow_service = WorkflowService()
accessible_app_ids = workflow_service.get_accessible_app_ids(app_ids, current_tenant_id)
ordered_accessible_app_ids = [app_id for app_id in app_ids if app_id in accessible_app_ids]
users_json_by_app_id: dict[str, Any] = {}
for start_index in range(0, len(ordered_accessible_app_ids), WORKFLOW_ONLINE_USERS_REDIS_BATCH_SIZE):
app_id_batch = ordered_accessible_app_ids[
start_index : start_index + WORKFLOW_ONLINE_USERS_REDIS_BATCH_SIZE
]
pipe = redis_client.pipeline(transaction=False)
for app_id in app_id_batch:
pipe.hgetall(f"{WORKFLOW_ONLINE_USERS_PREFIX}{app_id}")
users_json_batch = pipe.execute()
for app_id, users_json in zip(app_id_batch, users_json_batch):
users_json_by_app_id[app_id] = users_json
results = []
for app_id in app_ids:
if app_id not in accessible_app_ids:
continue
users_json = redis_client.hgetall(f"{WORKFLOW_ONLINE_USERS_PREFIX}{app_id}")
for app_id in ordered_accessible_app_ids:
users_json = users_json_by_app_id.get(app_id, {})
users = []
for _, user_info_json in users_json.items():

View File

@ -38,6 +38,48 @@ class HitTestingPayload(BaseModel):
class DatasetsHitTestingBase:
@staticmethod
def _normalize_hit_testing_query(query: Any) -> str:
"""Return the user-visible query string from legacy and current response shapes."""
if isinstance(query, str):
return query
if isinstance(query, dict):
content = query.get("content")
if isinstance(content, str):
return content
raise ValueError("Invalid hit testing query response")
@staticmethod
def _normalize_hit_testing_records(records: Any) -> list[dict[str, Any]]:
"""Coerce nullable collection fields into lists before response validation."""
if not isinstance(records, list):
return []
normalized_records: list[dict[str, Any]] = []
for record in records:
if not isinstance(record, dict):
continue
normalized_record = dict(record)
segment = normalized_record.get("segment")
if isinstance(segment, dict):
normalized_segment = dict(segment)
if normalized_segment.get("keywords") is None:
normalized_segment["keywords"] = []
normalized_record["segment"] = normalized_segment
if normalized_record.get("child_chunks") is None:
normalized_record["child_chunks"] = []
if normalized_record.get("files") is None:
normalized_record["files"] = []
normalized_records.append(normalized_record)
return normalized_records
@staticmethod
def get_and_validate_dataset(dataset_id: str):
assert isinstance(current_user, Account)
@ -75,7 +117,12 @@ class DatasetsHitTestingBase:
attachment_ids=args.get("attachment_ids"),
limit=10,
)
return {"query": response["query"], "records": marshal(response["records"], hit_testing_record_fields)}
return {
"query": DatasetsHitTestingBase._normalize_hit_testing_query(response.get("query")),
"records": DatasetsHitTestingBase._normalize_hit_testing_records(
marshal(response.get("records", []), hit_testing_record_fields)
),
}
except services.errors.index.IndexNotInitializedError:
raise DatasetNotInitializedError()
except ProviderTokenNotInitError as ex:

View File

@ -8,6 +8,7 @@ from flask import request
from flask_restx import Resource
from pydantic import BaseModel, Field, field_validator, model_validator
from sqlalchemy import select
from werkzeug.exceptions import NotFound
from configs import dify_config
from constants.languages import supported_language
@ -45,6 +46,8 @@ from libs.helper import EmailStr, extract_remote_ip, timezone
from libs.login import current_account_with_tenant, login_required
from models import AccountIntegrate, InvitationCode
from models.account import AccountStatus, InvitationCodeStatus
from models.enums import CreatorUserRole
from models.model import UploadFile
from services.account_service import AccountService
from services.billing_service import BillingService
from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError
@ -322,9 +325,24 @@ class AccountAvatarApi(Resource):
@login_required
@account_initialization_required
def get(self):
current_user, current_tenant_id = current_account_with_tenant()
args = AccountAvatarQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
avatar = args.avatar
avatar_url = file_helpers.get_signed_file_url(args.avatar)
if avatar.startswith(("http://", "https://")):
return {"avatar_url": avatar}
upload_file = db.session.scalar(select(UploadFile).where(UploadFile.id == avatar).limit(1))
if upload_file is None:
raise NotFound("Avatar file not found")
if upload_file.tenant_id != current_tenant_id:
raise NotFound("Avatar file not found")
if upload_file.created_by_role != CreatorUserRole.ACCOUNT or upload_file.created_by != current_user.id:
raise NotFound("Avatar file not found")
avatar_url = file_helpers.get_signed_file_url(upload_file_id=upload_file.id)
return {"avatar_url": avatar_url}
@console_ns.expect(console_ns.models[AccountAvatarPayload.__name__])

View File

@ -468,15 +468,98 @@ class DocumentAddByFileApi(DatasetApiResource):
return documents_and_batch_fields, 200
def _update_document_by_file(tenant_id: str, dataset_id: UUID, document_id: UUID) -> tuple[Mapping[str, object], int]:
"""Update a document from an uploaded file for canonical and deprecated routes."""
dataset_id_str = str(dataset_id)
tenant_id_str = str(tenant_id)
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id_str, Dataset.id == dataset_id_str).limit(1)
)
if not dataset:
raise ValueError("Dataset does not exist.")
if dataset.provider == "external":
raise ValueError("External datasets are not supported.")
args: dict[str, object] = {}
if "data" in request.form:
args = json.loads(request.form["data"])
if "doc_form" not in args:
args["doc_form"] = dataset.chunk_structure or "text_model"
if "doc_language" not in args:
args["doc_language"] = "English"
# indexing_technique is already set in dataset since this is an update
args["indexing_technique"] = dataset.indexing_technique
if "file" in request.files:
# save file info
file = request.files["file"]
if len(request.files) > 1:
raise TooManyFilesError()
if not file.filename:
raise FilenameNotExistsError
if not current_user:
raise ValueError("current_user is required")
try:
upload_file = FileService(db.engine).upload_file(
filename=file.filename,
content=file.read(),
mimetype=file.mimetype,
user=current_user,
source="datasets",
)
except services.errors.file.FileTooLargeError as file_too_large_error:
raise FileTooLargeError(file_too_large_error.description)
except services.errors.file.UnsupportedFileTypeError:
raise UnsupportedFileTypeError()
data_source = {
"type": "upload_file",
"info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}},
}
args["data_source"] = data_source
# validate args
args["original_document_id"] = str(document_id)
knowledge_config = KnowledgeConfig.model_validate(args)
DocumentService.document_create_args_validate(knowledge_config)
try:
documents, _ = DocumentService.save_document_with_dataset_id(
dataset=dataset,
knowledge_config=knowledge_config,
account=dataset.created_by_account,
dataset_process_rule=dataset.latest_process_rule if "process_rule" not in args else None,
created_from="api",
)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
document = documents[0]
documents_and_batch_fields = {"document": marshal(document, document_fields), "batch": document.batch}
return documents_and_batch_fields, 200
@service_api_ns.route(
"/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update_by_file",
"/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update-by-file",
)
class DocumentUpdateByFileApi(DatasetApiResource):
"""Resource for update documents."""
class DeprecatedDocumentUpdateByFileApi(DatasetApiResource):
"""Deprecated resource aliases for file document updates."""
@service_api_ns.doc("update_document_by_file")
@service_api_ns.doc(description="Update an existing document by uploading a file")
@service_api_ns.doc("update_document_by_file_deprecated")
@service_api_ns.doc(deprecated=True)
@service_api_ns.doc(
description=(
"Deprecated legacy alias for updating an existing document by uploading a file. "
"Use PATCH /datasets/{dataset_id}/documents/{document_id} instead."
)
)
@service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@service_api_ns.doc(
responses={
@ -487,82 +570,9 @@ class DocumentUpdateByFileApi(DatasetApiResource):
)
@cloud_edition_billing_resource_check("vector_space", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def post(self, tenant_id, dataset_id, document_id):
"""Update document by upload file."""
dataset = db.session.scalar(
select(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).limit(1)
)
if not dataset:
raise ValueError("Dataset does not exist.")
if dataset.provider == "external":
raise ValueError("External datasets are not supported.")
args = {}
if "data" in request.form:
args = json.loads(request.form["data"])
if "doc_form" not in args:
args["doc_form"] = dataset.chunk_structure or "text_model"
if "doc_language" not in args:
args["doc_language"] = "English"
# get dataset info
dataset_id = str(dataset_id)
tenant_id = str(tenant_id)
# indexing_technique is already set in dataset since this is an update
args["indexing_technique"] = dataset.indexing_technique
if "file" in request.files:
# save file info
file = request.files["file"]
if len(request.files) > 1:
raise TooManyFilesError()
if not file.filename:
raise FilenameNotExistsError
if not current_user:
raise ValueError("current_user is required")
try:
upload_file = FileService(db.engine).upload_file(
filename=file.filename,
content=file.read(),
mimetype=file.mimetype,
user=current_user,
source="datasets",
)
except services.errors.file.FileTooLargeError as file_too_large_error:
raise FileTooLargeError(file_too_large_error.description)
except services.errors.file.UnsupportedFileTypeError:
raise UnsupportedFileTypeError()
data_source = {
"type": "upload_file",
"info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}},
}
args["data_source"] = data_source
# validate args
args["original_document_id"] = str(document_id)
knowledge_config = KnowledgeConfig.model_validate(args)
DocumentService.document_create_args_validate(knowledge_config)
try:
documents, _ = DocumentService.save_document_with_dataset_id(
dataset=dataset,
knowledge_config=knowledge_config,
account=dataset.created_by_account,
dataset_process_rule=dataset.latest_process_rule if "process_rule" not in args else None,
created_from="api",
)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
document = documents[0]
documents_and_batch_fields = {"document": marshal(document, document_fields), "batch": document.batch}
return documents_and_batch_fields, 200
def post(self, tenant_id: str, dataset_id: UUID, document_id: UUID):
"""Update document by file through the deprecated file-update aliases."""
return _update_document_by_file(tenant_id=tenant_id, dataset_id=dataset_id, document_id=document_id)
@service_api_ns.route("/datasets/<uuid:dataset_id>/documents")
@ -876,6 +886,22 @@ class DocumentApi(DatasetApiResource):
return response
@service_api_ns.doc("update_document_by_file")
@service_api_ns.doc(description="Update an existing document by uploading a file")
@service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@service_api_ns.doc(
responses={
200: "Document updated successfully",
401: "Unauthorized - invalid API token",
404: "Document not found",
}
)
@cloud_edition_billing_resource_check("vector_space", "dataset")
@cloud_edition_billing_rate_limit_check("knowledge", "dataset")
def patch(self, tenant_id: str, dataset_id: UUID, document_id: UUID):
"""Update document by file on the canonical document resource."""
return _update_document_by_file(tenant_id=tenant_id, dataset_id=dataset_id, document_id=document_id)
@service_api_ns.doc("delete_document")
@service_api_ns.doc(description="Delete a document")
@service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})

View File

@ -151,6 +151,12 @@ def deserialize_response(raw_data: bytes) -> Response:
response = Response(response=body, status=status_code)
# Replace Flask's default headers (e.g. Content-Type, Content-Length) with the
# parsed ones so we faithfully reproduce the original response. Use Headers.add
# rather than dict-style assignment so that repeated headers such as Set-Cookie
# (and any other multi-valued header per RFC 9110) are preserved instead of
# being overwritten.
response.headers.clear()
for line in lines[1:]:
if not line:
continue
@ -158,6 +164,6 @@ def deserialize_response(raw_data: bytes) -> Response:
if ":" not in line_str:
continue
name, value = line_str.split(":", 1)
response.headers[name] = value.strip()
response.headers.add(name, value.strip())
return response

View File

@ -9,9 +9,9 @@ from typing import TYPE_CHECKING, Any
from pydantic import TypeAdapter
from sqlalchemy import select
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session
from configs import dify_config
from core.db.session_factory import session_factory
from core.entities.model_entities import DefaultModelEntity, DefaultModelProviderEntity
from core.entities.provider_configuration import ProviderConfiguration, ProviderConfigurations, ProviderModelBundle
from core.entities.provider_entities import (
@ -445,7 +445,7 @@ class ProviderManager:
@staticmethod
def _get_all_providers(tenant_id: str) -> dict[str, list[Provider]]:
provider_name_to_provider_records_dict = defaultdict(list)
with Session(db.engine, expire_on_commit=False) as session:
with session_factory.create_session() as session:
stmt = select(Provider).where(Provider.tenant_id == tenant_id, Provider.is_valid == True)
providers = session.scalars(stmt)
for provider in providers:
@ -462,7 +462,7 @@ class ProviderManager:
:return:
"""
provider_name_to_provider_model_records_dict = defaultdict(list)
with Session(db.engine, expire_on_commit=False) as session:
with session_factory.create_session() as session:
stmt = select(ProviderModel).where(ProviderModel.tenant_id == tenant_id, ProviderModel.is_valid == True)
provider_models = session.scalars(stmt)
for provider_model in provider_models:
@ -478,7 +478,7 @@ class ProviderManager:
:return:
"""
provider_name_to_preferred_provider_type_records_dict = {}
with Session(db.engine, expire_on_commit=False) as session:
with session_factory.create_session() as session:
stmt = select(TenantPreferredModelProvider).where(TenantPreferredModelProvider.tenant_id == tenant_id)
preferred_provider_types = session.scalars(stmt)
provider_name_to_preferred_provider_type_records_dict = {
@ -496,7 +496,7 @@ class ProviderManager:
:return:
"""
provider_name_to_provider_model_settings_dict = defaultdict(list)
with Session(db.engine, expire_on_commit=False) as session:
with session_factory.create_session() as session:
stmt = select(ProviderModelSetting).where(ProviderModelSetting.tenant_id == tenant_id)
provider_model_settings = session.scalars(stmt)
for provider_model_setting in provider_model_settings:
@ -514,7 +514,7 @@ class ProviderManager:
:return:
"""
provider_name_to_provider_model_credentials_dict = defaultdict(list)
with Session(db.engine, expire_on_commit=False) as session:
with session_factory.create_session() as session:
stmt = select(ProviderModelCredential).where(ProviderModelCredential.tenant_id == tenant_id)
provider_model_credentials = session.scalars(stmt)
for provider_model_credential in provider_model_credentials:
@ -544,7 +544,7 @@ class ProviderManager:
return {}
provider_name_to_provider_load_balancing_model_configs_dict = defaultdict(list)
with Session(db.engine, expire_on_commit=False) as session:
with session_factory.create_session() as session:
stmt = select(LoadBalancingModelConfig).where(LoadBalancingModelConfig.tenant_id == tenant_id)
provider_load_balancing_configs = session.scalars(stmt)
for provider_load_balancing_config in provider_load_balancing_configs:
@ -578,7 +578,7 @@ class ProviderManager:
:param provider_name: provider name
:return:
"""
with Session(db.engine, expire_on_commit=False) as session:
with session_factory.create_session() as session:
stmt = (
select(ProviderCredential)
.where(
@ -608,7 +608,7 @@ class ProviderManager:
:param model_type: model type
:return:
"""
with Session(db.engine, expire_on_commit=False) as session:
with session_factory.create_session() as session:
stmt = (
select(ProviderModelCredential)
.where(

View File

@ -217,10 +217,11 @@ class RetrievalService:
"""Deduplicate documents in O(n) while preserving first-seen order.
Rules:
- For provider == "dify" and metadata["doc_id"] exists: keep the doc with the highest
metadata["score"] among duplicates; if a later duplicate has no score, ignore it.
- For non-dify documents (or dify without doc_id): deduplicate by content key
(provider, page_content), keeping the first occurrence.
- If metadata["doc_id"] exists (any provider): deduplicate by (provider, doc_id) key;
keep the doc with the highest metadata["score"] among duplicates. If a later duplicate
has no score, ignore it.
- If metadata["doc_id"] is absent: deduplicate by content key (provider, page_content),
keeping the first occurrence.
"""
if not documents:
return documents
@ -231,11 +232,10 @@ class RetrievalService:
order: list[tuple] = []
for doc in documents:
is_dify = doc.provider == "dify"
doc_id = (doc.metadata or {}).get("doc_id") if is_dify else None
doc_id = (doc.metadata or {}).get("doc_id")
if is_dify and doc_id:
key = ("dify", doc_id)
if doc_id:
key = (doc.provider or "dify", doc_id)
if key not in chosen:
chosen[key] = doc
order.append(key)

View File

@ -144,8 +144,20 @@ class Vector:
def get_vector_factory(vector_type: str) -> type[AbstractVectorFactory]:
return get_vector_factory_class(vector_type)
@staticmethod
def _filter_empty_text_documents(documents: list[Document]) -> list[Document]:
filtered_documents = [document for document in documents if document.page_content.strip()]
skipped_count = len(documents) - len(filtered_documents)
if skipped_count:
logger.warning("skip %d empty documents before vector embedding", skipped_count)
return filtered_documents
def create(self, texts: list | None = None, **kwargs):
if texts:
texts = self._filter_empty_text_documents(texts)
if not texts:
return
start = time.time()
logger.info("start embedding %s texts %s", len(texts), start)
batch_size = 1000
@ -203,8 +215,14 @@ class Vector:
logger.info("Embedding %s files took %s s", len(file_documents), time.time() - start)
def add_texts(self, documents: list[Document], **kwargs):
documents = self._filter_empty_text_documents(documents)
if not documents:
return
if kwargs.get("duplicate_check", False):
documents = self._filter_duplicate_texts(documents)
if not documents:
return
embeddings = self._embeddings.embed_documents([document.page_content for document in documents])
self._vector_processor.create(texts=documents, embeddings=embeddings, **kwargs)

View File

@ -1078,6 +1078,13 @@ class ToolManager:
if parameter.form == ToolParameter.ToolParameterForm.FORM:
if variable_pool:
config = tool_configurations.get(parameter.name, {})
selector_value = cls._extract_runtime_selector_value(parameter, config)
if selector_value is not None:
# Selector parameters carry structured dictionaries, not scalar ToolInput values.
runtime_parameters[parameter.name] = selector_value
continue
if not (config and isinstance(config, dict) and config.get("value") is not None):
continue
tool_input = ToolNodeData.ToolInput.model_validate(tool_configurations.get(parameter.name, {}))
@ -1105,5 +1112,39 @@ class ToolManager:
runtime_parameters[parameter.name] = value
return runtime_parameters
@classmethod
def _extract_runtime_selector_value(cls, parameter: ToolParameter, config: Any) -> dict[str, Any] | None:
    """Resolve a model/app selector parameter from its raw tool configuration.

    Returns the structured selector dictionary produced by
    ``parameter.init_frontend_parameter`` when ``config`` (or its nested
    ``value`` entry) looks like a selector payload; otherwise returns
    ``None`` so the caller can fall back to the scalar ToolInput path.
    """
    selector_types = {
        ToolParameter.ToolParameterType.MODEL_SELECTOR,
        ToolParameter.ToolParameterType.APP_SELECTOR,
    }
    if parameter.type not in selector_types:
        return None
    if not isinstance(config, dict):
        return None
    nested = config.get("value")
    if isinstance(nested, dict) and cls._is_selector_value(parameter, nested):
        return cast("dict[str, Any]", parameter.init_frontend_parameter(nested))
    if cls._is_selector_value(parameter, config):
        # Legacy flat payloads may carry wrapper keys alongside the selector fields.
        flattened = {key: val for key, val in config.items() if key not in ("type", "value")}
        return cast("dict[str, Any]", parameter.init_frontend_parameter(flattened))
    return None
@classmethod
def _is_selector_value(cls, parameter: ToolParameter, value: Mapping[str, Any]) -> bool:
    """Check whether ``value`` carries the fields of a selector payload.

    Model selectors require string ``provider``/``model``/``model_type``
    keys; app selectors require a string ``app_id``. Any other parameter
    type is never treated as a selector.
    """
    if parameter.type == ToolParameter.ToolParameterType.MODEL_SELECTOR:
        return all(isinstance(value.get(key), str) for key in ("provider", "model", "model_type"))
    if parameter.type == ToolParameter.ToolParameterType.APP_SELECTOR:
        return isinstance(value.get("app_id"), str)
    return False
ToolManager.load_hardcoded_providers_cache()

View File

@ -272,6 +272,14 @@ def _adapt_tool_node_data_for_graph(node_data: Mapping[str, Any]) -> dict[str, A
normalized_tool_configurations[name] = value
continue
selector_value = _extract_selector_configuration(value)
if selector_value is not None:
# Model/app selectors are dictionaries even when they come through the legacy tool configuration path.
# Move them to tool_parameters so graph validation does not flatten them as primitive constants.
found_legacy_tool_inputs = True
normalized_tool_parameters.setdefault(name, {"type": "constant", "value": selector_value})
continue
input_type = value.get("type")
input_value = value.get("value")
if input_type not in {"mixed", "variable", "constant"}:
@ -310,6 +318,28 @@ def _flatten_legacy_tool_configuration_value(*, input_type: Any, input_value: An
return None
def _extract_selector_configuration(value: Mapping[str, Any]) -> dict[str, Any] | None:
    """Pull a model/app selector dictionary out of a legacy tool configuration entry.

    The selector may live either under the nested ``value`` key or flat on
    the entry itself (minus the ``type``/``value`` wrapper keys). Returns
    ``None`` when ``value`` does not look like a selector configuration.
    """
    nested = value.get("value")
    if isinstance(nested, Mapping) and _is_selector_configuration(nested):
        return dict(nested)
    if not _is_selector_configuration(value):
        return None
    flattened = dict(value)
    flattened.pop("type", None)
    flattened.pop("value", None)
    return flattened
def _is_selector_configuration(value: Mapping[str, Any]) -> bool:
return (
isinstance(value.get("provider"), str)
and isinstance(value.get("model"), str)
and isinstance(value.get("model_type"), str)
) or isinstance(value.get("app_id"), str)
def _normalize_email_recipients(recipients: Mapping[str, Any]) -> dict[str, Any]:
normalized = dict(recipients)

View File

@ -365,7 +365,8 @@ class DifyNodeFactory(NodeFactory):
(including pydantic ValidationError, which subclasses ValueError),
if node type is unknown, or if no implementation exists for the resolved version
"""
typed_node_config = NodeConfigDictAdapter.validate_python(adapt_node_config_for_graph(node_config))
adapted_node_config = adapt_node_config_for_graph(node_config)
typed_node_config = NodeConfigDictAdapter.validate_python(adapted_node_config)
node_id = typed_node_config["id"]
node_data = typed_node_config["data"]
node_class = self._resolve_node_class(node_type=node_data.type, node_version=str(node_data.version))
@ -373,6 +374,11 @@ class DifyNodeFactory(NodeFactory):
# Re-validate using the resolved node class so workflow-local node schemas
# stay explicit and constructors receive the concrete typed payload.
resolved_node_data = self._validate_resolved_node_data(node_class, node_data)
config_for_node_init: BaseNodeData | dict[str, Any]
if isinstance(resolved_node_data, BaseNodeData):
config_for_node_init = resolved_node_data.model_dump(mode="python", by_alias=True)
else:
config_for_node_init = resolved_node_data
node_type = node_data.type
node_init_kwargs_factories: Mapping[NodeType, Callable[[], dict[str, object]]] = {
BuiltinNodeTypes.CODE: lambda: {
@ -442,7 +448,7 @@ class DifyNodeFactory(NodeFactory):
node_init_kwargs = node_init_kwargs_factories.get(node_type, lambda: {})()
return node_class(
node_id=node_id,
config=resolved_node_data,
config=config_for_node_init,
graph_init_params=self.graph_init_params,
graph_runtime_state=self.graph_runtime_state,
**node_init_kwargs,
@ -474,10 +480,7 @@ class DifyNodeFactory(NodeFactory):
include_retriever_attachment_loader: bool,
include_jinja2_template_renderer: bool,
) -> dict[str, object]:
validated_node_data = cast(
LLMCompatibleNodeData,
self._validate_resolved_node_data(node_class=node_class, node_data=node_data),
)
validated_node_data = cast(LLMCompatibleNodeData, node_data)
model_instance = self._build_model_instance_for_llm_node(validated_node_data)
node_init_kwargs: dict[str, object] = {
"credentials_provider": self._llm_credentials_provider,

View File

@ -501,11 +501,15 @@ class DifyToolNodeRuntime(ToolNodeRuntimeProtocol):
@staticmethod
def _build_tool_runtime_spec(node_data: ToolNodeData) -> _WorkflowToolRuntimeSpec:
tool_configurations = dict(node_data.tool_configurations)
tool_configurations.update(
{name: tool_input.model_dump(mode="python") for name, tool_input in node_data.tool_parameters.items()}
)
return _WorkflowToolRuntimeSpec(
provider_type=CoreToolProviderType(node_data.provider_type.value),
provider_id=node_data.provider_id,
tool_name=node_data.tool_name,
tool_configurations=dict(node_data.tool_configurations),
tool_configurations=tool_configurations,
credential_id=node_data.credential_id,
)

View File

@ -3,6 +3,7 @@ import logging
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ToolParameterConfigurationManager
from core.workflow.human_input_adapter import adapt_node_config_for_graph
from events.app_event import app_draft_workflow_was_synced
from graphon.nodes import BuiltinNodeTypes
from graphon.nodes.tool.entities import ToolEntity
@ -19,7 +20,8 @@ def handle(sender, **kwargs):
for node_data in synced_draft_workflow.graph_dict.get("nodes", []):
if node_data.get("data", {}).get("type") == BuiltinNodeTypes.TOOL:
try:
tool_entity = ToolEntity.model_validate(node_data["data"])
adapted_node_data = adapt_node_config_for_graph(node_data)
tool_entity = ToolEntity.model_validate(adapted_node_data["data"])
provider_type = ToolProviderType(tool_entity.provider_type.value)
tool_runtime = ToolManager.get_tool_runtime(
provider_type=provider_type,

View File

@ -298,7 +298,7 @@ def _build_from_datasource_file(
raise ValueError(f"DatasourceFile {mapping.get('datasource_file_id')} not found")
extension = "." + datasource_file.key.split(".")[-1] if "." in datasource_file.key else ".bin"
detected_file_type = standardize_file_type(extension="." + extension, mime_type=datasource_file.mime_type)
detected_file_type = standardize_file_type(extension=extension, mime_type=datasource_file.mime_type)
file_type = _resolve_file_type(
detected_file_type=detected_file_type,
specified_type=mapping.get("type"),

View File

@ -19,8 +19,13 @@ from werkzeug.http import parse_options_header
from core.helper import ssrf_proxy
def extract_filename(url_path: str, content_disposition: str | None) -> str | None:
"""Extract a safe filename from Content-Disposition or the request URL path."""
def extract_filename(url_or_path: str, content_disposition: str | None) -> str | None:
"""Extract a safe filename from Content-Disposition or the request URL path.
Handles full URLs, paths with query strings, hash fragments, and percent-encoded segments.
Query strings and hash fragments are stripped from the URL before extracting the basename.
Percent-encoded characters in the path are decoded safely.
"""
filename: str | None = None
if content_disposition:
filename_star_match = re.search(r"filename\*=([^;]+)", content_disposition)
@ -47,8 +52,13 @@ def extract_filename(url_path: str, content_disposition: str | None) -> str | No
filename = urllib.parse.unquote(raw)
if not filename:
candidate = os.path.basename(url_path)
filename = urllib.parse.unquote(candidate) if candidate else None
# Parse the URL to extract just the path, stripping query strings and fragments
# This handles both full URLs and bare paths
parsed = urllib.parse.urlparse(url_or_path)
path = parsed.path
candidate = os.path.basename(path)
# Decode percent-encoded characters, with safe fallback for malformed input
filename = urllib.parse.unquote(candidate, errors="replace") if candidate else None
if filename:
filename = os.path.basename(filename)

View File

@ -2182,7 +2182,7 @@ class ApiToken(Base): # bug: this uses setattr so idk the field.
return result
class UploadFile(Base):
class UploadFile(TypeBase):
__tablename__ = "upload_files"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="upload_file_pkey"),
@ -2190,9 +2190,12 @@ class UploadFile(Base):
)
# NOTE: The `id` field is generated within the application to minimize extra roundtrips
# (especially when generating `source_url`).
# The `server_default` serves as a fallback mechanism.
id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()))
# (especially when generating `source_url`) and keep model metadata portable across databases.
id: Mapped[str] = mapped_column(
StringUUID,
init=False,
default_factory=lambda: str(uuid4()),
)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
storage_type: Mapped[StorageType] = mapped_column(EnumText(StorageType, length=255), nullable=False)
key: Mapped[str] = mapped_column(String(255), nullable=False)
@ -2200,16 +2203,6 @@ class UploadFile(Base):
size: Mapped[int] = mapped_column(sa.Integer, nullable=False)
extension: Mapped[str] = mapped_column(String(255), nullable=False)
mime_type: Mapped[str] = mapped_column(String(255), nullable=True)
# The `created_by_role` field indicates whether the file was created by an `Account` or an `EndUser`.
# Its value is derived from the `CreatorUserRole` enumeration.
created_by_role: Mapped[CreatorUserRole] = mapped_column(
EnumText(CreatorUserRole, length=255),
nullable=False,
server_default=sa.text("'account'"),
default=CreatorUserRole.ACCOUNT,
)
# The `created_by` field stores the ID of the entity that created this upload file.
#
# If `created_by_role` is `ACCOUNT`, it corresponds to `Account.id`.
@ -2228,10 +2221,18 @@ class UploadFile(Base):
# `used` may indicate whether the file has been utilized by another service.
used: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false"))
# The `created_by_role` field indicates whether the file was created by an `Account` or an `EndUser`.
# Its value is derived from the `CreatorUserRole` enumeration.
created_by_role: Mapped[CreatorUserRole] = mapped_column(
EnumText(CreatorUserRole, length=255),
nullable=False,
server_default=sa.text("'account'"),
default=CreatorUserRole.ACCOUNT,
)
# `used_by` may indicate the ID of the user who utilized this file.
used_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
used_at: Mapped[datetime | None] = mapped_column(sa.DateTime, nullable=True)
hash: Mapped[str | None] = mapped_column(String(255), nullable=True)
used_by: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
used_at: Mapped[datetime | None] = mapped_column(sa.DateTime, nullable=True, default=None)
hash: Mapped[str | None] = mapped_column(String(255), nullable=True, default=None)
source_url: Mapped[str] = mapped_column(LongText, default="")
def __init__(

View File

@ -9,11 +9,11 @@ import sqlalchemy as sa
from sqlalchemy import DateTime, String, func, select, text
from sqlalchemy.orm import Mapped, mapped_column
from core.db.session_factory import session_factory
from graphon.model_runtime.entities.model_entities import ModelType
from libs.uuid_utils import uuidv7
from .base import TypeBase
from .engine import db
from .enums import CredentialSourceType, PaymentStatus, ProviderQuotaType
from .types import EnumText, LongText, StringUUID
@ -82,7 +82,8 @@ class Provider(TypeBase):
@cached_property
def credential(self):
if self.credential_id:
return db.session.scalar(select(ProviderCredential).where(ProviderCredential.id == self.credential_id))
with session_factory.create_session() as session:
return session.scalar(select(ProviderCredential).where(ProviderCredential.id == self.credential_id))
@property
def credential_name(self):
@ -145,9 +146,10 @@ class ProviderModel(TypeBase):
@cached_property
def credential(self):
if self.credential_id:
return db.session.scalar(
select(ProviderModelCredential).where(ProviderModelCredential.id == self.credential_id)
)
with session_factory.create_session() as session:
return session.scalar(
select(ProviderModelCredential).where(ProviderModelCredential.id == self.credential_id)
)
@property
def credential_name(self):

View File

@ -50,7 +50,7 @@ from libs.uuid_utils import uuidv7
from ._workflow_exc import NodeNotFoundError, WorkflowDataError
if TYPE_CHECKING:
from .model import AppMode, UploadFile
from .model import AppMode
from constants import DEFAULT_FILE_NUMBER_LIMITS, HIDDEN_VALUE
@ -63,6 +63,10 @@ from .account import Account
from .base import Base, DefaultFieldsDCMixin, TypeBase
from .engine import db
from .enums import CreatorUserRole, DraftVariableType, ExecutionOffLoadType, WorkflowRunTriggeredFrom
# UploadFile uses TypeBase while workflow execution offload models use Base, so relationships
# must target the class object directly instead of relying on string lookup across registries.
from .model import UploadFile
from .types import EnumText, LongText, StringUUID
from .utils.file_input_compat import (
build_file_from_mapping_without_lookup,
@ -1096,8 +1100,6 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo
@staticmethod
def _load_full_content(session: orm.Session, file_id: str, storage: Storage):
from .model import UploadFile
stmt = sa.select(UploadFile).where(UploadFile.id == file_id)
file = session.scalars(stmt).first()
assert file is not None, f"UploadFile with id {file_id} should exist but not"
@ -1191,10 +1193,11 @@ class WorkflowNodeExecutionOffload(Base):
)
file: Mapped[Optional["UploadFile"]] = orm.relationship(
UploadFile,
foreign_keys=[file_id],
lazy="raise",
uselist=False,
primaryjoin="WorkflowNodeExecutionOffload.file_id == UploadFile.id",
primaryjoin=lambda: orm.foreign(WorkflowNodeExecutionOffload.file_id) == UploadFile.id,
)
@ -1565,12 +1568,14 @@ class WorkflowDraftVariable(Base):
),
)
# Relationship to WorkflowDraftVariableFile
# WorkflowDraftVariableFile uses TypeBase while WorkflowDraftVariable uses Base, so the relationship
# must resolve the class object lazily instead of relying on string lookup across registries.
variable_file: Mapped[Optional["WorkflowDraftVariableFile"]] = orm.relationship(
lambda: WorkflowDraftVariableFile,
foreign_keys=[file_id],
lazy="raise",
uselist=False,
primaryjoin="WorkflowDraftVariableFile.id == WorkflowDraftVariable.file_id",
primaryjoin=lambda: orm.foreign(WorkflowDraftVariable.file_id) == WorkflowDraftVariableFile.id,
)
# Cache for deserialized value
@ -1889,7 +1894,7 @@ class WorkflowDraftVariable(Base):
return self.last_edited_at is not None
class WorkflowDraftVariableFile(Base):
class WorkflowDraftVariableFile(TypeBase):
"""Stores metadata about files associated with large workflow draft variables.
This model acts as an intermediary between WorkflowDraftVariable and UploadFile,
@ -1903,18 +1908,7 @@ class WorkflowDraftVariableFile(Base):
__tablename__ = "workflow_draft_variable_files"
# Primary key
id: Mapped[str] = mapped_column(
StringUUID,
primary_key=True,
default=lambda: str(uuidv7()),
)
created_at: Mapped[datetime] = mapped_column(
DateTime,
nullable=False,
default=naive_utc_now,
server_default=func.current_timestamp(),
)
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default_factory=lambda: str(uuidv7()), init=False)
tenant_id: Mapped[str] = mapped_column(
StringUUID,
@ -1966,12 +1960,21 @@ class WorkflowDraftVariableFile(Base):
nullable=False,
)
# Relationship to UploadFile
# Rows are created with `upload_file_id`; callers should load this relationship explicitly when needed.
upload_file: Mapped["UploadFile"] = orm.relationship(
UploadFile,
foreign_keys=[upload_file_id],
lazy="raise",
init=False,
uselist=False,
primaryjoin="WorkflowDraftVariableFile.upload_file_id == UploadFile.id",
primaryjoin=lambda: orm.foreign(WorkflowDraftVariableFile.upload_file_id) == UploadFile.id,
)
created_at: Mapped[datetime] = mapped_column(
DateTime,
nullable=False,
default_factory=naive_utc_now,
server_default=func.current_timestamp(),
)

View File

@ -1,12 +1,12 @@
[project]
name = "dify-api"
version = "1.13.3"
version = "1.14.0"
requires-python = "~=3.12.0"
dependencies = [
# Legacy: mature and widely deployed
"bleach>=6.3.0",
"boto3>=1.42.96",
"boto3>=1.43.3",
"celery>=5.6.3",
"croniter>=6.2.2",
"flask>=3.1.3,<4.0.0",
@ -14,7 +14,7 @@ dependencies = [
"gevent>=26.4.0",
"gevent-websocket>=0.10.1",
"gmpy2>=2.3.0",
"google-api-python-client>=2.194.0",
"google-api-python-client>=2.195.0",
"gunicorn>=25.3.0",
"psycogreen>=1.0.2",
"psycopg2-binary>=2.9.12",
@ -31,7 +31,7 @@ dependencies = [
"flask-migrate>=4.1.0,<5.0.0",
"flask-orjson>=2.0.0,<3.0.0",
"flask-restx>=1.3.2,<2.0.0",
"google-cloud-aiplatform>=1.148.1,<2.0.0",
"google-cloud-aiplatform>=1.149.0,<2.0.0",
"httpx[socks]>=0.28.1,<1.0.0",
"opentelemetry-distro>=0.62b1,<1.0.0",
"opentelemetry-instrumentation-celery>=0.62b0,<1.0.0",
@ -127,7 +127,7 @@ dev = [
"testcontainers>=4.14.2",
"types-aiofiles>=25.1.0",
"types-beautifulsoup4>=4.12.0",
"types-cachetools>=6.2.0",
"types-cachetools>=7.0.0.20260503",
"types-colorama>=0.4.15",
"types-defusedxml>=0.7.0",
"types-deprecated>=1.3.1",
@ -135,7 +135,7 @@ dev = [
"types-flask-cors>=6.0.0",
"types-flask-migrate>=4.1.0",
"types-gevent>=26.4.0",
"types-greenlet>=3.4.0",
"types-greenlet>=3.5.0.20260428",
"types-html5lib>=1.1.11",
"types-markdown>=3.10.2",
"types-oauthlib>=3.3.0",
@ -143,7 +143,7 @@ dev = [
"types-olefile>=0.47.0",
"types-openpyxl>=3.1.5",
"types-pexpect>=4.9.0",
"types-protobuf>=7.34.1",
"types-protobuf>=7.34.1.20260503",
"types-psutil>=7.2.2",
"types-psycopg2>=2.9.21.20260422",
"types-pygments>=2.20.0",
@ -158,11 +158,11 @@ dev = [
"types-tensorflow>=2.18.0.20260408",
"types-tqdm>=4.67.3.20260408",
"types-ujson>=5.10.0",
"boto3-stubs>=1.42.96",
"boto3-stubs>=1.43.2",
"types-jmespath>=1.1.0.20260408",
"hypothesis>=6.152.3",
"hypothesis>=6.152.4",
"types_pyOpenSSL>=24.1.0",
"types_cffi>=2.0.0.20260408",
"types_cffi>=2.0.0.20260429",
"types_setuptools>=82.0.0.20260408",
"pandas-stubs>=3.0.0",
"scipy-stubs>=1.17.1.4",
@ -184,7 +184,7 @@ dev = [
############################################################
storage = [
"azure-storage-blob>=12.28.0",
"bce-python-sdk>=0.9.70",
"bce-python-sdk>=0.9.71",
"cos-python-sdk-v5>=1.9.42",
"esdk-obs-python>=3.22.2",
"google-cloud-storage>=3.10.1",

View File

@ -3,6 +3,7 @@ from typing import Any, Literal
from pydantic import BaseModel, field_validator
from core.rag.entities import Rule
from core.rag.entities.metadata_entities import MetadataFilteringCondition
from core.rag.index_processor.constant.index_type import IndexStructureType
from core.rag.retrieval.retrieval_methods import RetrievalMethod
@ -83,6 +84,7 @@ class RetrievalModel(BaseModel):
score_threshold_enabled: bool
score_threshold: float | None = None
weights: WeightModel | None = None
metadata_filtering_conditions: MetadataFilteringCondition | None = None
class MetaDataConfig(BaseModel):

View File

@ -1083,10 +1083,9 @@ class DraftVariableSaver:
mimetype=content_type,
user=self._user,
)
assert self._user.current_tenant_id
# Create WorkflowDraftVariableFile record
variable_file = WorkflowDraftVariableFile(
id=uuidv7(),
upload_file_id=upload_file.id,
size=original_size,
length=original_length,
@ -1095,6 +1094,7 @@ class DraftVariableSaver:
tenant_id=self._user.current_tenant_id,
user_id=self._user.id,
)
variable_file.id = str(uuidv7())
engine = bind = self._session.get_bind()
assert isinstance(engine, Engine)
with sessionmaker(bind=engine, expire_on_commit=False).begin() as session:

View File

@ -1,4 +1,5 @@
import unittest
from __future__ import annotations
from datetime import UTC, datetime
from unittest.mock import patch
from uuid import uuid4
@ -16,7 +17,7 @@ from models.enums import CreatorUserRole
@pytest.mark.usefixtures("flask_req_ctx_with_containers")
class TestStorageKeyLoader(unittest.TestCase):
class TestStorageKeyLoader:
"""
Integration tests for StorageKeyLoader class.
@ -24,110 +25,82 @@ class TestStorageKeyLoader(unittest.TestCase):
with different transfer methods: LOCAL_FILE, REMOTE_URL, and TOOL_FILE.
"""
def setUp(self):
"""Set up test data before each test method."""
self.session = db.session()
self.tenant_id = str(uuid4())
self.user_id = str(uuid4())
self.conversation_id = str(uuid4())
# Create test data that will be cleaned up after each test
self.test_upload_files = []
self.test_tool_files = []
# Create StorageKeyLoader instance
self.loader = StorageKeyLoader(
self.session,
self.tenant_id,
access_controller=DatabaseFileAccessController(),
)
def tearDown(self):
"""Clean up test data after each test method."""
self.session.rollback()
# ------------------------------------------------------------------
# Per-test helpers (use db_session_with_containers as parameter)
# ------------------------------------------------------------------
@staticmethod
def _create_upload_file(
self, file_id: str | None = None, storage_key: str | None = None, tenant_id: str | None = None
session: Session,
tenant_id: str,
user_id: str,
*,
file_id: str | None = None,
storage_key: str | None = None,
override_tenant_id: str | None = None,
) -> UploadFile:
"""Helper method to create an UploadFile record for testing."""
if file_id is None:
file_id = str(uuid4())
if storage_key is None:
storage_key = f"test_storage_key_{uuid4()}"
if tenant_id is None:
tenant_id = self.tenant_id
"""Create and flush an UploadFile record for testing."""
upload_file = UploadFile(
tenant_id=tenant_id,
tenant_id=override_tenant_id if override_tenant_id is not None else tenant_id,
storage_type=StorageType.LOCAL,
key=storage_key,
key=storage_key or f"test_storage_key_{uuid4()}",
name="test_file.txt",
size=1024,
extension=".txt",
mime_type="text/plain",
created_by_role=CreatorUserRole.ACCOUNT,
created_by=self.user_id,
created_by=user_id,
created_at=datetime.now(UTC),
used=False,
)
upload_file.id = file_id
self.session.add(upload_file)
self.session.flush()
self.test_upload_files.append(upload_file)
upload_file.id = file_id or str(uuid4())
session.add(upload_file)
session.flush()
return upload_file
@staticmethod
def _create_tool_file(
self, file_id: str | None = None, file_key: str | None = None, tenant_id: str | None = None
session: Session,
tenant_id: str,
user_id: str,
conversation_id: str,
*,
file_id: str | None = None,
file_key: str | None = None,
override_tenant_id: str | None = None,
) -> ToolFile:
"""Helper method to create a ToolFile record for testing."""
if file_id is None:
file_id = str(uuid4())
if file_key is None:
file_key = f"test_file_key_{uuid4()}"
if tenant_id is None:
tenant_id = self.tenant_id
"""Create and flush a ToolFile record for testing."""
tool_file = ToolFile(
user_id=self.user_id,
tenant_id=tenant_id,
conversation_id=self.conversation_id,
file_key=file_key,
user_id=user_id,
tenant_id=override_tenant_id if override_tenant_id is not None else tenant_id,
conversation_id=conversation_id,
file_key=file_key or f"test_file_key_{uuid4()}",
mimetype="text/plain",
original_url="http://example.com/file.txt",
name="test_tool_file.txt",
size=2048,
)
tool_file.id = file_id
self.session.add(tool_file)
self.session.flush()
self.test_tool_files.append(tool_file)
tool_file.id = file_id or str(uuid4())
session.add(tool_file)
session.flush()
return tool_file
def _create_file(self, related_id: str, transfer_method: FileTransferMethod, tenant_id: str | None = None) -> File:
"""Helper method to create a File object for testing."""
if tenant_id is None:
tenant_id = self.tenant_id
# Set related_id for LOCAL_FILE and TOOL_FILE transfer methods
file_related_id = None
remote_url = None
if transfer_method in (FileTransferMethod.LOCAL_FILE, FileTransferMethod.TOOL_FILE):
file_related_id = related_id
elif transfer_method == FileTransferMethod.REMOTE_URL:
remote_url = "https://example.com/test_file.txt"
file_related_id = related_id
@staticmethod
def _create_file(
tenant_id: str,
related_id: str,
transfer_method: FileTransferMethod,
*,
override_tenant_id: str | None = None,
) -> File:
"""Build a File value-object for testing."""
remote_url = "https://example.com/test_file.txt" if transfer_method == FileTransferMethod.REMOTE_URL else None
return File(
file_id=str(uuid4()), # Generate new UUID for File.id
tenant_id=tenant_id,
file_id=str(uuid4()),
tenant_id=override_tenant_id if override_tenant_id is not None else tenant_id,
file_type=FileType.DOCUMENT,
transfer_method=transfer_method,
related_id=file_related_id,
related_id=related_id,
remote_url=remote_url,
filename="test_file.txt",
extension=".txt",
@ -136,240 +109,280 @@ class TestStorageKeyLoader(unittest.TestCase):
storage_key="initial_key",
)
def test_load_storage_keys_local_file(self):
# ------------------------------------------------------------------
# Tests
# ------------------------------------------------------------------
def test_load_storage_keys_local_file(self, db_session_with_containers: Session):
"""Test loading storage keys for LOCAL_FILE transfer method."""
# Create test data
upload_file = self._create_upload_file()
file = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
tenant_id = str(uuid4())
user_id = str(uuid4())
# Load storage keys
self.loader.load_storage_keys([file])
upload_file = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
file = self._create_file(tenant_id, related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
loader.load_storage_keys([file])
# Verify storage key was loaded correctly
assert file._storage_key == upload_file.key
def test_load_storage_keys_remote_url(self):
def test_load_storage_keys_remote_url(self, db_session_with_containers: Session):
"""Test loading storage keys for REMOTE_URL transfer method."""
# Create test data
upload_file = self._create_upload_file()
file = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.REMOTE_URL)
tenant_id = str(uuid4())
user_id = str(uuid4())
# Load storage keys
self.loader.load_storage_keys([file])
upload_file = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
file = self._create_file(tenant_id, related_id=upload_file.id, transfer_method=FileTransferMethod.REMOTE_URL)
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
loader.load_storage_keys([file])
# Verify storage key was loaded correctly
assert file._storage_key == upload_file.key
def test_load_storage_keys_tool_file(self):
def test_load_storage_keys_tool_file(self, db_session_with_containers: Session):
"""Test loading storage keys for TOOL_FILE transfer method."""
# Create test data
tool_file = self._create_tool_file()
file = self._create_file(related_id=tool_file.id, transfer_method=FileTransferMethod.TOOL_FILE)
tenant_id = str(uuid4())
user_id = str(uuid4())
conversation_id = str(uuid4())
# Load storage keys
self.loader.load_storage_keys([file])
tool_file = self._create_tool_file(db_session_with_containers, tenant_id, user_id, conversation_id)
file = self._create_file(tenant_id, related_id=tool_file.id, transfer_method=FileTransferMethod.TOOL_FILE)
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
loader.load_storage_keys([file])
# Verify storage key was loaded correctly
assert file._storage_key == tool_file.file_key
def test_load_storage_keys_mixed_methods(self):
def test_load_storage_keys_mixed_methods(self, db_session_with_containers: Session):
"""Test batch loading with mixed transfer methods."""
# Create test data for different transfer methods
upload_file1 = self._create_upload_file()
upload_file2 = self._create_upload_file()
tool_file = self._create_tool_file()
tenant_id = str(uuid4())
user_id = str(uuid4())
conversation_id = str(uuid4())
file1 = self._create_file(related_id=upload_file1.id, transfer_method=FileTransferMethod.LOCAL_FILE)
file2 = self._create_file(related_id=upload_file2.id, transfer_method=FileTransferMethod.REMOTE_URL)
file3 = self._create_file(related_id=tool_file.id, transfer_method=FileTransferMethod.TOOL_FILE)
upload_file1 = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
upload_file2 = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
tool_file = self._create_tool_file(db_session_with_containers, tenant_id, user_id, conversation_id)
files = [file1, file2, file3]
file1 = self._create_file(tenant_id, related_id=upload_file1.id, transfer_method=FileTransferMethod.LOCAL_FILE)
file2 = self._create_file(tenant_id, related_id=upload_file2.id, transfer_method=FileTransferMethod.REMOTE_URL)
file3 = self._create_file(tenant_id, related_id=tool_file.id, transfer_method=FileTransferMethod.TOOL_FILE)
# Load storage keys
self.loader.load_storage_keys(files)
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
loader.load_storage_keys([file1, file2, file3])
# Verify all storage keys were loaded correctly
assert file1._storage_key == upload_file1.key
assert file2._storage_key == upload_file2.key
assert file3._storage_key == tool_file.file_key
def test_load_storage_keys_empty_list(self):
"""Test with empty file list."""
# Should not raise any exceptions
self.loader.load_storage_keys([])
def test_load_storage_keys_empty_list(self, db_session_with_containers: Session):
"""Test with empty file list — should not raise."""
tenant_id = str(uuid4())
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
loader.load_storage_keys([])
def test_load_storage_keys_ignores_legacy_file_tenant_id(self):
def test_load_storage_keys_ignores_legacy_file_tenant_id(self, db_session_with_containers: Session):
"""Legacy file tenant_id should not override the loader tenant scope."""
upload_file = self._create_upload_file()
tenant_id = str(uuid4())
user_id = str(uuid4())
upload_file = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
file = self._create_file(
related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE, tenant_id=str(uuid4())
tenant_id,
related_id=upload_file.id,
transfer_method=FileTransferMethod.LOCAL_FILE,
override_tenant_id=str(uuid4()),
)
self.loader.load_storage_keys([file])
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
loader.load_storage_keys([file])
assert file._storage_key == upload_file.key
def test_load_storage_keys_missing_file_id(self):
"""Test with None file.related_id."""
# Create a file with valid parameters first, then manually set related_id to None
file = self._create_file(related_id=str(uuid4()), transfer_method=FileTransferMethod.LOCAL_FILE)
def test_load_storage_keys_missing_file_id(self, db_session_with_containers: Session):
"""Test with None file.related_id — should raise ValueError."""
tenant_id = str(uuid4())
user_id = str(uuid4())
upload_file = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
file = self._create_file(tenant_id, related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
file.related_id = None
# Should raise ValueError for None file related_id
with pytest.raises(ValueError) as context:
self.loader.load_storage_keys([file])
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
with pytest.raises(ValueError, match="file id should not be None."):
loader.load_storage_keys([file])
assert str(context.value) == "file id should not be None."
def test_load_storage_keys_nonexistent_upload_file_records(self, db_session_with_containers: Session):
"""Test with missing UploadFile database records — should raise ValueError."""
tenant_id = str(uuid4())
file = self._create_file(tenant_id, related_id=str(uuid4()), transfer_method=FileTransferMethod.LOCAL_FILE)
def test_load_storage_keys_nonexistent_upload_file_records(self):
"""Test with missing UploadFile database records."""
# Create file with non-existent upload file id
non_existent_id = str(uuid4())
file = self._create_file(related_id=non_existent_id, transfer_method=FileTransferMethod.LOCAL_FILE)
# Should raise ValueError for missing record
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
with pytest.raises(ValueError):
self.loader.load_storage_keys([file])
loader.load_storage_keys([file])
def test_load_storage_keys_nonexistent_tool_file_records(self):
"""Test with missing ToolFile database records."""
# Create file with non-existent tool file id
non_existent_id = str(uuid4())
file = self._create_file(related_id=non_existent_id, transfer_method=FileTransferMethod.TOOL_FILE)
def test_load_storage_keys_nonexistent_tool_file_records(self, db_session_with_containers: Session):
"""Test with missing ToolFile database records — should raise ValueError."""
tenant_id = str(uuid4())
file = self._create_file(tenant_id, related_id=str(uuid4()), transfer_method=FileTransferMethod.TOOL_FILE)
# Should raise ValueError for missing record
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
with pytest.raises(ValueError):
self.loader.load_storage_keys([file])
loader.load_storage_keys([file])
def test_load_storage_keys_invalid_uuid(self):
"""Test with invalid UUID format."""
# Create a file with valid parameters first, then manually set invalid related_id
file = self._create_file(related_id=str(uuid4()), transfer_method=FileTransferMethod.LOCAL_FILE)
def test_load_storage_keys_invalid_uuid(self, db_session_with_containers: Session):
"""Test with invalid UUID format — should raise ValueError."""
tenant_id = str(uuid4())
user_id = str(uuid4())
upload_file = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
file = self._create_file(tenant_id, related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
file.related_id = "invalid-uuid-format"
# Should raise ValueError for invalid UUID
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
with pytest.raises(ValueError):
self.loader.load_storage_keys([file])
loader.load_storage_keys([file])
def test_load_storage_keys_batch_efficiency(self):
"""Test batched operations use efficient queries."""
# Create multiple files of different types
upload_files = [self._create_upload_file() for _ in range(3)]
tool_files = [self._create_tool_file() for _ in range(2)]
def test_load_storage_keys_batch_efficiency(self, db_session_with_containers: Session):
"""Batched operations should issue exactly 2 queries for mixed file types."""
tenant_id = str(uuid4())
user_id = str(uuid4())
conversation_id = str(uuid4())
files = []
files.extend(
[self._create_file(related_id=uf.id, transfer_method=FileTransferMethod.LOCAL_FILE) for uf in upload_files]
upload_files = [self._create_upload_file(db_session_with_containers, tenant_id, user_id) for _ in range(3)]
tool_files = [
self._create_tool_file(db_session_with_containers, tenant_id, user_id, conversation_id) for _ in range(2)
]
files = [
self._create_file(tenant_id, related_id=uf.id, transfer_method=FileTransferMethod.LOCAL_FILE)
for uf in upload_files
] + [
self._create_file(tenant_id, related_id=tf.id, transfer_method=FileTransferMethod.TOOL_FILE)
for tf in tool_files
]
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
files.extend(
[self._create_file(related_id=tf.id, transfer_method=FileTransferMethod.TOOL_FILE) for tf in tool_files]
)
# Mock the session to count queries
with patch.object(self.session, "scalars", wraps=self.session.scalars) as mock_scalars:
self.loader.load_storage_keys(files)
# Should make exactly 2 queries (one for upload_files, one for tool_files)
with patch.object(
db_session_with_containers, "scalars", wraps=db_session_with_containers.scalars
) as mock_scalars:
loader.load_storage_keys(files)
# Exactly 2 DB round-trips: one for UploadFile, one for ToolFile
assert mock_scalars.call_count == 2
# Verify all storage keys were loaded correctly
for i, file in enumerate(files[:3]):
assert file._storage_key == upload_files[i].key
for i, file in enumerate(files[3:]):
assert file._storage_key == tool_files[i].file_key
def test_load_storage_keys_tenant_isolation(self):
"""Test that tenant isolation works correctly."""
# Create files for different tenants
def test_load_storage_keys_tenant_isolation(self, db_session_with_containers: Session):
"""Loader should not surface records belonging to a different tenant."""
tenant_id = str(uuid4())
other_tenant_id = str(uuid4())
user_id = str(uuid4())
# Create upload file for current tenant
upload_file_current = self._create_upload_file()
upload_file_current = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
file_current = self._create_file(
related_id=upload_file_current.id, transfer_method=FileTransferMethod.LOCAL_FILE
tenant_id, related_id=upload_file_current.id, transfer_method=FileTransferMethod.LOCAL_FILE
)
# Create upload file for other tenant (but don't add to cleanup list)
upload_file_other = UploadFile(
tenant_id=other_tenant_id,
storage_type=StorageType.LOCAL,
key="other_tenant_key",
name="other_file.txt",
size=1024,
extension=".txt",
mime_type="text/plain",
created_by_role=CreatorUserRole.ACCOUNT,
created_by=self.user_id,
created_at=datetime.now(UTC),
used=False,
upload_file_other = self._create_upload_file(
db_session_with_containers,
tenant_id,
user_id,
override_tenant_id=other_tenant_id,
)
upload_file_other.id = str(uuid4())
self.session.add(upload_file_other)
self.session.flush()
# Create file for other tenant but try to load with current tenant's loader
file_other = self._create_file(
related_id=upload_file_other.id, transfer_method=FileTransferMethod.LOCAL_FILE, tenant_id=other_tenant_id
tenant_id,
related_id=upload_file_other.id,
transfer_method=FileTransferMethod.LOCAL_FILE,
override_tenant_id=other_tenant_id,
)
# Should raise ValueError due to tenant mismatch
with pytest.raises(ValueError) as context:
self.loader.load_storage_keys([file_other])
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
assert "Upload file not found for id:" in str(context.value)
with pytest.raises(ValueError, match="Upload file not found for id:"):
loader.load_storage_keys([file_other])
# Current tenant's file should still work
self.loader.load_storage_keys([file_current])
# Current-tenant file still resolves correctly
loader.load_storage_keys([file_current])
assert file_current._storage_key == upload_file_current.key
def test_load_storage_keys_mixed_tenant_batch(self):
"""Test batch with mixed tenant files (should fail on first mismatch)."""
# Create files for current tenant
upload_file_current = self._create_upload_file()
def test_load_storage_keys_mixed_tenant_batch(self, db_session_with_containers: Session):
"""A batch containing a foreign-tenant file should fail on the mismatch."""
tenant_id = str(uuid4())
user_id = str(uuid4())
upload_file_current = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
file_current = self._create_file(
related_id=upload_file_current.id, transfer_method=FileTransferMethod.LOCAL_FILE
tenant_id, related_id=upload_file_current.id, transfer_method=FileTransferMethod.LOCAL_FILE
)
# Create file for different tenant
other_tenant_id = str(uuid4())
file_other = self._create_file(
related_id=str(uuid4()), transfer_method=FileTransferMethod.LOCAL_FILE, tenant_id=other_tenant_id
tenant_id,
related_id=str(uuid4()),
transfer_method=FileTransferMethod.LOCAL_FILE,
override_tenant_id=str(uuid4()),
)
# Should raise ValueError on tenant mismatch
with pytest.raises(ValueError) as context:
self.loader.load_storage_keys([file_current, file_other])
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
with pytest.raises(ValueError, match="Upload file not found for id:"):
loader.load_storage_keys([file_current, file_other])
assert "Upload file not found for id:" in str(context.value)
def test_load_storage_keys_duplicate_file_ids(self, db_session_with_containers: Session):
"""Duplicate file IDs in the batch should be handled gracefully."""
tenant_id = str(uuid4())
user_id = str(uuid4())
def test_load_storage_keys_duplicate_file_ids(self):
"""Test handling of duplicate file IDs in the batch."""
# Create upload file
upload_file = self._create_upload_file()
upload_file = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
file1 = self._create_file(tenant_id, related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
file2 = self._create_file(tenant_id, related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
# Create two File objects with same related_id
file1 = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
file2 = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
loader = StorageKeyLoader(
db_session_with_containers, tenant_id, access_controller=DatabaseFileAccessController()
)
loader.load_storage_keys([file1, file2])
# Should handle duplicates gracefully
self.loader.load_storage_keys([file1, file2])
# Both files should have the same storage key
assert file1._storage_key == upload_file.key
assert file2._storage_key == upload_file.key
def test_load_storage_keys_session_isolation(self):
"""Test that the loader uses the provided session correctly."""
# Create test data
upload_file = self._create_upload_file()
file = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
def test_load_storage_keys_session_isolation(self, db_session_with_containers: Session):
"""A loader backed by an uncommitted session should not see data from another session."""
tenant_id = str(uuid4())
user_id = str(uuid4())
# Create loader with different session (same underlying connection)
upload_file = self._create_upload_file(db_session_with_containers, tenant_id, user_id)
file = self._create_file(tenant_id, related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE)
# A loader with a fresh, separate session cannot see uncommitted rows from db_session_with_containers
with Session(bind=db.engine) as other_session:
other_loader = StorageKeyLoader(
other_session,
self.tenant_id,
tenant_id,
access_controller=DatabaseFileAccessController(),
)
with pytest.raises(ValueError):

View File

@ -8,6 +8,7 @@ Covers real Redis 7+ sharded pub/sub interactions including:
- Resource cleanup accounting via PUBSUB SHARDNUMSUB
"""
import socket
import threading
import time
import uuid
@ -356,10 +357,17 @@ class TestShardedRedisBroadcastChannelClusterIntegration:
def _get_test_topic_name(cls) -> str:
return f"test_sharded_cluster_topic_{uuid.uuid4()}"
@staticmethod
def _resolve_announced_ip(host: str) -> str:
"""Resolve the container host name to a literal IP accepted by Redis cluster config."""
return socket.getaddrinfo(host, None, type=socket.SOCK_STREAM)[0][4][0]
@staticmethod
def _ensure_single_node_cluster(host: str, port: int) -> None:
"""Bootstrap a single-node cluster using a literal IP for Redis node advertisement."""
client = redis.Redis(host=host, port=port, decode_responses=False)
client.config_set("cluster-announce-ip", host)
announced_ip = TestShardedRedisBroadcastChannelClusterIntegration._resolve_announced_ip(host)
client.config_set("cluster-announce-ip", announced_ip)
client.config_set("cluster-announce-port", port)
slots = client.execute_command("CLUSTER", "SLOTS")
if not slots:

View File

@ -3,6 +3,7 @@ from __future__ import annotations
import base64
import json
from types import SimpleNamespace
from typing import Any, cast
from unittest.mock import MagicMock, patch
from uuid import uuid4
@ -17,7 +18,7 @@ from core.trigger.constants import (
)
from extensions.ext_redis import redis_client
from graphon.enums import BuiltinNodeTypes
from models import Account, AppMode
from models import Account, App, AppMode
from models.model import AppModelConfig, IconType
from services import app_dsl_service
from services.account_service import AccountService, TenantService
@ -67,6 +68,22 @@ def _pending_yaml_content(version: str = "99.0.0") -> bytes:
return (f'version: "{version}"\nkind: app\napp:\n name: Loop Test\n mode: workflow\n').encode()
def _app_stub(**overrides: Any) -> App:
defaults = {
"id": str(uuid4()),
"tenant_id": _DEFAULT_TENANT_ID,
"mode": AppMode.WORKFLOW.value,
"name": "n",
"description": "d",
"icon_type": IconType.EMOJI,
"icon": "i",
"icon_background": "#fff",
"use_icon_as_answer_icon": False,
"app_model_config": None,
}
return cast(App, SimpleNamespace(**(defaults | overrides)))
class TestAppDslService:
"""Integration tests for AppDslService using testcontainers."""
@ -585,7 +602,7 @@ class TestAppDslService:
def test_check_dependencies_returns_empty_when_no_redis_data(self, db_session_with_containers):
service = AppDslService(db_session_with_containers)
app_model = SimpleNamespace(id=str(uuid4()), tenant_id=_DEFAULT_TENANT_ID)
app_model = _app_stub()
result = service.check_dependencies(app_model=app_model)
assert result.leaked_dependencies == []
@ -614,7 +631,7 @@ class TestAppDslService:
)
service = AppDslService(db_session_with_containers)
result = service.check_dependencies(app_model=SimpleNamespace(id=app_id, tenant_id=_DEFAULT_TENANT_ID))
result = service.check_dependencies(app_model=_app_stub(id=app_id))
assert len(result.leaked_dependencies) == 1
def test_check_dependencies_with_real_app(self, db_session_with_containers, mock_external_service_dependencies):
@ -656,9 +673,7 @@ class TestAppDslService:
lambda _m: SimpleNamespace(kind="conv"),
)
app = SimpleNamespace(
id=str(uuid4()),
tenant_id=_DEFAULT_TENANT_ID,
app = _app_stub(
mode=AppMode.WORKFLOW.value,
name="old",
description="old-desc",
@ -667,7 +682,6 @@ class TestAppDslService:
icon_background="#111111",
updated_by=None,
updated_at=None,
app_model_config=None,
)
service = AppDslService(db_session_with_containers)
updated = service._create_or_update_app(
@ -745,15 +759,7 @@ class TestAppDslService:
service = AppDslService(db_session_with_containers)
with pytest.raises(ValueError, match="Missing workflow data"):
service._create_or_update_app(
app=SimpleNamespace(
id=str(uuid4()),
tenant_id=_DEFAULT_TENANT_ID,
mode=AppMode.WORKFLOW.value,
name="n",
description="d",
icon_background="#fff",
app_model_config=None,
),
app=_app_stub(mode=AppMode.WORKFLOW.value),
data={"app": {"mode": AppMode.WORKFLOW.value}},
account=_account_mock(),
)
@ -762,15 +768,7 @@ class TestAppDslService:
service = AppDslService(db_session_with_containers)
with pytest.raises(ValueError, match="Missing model_config"):
service._create_or_update_app(
app=SimpleNamespace(
id=str(uuid4()),
tenant_id=_DEFAULT_TENANT_ID,
mode=AppMode.CHAT.value,
name="n",
description="d",
icon_background="#fff",
app_model_config=None,
),
app=_app_stub(mode=AppMode.CHAT.value),
data={"app": {"mode": AppMode.CHAT.value}},
account=_account_mock(),
)
@ -799,15 +797,7 @@ class TestAppDslService:
service = AppDslService(db_session_with_containers)
with pytest.raises(ValueError, match="Invalid app mode"):
service._create_or_update_app(
app=SimpleNamespace(
id=str(uuid4()),
tenant_id=_DEFAULT_TENANT_ID,
mode=AppMode.RAG_PIPELINE.value,
name="n",
description="d",
icon_background="#fff",
app_model_config=None,
),
app=_app_stub(mode=AppMode.RAG_PIPELINE.value),
data={"app": {"mode": AppMode.RAG_PIPELINE.value}},
account=_account_mock(),
)
@ -828,29 +818,16 @@ class TestAppDslService:
lambda *_args, **_kwargs: model_calls.append(True),
)
workflow_app = SimpleNamespace(
workflow_app = _app_stub(
mode=AppMode.WORKFLOW.value,
tenant_id=_DEFAULT_TENANT_ID,
name="n",
icon="i",
icon_type="emoji",
icon_background="#fff",
description="d",
use_icon_as_answer_icon=False,
app_model_config=None,
)
AppDslService.export_dsl(workflow_app)
assert workflow_calls == [True]
chat_app = SimpleNamespace(
chat_app = _app_stub(
mode=AppMode.CHAT.value,
tenant_id=_DEFAULT_TENANT_ID,
name="n",
icon="i",
icon_type="emoji",
icon_background="#fff",
description="d",
use_icon_as_answer_icon=False,
app_model_config=SimpleNamespace(to_dict=lambda: {"agent_mode": {"tools": []}}),
)
AppDslService.export_dsl(chat_app)
@ -863,16 +840,14 @@ class TestAppDslService:
lambda **_kwargs: None,
)
emoji_app = SimpleNamespace(
emoji_app = _app_stub(
mode=AppMode.WORKFLOW.value,
tenant_id=_DEFAULT_TENANT_ID,
name="Emoji App",
icon="🎨",
icon_type=IconType.EMOJI,
icon_background="#FF5733",
description="App with emoji icon",
use_icon_as_answer_icon=True,
app_model_config=None,
)
yaml_output = AppDslService.export_dsl(emoji_app)
data = yaml.safe_load(yaml_output)
@ -880,16 +855,14 @@ class TestAppDslService:
assert data["app"]["icon_type"] == "emoji"
assert data["app"]["icon_background"] == "#FF5733"
image_app = SimpleNamespace(
image_app = _app_stub(
mode=AppMode.WORKFLOW.value,
tenant_id=_DEFAULT_TENANT_ID,
name="Image App",
icon="https://example.com/icon.png",
icon_type=IconType.IMAGE,
icon_background="#FFEAD5",
description="App with image icon",
use_icon_as_answer_icon=False,
app_model_config=None,
)
yaml_output = AppDslService.export_dsl(image_app)
data = yaml.safe_load(yaml_output)
@ -1106,7 +1079,7 @@ class TestAppDslService:
export_data: dict = {}
AppDslService._append_workflow_export_data(
export_data=export_data,
app_model=SimpleNamespace(tenant_id=_DEFAULT_TENANT_ID),
app_model=_app_stub(),
include_secret=False,
workflow_id=None,
)
@ -1132,7 +1105,7 @@ class TestAppDslService:
with pytest.raises(ValueError, match="Missing draft workflow configuration"):
AppDslService._append_workflow_export_data(
export_data={},
app_model=SimpleNamespace(tenant_id=_DEFAULT_TENANT_ID),
app_model=_app_stub(),
include_secret=False,
workflow_id=None,
)
@ -1160,7 +1133,7 @@ class TestAppDslService:
monkeypatch.setattr(app_dsl_service, "jsonable_encoder", lambda x: x)
app_model_config = SimpleNamespace(to_dict=lambda: {"agent_mode": {"tools": [{"credential_id": "secret"}]}})
app_model = SimpleNamespace(tenant_id=_DEFAULT_TENANT_ID, app_model_config=app_model_config)
app_model = _app_stub(app_model_config=app_model_config)
export_data: dict = {}
AppDslService._append_model_config_export_data(export_data, app_model)
@ -1169,7 +1142,7 @@ class TestAppDslService:
def test_append_model_config_export_data_requires_app_config(self):
with pytest.raises(ValueError, match="Missing app configuration"):
AppDslService._append_model_config_export_data({}, SimpleNamespace(app_model_config=None))
AppDslService._append_model_config_export_data({}, _app_stub(app_model_config=None))
# ── Dependency Extraction ─────────────────────────────────────────

View File

@ -10,6 +10,8 @@ from typing import Any
import pytest
from flask.views import MethodView
from pydantic import ValidationError
from werkzeug.datastructures import MultiDict
# kombu references MethodView as a global when importing celery/kombu pools.
if not hasattr(builtins, "MethodView"):
@ -174,6 +176,101 @@ def _dummy_workflow():
)
def test_app_list_query_normalizes_orpc_bracket_tag_ids(app_module):
first_tag_id = "8c4ef3d1-58a1-4d94-8a1c-1c171d889e08"
second_tag_id = "3c39395b-6d1f-4030-8b17-eaa7cc85221c"
query_args = MultiDict(
[
("page", "1"),
("limit", "30"),
("tag_ids[1]", second_tag_id),
("tag_ids[0]", first_tag_id),
]
)
normalized = app_module._normalize_app_list_query_args(query_args)
query = app_module.AppListQuery.model_validate(normalized)
assert query.tag_ids == [first_tag_id, second_tag_id]
def test_app_list_query_preserves_regular_query_params(app_module):
query_args = MultiDict(
[
("page", "2"),
("limit", "50"),
("mode", "chat"),
("name", "Sales Copilot"),
("is_created_by_me", "true"),
]
)
normalized = app_module._normalize_app_list_query_args(query_args)
query = app_module.AppListQuery.model_validate(normalized)
assert normalized == {
"page": "2",
"limit": "50",
"mode": "chat",
"name": "Sales Copilot",
"is_created_by_me": "true",
}
assert query.page == 2
assert query.limit == 50
assert query.mode == "chat"
assert query.name == "Sales Copilot"
assert query.is_created_by_me is True
assert query.tag_ids is None
def test_app_list_query_normalizes_empty_bracket_tag_ids_to_none(app_module):
query_args = MultiDict(
[
("tag_ids[0]", ""),
("tag_ids[1]", " "),
]
)
normalized = app_module._normalize_app_list_query_args(query_args)
query = app_module.AppListQuery.model_validate(normalized)
assert normalized == {"tag_ids": ["", " "]}
assert query.tag_ids is None
def test_app_list_query_rejects_invalid_bracket_tag_id(app_module):
normalized = app_module._normalize_app_list_query_args(MultiDict([("tag_ids[0]", "not-a-uuid")]))
with pytest.raises(ValidationError):
app_module.AppListQuery.model_validate(normalized)
def test_app_list_query_sorts_bracket_tag_ids_by_index(app_module):
first_tag_id = "8c4ef3d1-58a1-4d94-8a1c-1c171d889e08"
second_tag_id = "3c39395b-6d1f-4030-8b17-eaa7cc85221c"
third_tag_id = "9d5ec0f7-4f2b-4e7f-9c13-1e7a034d0eb1"
query_args = MultiDict(
[
("tag_ids[2]", third_tag_id),
("tag_ids[1]", second_tag_id),
("tag_ids[0]", first_tag_id),
]
)
normalized = app_module._normalize_app_list_query_args(query_args)
query = app_module.AppListQuery.model_validate(normalized)
assert query.tag_ids == [first_tag_id, second_tag_id, third_tag_id]
def test_app_list_query_rejects_flat_tag_ids(app_module):
tag_id = "8c4ef3d1-58a1-4d94-8a1c-1c171d889e08"
normalized = app_module._normalize_app_list_query_args(MultiDict([("tag_ids", tag_id)]))
with pytest.raises(ValidationError):
app_module.AppListQuery.model_validate(normalized)
def test_app_partial_serialization_uses_aliases(app_models):
AppPartial = app_models.AppPartial
created_at = _ts()

View File

@ -363,7 +363,8 @@ def test_workflow_online_users_filters_inaccessible_workflow(app, monkeypatch: p
)
monkeypatch.setattr(workflow_module.file_helpers, "get_signed_file_url", sign_avatar)
workflow_module.redis_client.hgetall.side_effect = lambda key: (
redis_pipeline = Mock()
redis_pipeline.execute.return_value = [
{
b"sid-1": json.dumps(
{
@ -374,16 +375,16 @@ def test_workflow_online_users_filters_inaccessible_workflow(app, monkeypatch: p
}
)
}
if key == f"{workflow_module.WORKFLOW_ONLINE_USERS_PREFIX}{app_id_1}"
else {}
)
]
workflow_module.redis_client.pipeline.return_value = redis_pipeline
api = workflow_module.WorkflowOnlineUsersApi()
handler = _unwrap(api.get)
handler = _unwrap(api.post)
with app.test_request_context(
f"/apps/workflows/online-users?app_ids={app_id_1},{app_id_2}",
method="GET",
"/apps/workflows/online-users",
method="POST",
json={"app_ids": [app_id_1, app_id_2]},
):
response = handler(api)
@ -402,12 +403,43 @@ def test_workflow_online_users_filters_inaccessible_workflow(app, monkeypatch: p
}
]
}
workflow_module.redis_client.hgetall.assert_called_once_with(
f"{workflow_module.WORKFLOW_ONLINE_USERS_PREFIX}{app_id_1}"
)
workflow_module.redis_client.pipeline.assert_called_once_with(transaction=False)
redis_pipeline.hgetall.assert_called_once_with(f"{workflow_module.WORKFLOW_ONLINE_USERS_PREFIX}{app_id_1}")
redis_pipeline.execute.assert_called_once_with()
sign_avatar.assert_called_once_with("avatar-file-id")
def test_workflow_online_users_batches_redis_reads(app, monkeypatch: pytest.MonkeyPatch) -> None:
app_ids = [f"wf-{index}" for index in range(workflow_module.WORKFLOW_ONLINE_USERS_REDIS_BATCH_SIZE + 1)]
monkeypatch.setattr(workflow_module, "current_account_with_tenant", lambda: (SimpleNamespace(), "tenant-1"))
monkeypatch.setattr(
workflow_module,
"WorkflowService",
lambda: SimpleNamespace(get_accessible_app_ids=lambda app_ids, tenant_id: set(app_ids)),
)
first_pipeline = Mock()
first_pipeline.execute.return_value = [{} for _ in range(workflow_module.WORKFLOW_ONLINE_USERS_REDIS_BATCH_SIZE)]
second_pipeline = Mock()
second_pipeline.execute.return_value = [{}]
workflow_module.redis_client.pipeline.side_effect = [first_pipeline, second_pipeline]
api = workflow_module.WorkflowOnlineUsersApi()
handler = _unwrap(api.post)
with app.test_request_context(
"/apps/workflows/online-users",
method="POST",
json={"app_ids": app_ids},
):
response = handler(api)
assert len(response["data"]) == len(app_ids)
assert workflow_module.redis_client.pipeline.call_count == 2
assert first_pipeline.hgetall.call_count == workflow_module.WORKFLOW_ONLINE_USERS_REDIS_BATCH_SIZE
assert second_pipeline.hgetall.call_count == 1
def test_workflow_online_users_rejects_excessive_workflow_ids(app, monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr(workflow_module, "current_account_with_tenant", lambda: (SimpleNamespace(), "tenant-1"))
accessible_app_ids = Mock(return_value=set())
@ -417,14 +449,15 @@ def test_workflow_online_users_rejects_excessive_workflow_ids(app, monkeypatch:
lambda: SimpleNamespace(get_accessible_app_ids=accessible_app_ids),
)
excessive_ids = ",".join(f"wf-{index}" for index in range(workflow_module.MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS + 1))
excessive_ids = [f"wf-{index}" for index in range(workflow_module.MAX_WORKFLOW_ONLINE_USERS_REQUEST_IDS + 1)]
api = workflow_module.WorkflowOnlineUsersApi()
handler = _unwrap(api.get)
handler = _unwrap(api.post)
with app.test_request_context(
f"/apps/workflows/online-users?app_ids={excessive_ids}",
method="GET",
"/apps/workflows/online-users",
method="POST",
json={"app_ids": excessive_ids},
):
with pytest.raises(HTTPException) as exc:
handler(api)

View File

@ -1,7 +1,7 @@
import uuid
from collections import OrderedDict
from typing import Any, NamedTuple
from unittest.mock import MagicMock, patch
from unittest.mock import patch
import pytest
from flask_restx import marshal
@ -29,15 +29,18 @@ class TestWorkflowDraftVariableFields:
def test_serialize_full_content(self):
"""Test that _serialize_full_content uses pre-loaded relationships."""
# Create mock objects with relationships pre-loaded
mock_variable_file = MagicMock(spec=WorkflowDraftVariableFile)
mock_variable_file.size = 100000
mock_variable_file.length = 50
mock_variable_file.value_type = SegmentType.OBJECT
mock_variable_file.upload_file_id = "test-upload-file-id"
mock_variable = MagicMock(spec=WorkflowDraftVariable)
mock_variable.file_id = "test-file-id"
mock_variable.variable_file = mock_variable_file
mock_variable = WorkflowDraftVariable(
file_id="test-file-id",
variable_file=WorkflowDraftVariableFile(
size=100000,
length=50,
value_type=SegmentType.OBJECT,
upload_file_id="test-upload-file-id",
tenant_id=str(uuid.uuid4()),
app_id=str(uuid.uuid4()),
user_id=str(uuid.uuid4()),
),
)
# Mock the file helpers
with patch("controllers.console.app.workflow_draft_variable.file_helpers", autospec=True) as mock_file_helpers:
@ -84,7 +87,7 @@ class TestWorkflowDraftVariableFields:
expected_without_value: OrderedDict[str, Any] = OrderedDict(
{
"id": str(conv_var.id),
"id": conv_var.id,
"type": conv_var.get_variable_type().value,
"name": "conv_var",
"description": "",
@ -117,7 +120,7 @@ class TestWorkflowDraftVariableFields:
expected_without_value = OrderedDict(
{
"id": str(sys_var.id),
"id": sys_var.id,
"type": sys_var.get_variable_type().value,
"name": "sys_var",
"description": "",
@ -149,7 +152,7 @@ class TestWorkflowDraftVariableFields:
expected_without_value: OrderedDict[str, Any] = OrderedDict(
{
"id": str(node_var.id),
"id": node_var.id,
"type": node_var.get_variable_type().value,
"name": "node_var",
"description": "",
@ -180,19 +183,22 @@ class TestWorkflowDraftVariableFields:
node_var.id = str(uuid.uuid4())
node_var.last_edited_at = naive_utc_now()
variable_file = WorkflowDraftVariableFile(
id=str(uuidv7()),
upload_file_id=str(uuid.uuid4()),
size=1024,
length=10,
value_type=SegmentType.ARRAY_STRING,
tenant_id=str(uuidv7()),
app_id=str(uuidv7()),
user_id=str(uuidv7()),
)
variable_file.id = str(uuidv7())
node_var.variable_file = variable_file
node_var.file_id = variable_file.id
expected_without_value: OrderedDict[str, Any] = OrderedDict(
{
"id": str(node_var.id),
"type": node_var.get_variable_type().value,
"id": node_var.id,
"type": node_var.get_variable_type(),
"name": "node_var",
"description": "",
"selector": ["test_node", "node_var"],
@ -235,7 +241,7 @@ class TestWorkflowDraftVariableList:
node_var.id = str(uuid.uuid4())
node_var_dict = OrderedDict(
{
"id": str(node_var.id),
"id": node_var.id,
"type": node_var.get_variable_type().value,
"name": "test_var",
"description": "",

View File

@ -134,6 +134,42 @@ class TestPerformHitTesting:
assert result["query"] == "hello"
assert result["records"] == []
def test_success_normalizes_legacy_query_and_nullable_list_fields(self, dataset):
response = {
"query": {"content": "hello"},
"records": [
{
"segment": {"id": "segment-1", "keywords": None},
"child_chunks": None,
"files": None,
"score": 0.8,
}
],
}
with (
patch.object(
HitTestingService,
"retrieve",
return_value=response,
),
patch(
"controllers.console.datasets.hit_testing_base.marshal",
return_value=response["records"],
),
):
result = DatasetsHitTestingBase.perform_hit_testing(dataset, {"query": "hello"})
assert result["query"] == "hello"
assert result["records"] == [
{
"segment": {"id": "segment-1", "keywords": []},
"child_chunks": [],
"files": [],
"score": 0.8,
}
]
def test_index_not_initialized(self, dataset):
with patch.object(
HitTestingService,

View File

@ -1,6 +1,7 @@
from unittest.mock import MagicMock, PropertyMock, patch
import pytest
from werkzeug.exceptions import NotFound
from controllers.console import console_ns
from controllers.console.auth.error import (
@ -29,6 +30,7 @@ from controllers.console.workspace.error import (
CurrentPasswordIncorrectError,
InvalidAccountDeletionCodeError,
)
from models.enums import CreatorUserRole
from services.errors.account import CurrentPasswordIncorrectError as ServicePwdError
@ -135,6 +137,131 @@ class TestAccountUpdateApis:
assert result["id"] == "u1"
class TestAccountAvatarApiGet:
    """GET /account/avatar must not sign arbitrary upload_file IDs (IDOR)."""
    def test_get_avatar_signed_url_when_upload_owned_by_current_account(self, app):
        """Owner in the same tenant: the upload id is signed and returned."""
        api = AccountAvatarApi()
        method = unwrap(api.get)
        user = MagicMock()
        user.id = "acc-owner"
        tenant_id = "tenant-1"
        file_id = "550e8400-e29b-41d4-a716-446655440000"
        # Upload record owned by the requesting account within its own tenant.
        upload_file = MagicMock()
        upload_file.id = file_id
        upload_file.tenant_id = tenant_id
        upload_file.created_by = user.id
        upload_file.created_by_role = CreatorUserRole.ACCOUNT
        with (
            app.test_request_context(f"/account/avatar?avatar={file_id}"),
            patch(
                "controllers.console.workspace.account.current_account_with_tenant",
                return_value=(user, tenant_id),
            ),
            patch("controllers.console.workspace.account.db.session.scalar", return_value=upload_file),
            patch(
                "controllers.console.workspace.account.file_helpers.get_signed_file_url",
                return_value="https://signed/example",
            ) as sign_mock,
        ):
            result = method(api)
        assert result == {"avatar_url": "https://signed/example"}
        # Signing must be keyed by the upload id, nothing else.
        sign_mock.assert_called_once_with(upload_file_id=file_id)
    def test_get_avatar_not_found_when_upload_created_by_other_account_same_tenant(self, app):
        """Same tenant but a different creator: 404 and never sign (IDOR guard)."""
        api = AccountAvatarApi()
        method = unwrap(api.get)
        user = MagicMock()
        user.id = "acc-a"
        tenant_id = "tenant-1"
        file_id = "550e8400-e29b-41d4-a716-446655440001"
        # Upload belongs to another account ("acc-b") in the same tenant.
        upload_file = MagicMock()
        upload_file.id = file_id
        upload_file.tenant_id = tenant_id
        upload_file.created_by = "acc-b"
        upload_file.created_by_role = CreatorUserRole.ACCOUNT
        with (
            app.test_request_context(f"/account/avatar?avatar={file_id}"),
            patch(
                "controllers.console.workspace.account.current_account_with_tenant",
                return_value=(user, tenant_id),
            ),
            patch("controllers.console.workspace.account.db.session.scalar", return_value=upload_file),
            patch(
                "controllers.console.workspace.account.file_helpers.get_signed_file_url",
                return_value="https://signed/leak",
            ) as sign_mock,
        ):
            with pytest.raises(NotFound):
                method(api)
        # A foreign upload id must never reach the signer.
        sign_mock.assert_not_called()
    def test_get_avatar_not_found_when_upload_belongs_to_other_tenant(self, app):
        """Cross-tenant upload: 404 and never sign, even for the original creator."""
        api = AccountAvatarApi()
        method = unwrap(api.get)
        user = MagicMock()
        user.id = "acc-owner"
        tenant_id = "tenant-1"
        file_id = "550e8400-e29b-41d4-a716-446655440002"
        # Same creator, but the record lives in a different tenant.
        upload_file = MagicMock()
        upload_file.id = file_id
        upload_file.tenant_id = "tenant-other"
        upload_file.created_by = user.id
        upload_file.created_by_role = CreatorUserRole.ACCOUNT
        with (
            app.test_request_context(f"/account/avatar?avatar={file_id}"),
            patch(
                "controllers.console.workspace.account.current_account_with_tenant",
                return_value=(user, tenant_id),
            ),
            patch("controllers.console.workspace.account.db.session.scalar", return_value=upload_file),
            patch(
                "controllers.console.workspace.account.file_helpers.get_signed_file_url",
                return_value="https://signed/leak",
            ) as sign_mock,
        ):
            with pytest.raises(NotFound):
                method(api)
        sign_mock.assert_not_called()
    def test_get_avatar_https_pass_through_without_signing(self, app):
        """An absolute https avatar URL is returned untouched, with no signing."""
        api = AccountAvatarApi()
        method = unwrap(api.get)
        user = MagicMock()
        user.id = "acc-owner"
        tenant_id = "tenant-1"
        external = "https://cdn.example/avatar.png"
        with (
            app.test_request_context(f"/account/avatar?avatar={external}"),
            patch(
                "controllers.console.workspace.account.current_account_with_tenant",
                return_value=(user, tenant_id),
            ),
            patch(
                "controllers.console.workspace.account.file_helpers.get_signed_file_url",
                return_value="https://signed/should-not-use",
            ) as sign_mock,
        ):
            result = method(api)
        assert result == {"avatar_url": external}
        sign_mock.assert_not_called()
class TestAccountPasswordApi:
def test_password_success(self, app):
api = AccountPasswordApi()

View File

@ -23,6 +23,7 @@ from werkzeug.exceptions import Forbidden, NotFound
from controllers.service_api.dataset.document import (
DeprecatedDocumentAddByTextApi,
DeprecatedDocumentUpdateByFileApi,
DeprecatedDocumentUpdateByTextApi,
DocumentAddByFileApi,
DocumentAddByTextApi,
@ -32,7 +33,6 @@ from controllers.service_api.dataset.document import (
DocumentListQuery,
DocumentTextCreatePayload,
DocumentTextUpdate,
DocumentUpdateByFileApi,
DocumentUpdateByTextApi,
InvalidMetadataError,
)
@ -1095,8 +1095,8 @@ class TestArchivedDocumentImmutableError:
assert error.code == 403
class TestDocumentTextRouteDeprecation:
"""Test that legacy underscore text routes stay marked deprecated."""
class TestDocumentRouteDeprecation:
"""Test that legacy document routes stay marked deprecated."""
def test_create_by_text_legacy_alias_is_deprecated(self):
"""Ensure only the legacy create-by-text alias is marked deprecated."""
@ -1108,10 +1108,15 @@ class TestDocumentTextRouteDeprecation:
assert DeprecatedDocumentUpdateByTextApi.post.__apidoc__["deprecated"] is True
assert DocumentUpdateByTextApi.post.__apidoc__.get("deprecated") is not True
    def test_update_by_file_legacy_aliases_are_deprecated(self):
        """Ensure only the legacy file-update aliases are marked deprecated."""
        # The deprecated POST alias must advertise `deprecated: True` in its
        # OpenAPI metadata, while the canonical PATCH endpoint must not.
        assert DeprecatedDocumentUpdateByFileApi.post.__apidoc__["deprecated"] is True
        assert DocumentApi.patch.__apidoc__.get("deprecated") is not True
# =============================================================================
# Endpoint tests for DocumentUpdateByTextApi, DocumentAddByFileApi,
# DocumentUpdateByFileApi.
# and the canonical/deprecated document file update routes.
#
# These controllers use ``@cloud_edition_billing_resource_check`` (does NOT
# preserve ``__wrapped__``) and ``@cloud_edition_billing_rate_limit_check``
@ -1359,13 +1364,52 @@ class TestDocumentAddByFileApiPost:
api.post(tenant_id=mock_tenant.id, dataset_id=mock_dataset.id)
class TestDocumentUpdateByFileApiPost:
"""Test suite for DocumentUpdateByFileApi.post() endpoint.
class TestDocumentUpdateByFileApiPatch:
"""Test suite for the canonical document file update endpoint.
``post`` is wrapped by ``@cloud_edition_billing_resource_check`` and
``patch`` is wrapped by ``@cloud_edition_billing_resource_check`` and
``@cloud_edition_billing_rate_limit_check``.
"""
    # Both historical spellings of the alias route must keep working.
    @pytest.mark.parametrize("route_name", ["update_by_file", "update-by-file"])
    @patch("controllers.service_api.dataset.document._update_document_by_file")
    @patch("controllers.service_api.wraps.FeatureService")
    @patch("controllers.service_api.wraps.validate_and_get_api_token")
    def test_update_by_file_deprecated_aliases_delegate_to_shared_handler(
        self,
        mock_validate_token,
        mock_feature_svc,
        mock_update_document_by_file,
        route_name,
        app,
        mock_tenant,
        mock_dataset,
    ):
        """Test legacy POST aliases still dispatch while marked deprecated."""
        _setup_billing_mocks(mock_validate_token, mock_feature_svc, mock_tenant.id)
        # Shared handler is stubbed so the test exercises only the delegation.
        mock_update_document_by_file.return_value = ({"document": {"id": "doc-1"}, "batch": "batch-1"}, 200)
        doc_id = str(uuid.uuid4())
        with app.test_request_context(
            f"/datasets/{mock_dataset.id}/documents/{doc_id}/{route_name}",
            method="POST",
            headers={"Authorization": "Bearer test_token"},
        ):
            api = DeprecatedDocumentUpdateByFileApi()
            response, status = api.post(
                tenant_id=mock_tenant.id,
                dataset_id=mock_dataset.id,
                document_id=doc_id,
            )
        assert status == 200
        assert response["batch"] == "batch-1"
        # The alias must delegate to the shared handler, not re-implement it.
        mock_update_document_by_file.assert_called_once_with(
            tenant_id=mock_tenant.id,
            dataset_id=mock_dataset.id,
            document_id=doc_id,
        )
@patch("controllers.service_api.dataset.document.db")
@patch("controllers.service_api.wraps.FeatureService")
@patch("controllers.service_api.wraps.validate_and_get_api_token")
@ -1387,15 +1431,15 @@ class TestDocumentUpdateByFileApiPost:
doc_id = str(uuid.uuid4())
data = {"file": (BytesIO(b"content"), "test.pdf", "application/pdf")}
with app.test_request_context(
f"/datasets/{mock_dataset.id}/documents/{doc_id}/update_by_file",
method="POST",
f"/datasets/{mock_dataset.id}/documents/{doc_id}",
method="PATCH",
content_type="multipart/form-data",
data=data,
headers={"Authorization": "Bearer test_token"},
):
api = DocumentUpdateByFileApi()
api = DocumentApi()
with pytest.raises(ValueError, match="Dataset does not exist"):
api.post(
api.patch(
tenant_id=mock_tenant.id,
dataset_id=mock_dataset.id,
document_id=doc_id,
@ -1423,15 +1467,15 @@ class TestDocumentUpdateByFileApiPost:
doc_id = str(uuid.uuid4())
data = {"file": (BytesIO(b"content"), "test.pdf", "application/pdf")}
with app.test_request_context(
f"/datasets/{mock_dataset.id}/documents/{doc_id}/update_by_file",
method="POST",
f"/datasets/{mock_dataset.id}/documents/{doc_id}",
method="PATCH",
content_type="multipart/form-data",
data=data,
headers={"Authorization": "Bearer test_token"},
):
api = DocumentUpdateByFileApi()
api = DocumentApi()
with pytest.raises(ValueError, match="External datasets"):
api.post(
api.patch(
tenant_id=mock_tenant.id,
dataset_id=mock_dataset.id,
document_id=doc_id,
@ -1482,14 +1526,14 @@ class TestDocumentUpdateByFileApiPost:
doc_id = str(uuid.uuid4())
data = {"file": (BytesIO(b"file content"), "test.pdf", "application/pdf")}
with app.test_request_context(
f"/datasets/{mock_dataset.id}/documents/{doc_id}/update_by_file",
method="POST",
f"/datasets/{mock_dataset.id}/documents/{doc_id}",
method="PATCH",
content_type="multipart/form-data",
data=data,
headers={"Authorization": "Bearer test_token"},
):
api = DocumentUpdateByFileApi()
response, status = api.post(
api = DocumentApi()
response, status = api.patch(
tenant_id=mock_tenant.id,
dataset_id=mock_dataset.id,
document_id=doc_id,

View File

@ -171,6 +171,113 @@ class TestHitTestingApiPost:
assert passed_retrieval_model["search_method"] == "semantic_search"
assert passed_retrieval_model["top_k"] == 10
    @patch("controllers.service_api.dataset.hit_testing.service_api_ns")
    @patch("controllers.console.datasets.hit_testing_base.marshal")
    @patch("controllers.console.datasets.hit_testing_base.HitTestingService")
    @patch("controllers.console.datasets.hit_testing_base.DatasetService")
    @patch("controllers.console.datasets.hit_testing_base.current_user", new_callable=lambda: Mock(spec=Account))
    def test_post_preserves_retrieval_model_metadata_filtering_conditions(
        self,
        mock_current_user,
        mock_dataset_svc,
        mock_hit_svc,
        mock_marshal,
        mock_ns,
        app,
    ):
        """Service API retrieval payload should not drop metadata filters."""
        dataset_id = str(uuid.uuid4())
        tenant_id = str(uuid.uuid4())
        mock_dataset = Mock()
        mock_dataset.id = dataset_id
        mock_dataset_svc.get_dataset.return_value = mock_dataset
        mock_dataset_svc.check_dataset_permission.return_value = None
        mock_hit_svc.retrieve.return_value = {"query": "filtered query", "records": []}
        mock_hit_svc.hit_testing_args_check.return_value = None
        mock_marshal.return_value = []
        # The filter block that must survive the controller's payload handling.
        metadata_filtering_conditions = {
            "logical_operator": "and",
            "conditions": [
                {
                    "name": "category",
                    "comparison_operator": "is",
                    "value": "finance",
                }
            ],
        }
        mock_ns.payload = {
            "query": "filtered query",
            "retrieval_model": {
                "search_method": "semantic_search",
                "reranking_enable": False,
                "score_threshold_enabled": False,
                "top_k": 4,
                "metadata_filtering_conditions": metadata_filtering_conditions,
            },
        }
        with app.test_request_context():
            api = HitTestingApi()
            # __wrapped__ bypasses auth/billing decorators to hit the handler.
            HitTestingApi.post.__wrapped__(api, tenant_id, dataset_id)
        passed_retrieval_model = mock_hit_svc.retrieve.call_args.kwargs.get("retrieval_model")
        assert passed_retrieval_model is not None
        assert passed_retrieval_model["metadata_filtering_conditions"] == metadata_filtering_conditions
    @patch("controllers.service_api.dataset.hit_testing.service_api_ns")
    @patch("controllers.console.datasets.hit_testing_base.marshal")
    @patch("controllers.console.datasets.hit_testing_base.HitTestingService")
    @patch("controllers.console.datasets.hit_testing_base.DatasetService")
    @patch("controllers.console.datasets.hit_testing_base.current_user", new_callable=lambda: Mock(spec=Account))
    def test_post_normalizes_legacy_query_and_nullable_list_fields(
        self,
        mock_current_user,
        mock_dataset_svc,
        mock_hit_svc,
        mock_marshal,
        mock_ns,
        app,
    ):
        """Test service API normalizes legacy query shape and nullable list fields."""
        dataset_id = str(uuid.uuid4())
        tenant_id = str(uuid.uuid4())
        mock_dataset = Mock()
        mock_dataset.id = dataset_id
        mock_dataset_svc.get_dataset.return_value = mock_dataset
        mock_dataset_svc.check_dataset_permission.return_value = None
        # Legacy service shape: query as {"content": ...} rather than a string.
        mock_hit_svc.retrieve.return_value = {"query": {"content": "legacy query"}, "records": ["placeholder"]}
        mock_hit_svc.hit_testing_args_check.return_value = None
        # marshal emits None for empty list fields; the handler must coerce to [].
        mock_marshal.return_value = [
            {
                "segment": {"id": "segment-1", "keywords": None},
                "child_chunks": None,
                "files": None,
                "score": 0.9,
            }
        ]
        mock_ns.payload = {"query": "legacy query"}
        with app.test_request_context():
            api = HitTestingApi()
            response = HitTestingApi.post.__wrapped__(api, tenant_id, dataset_id)
        assert response["query"] == "legacy query"
        assert response["records"] == [
            {
                "segment": {"id": "segment-1", "keywords": []},
                "child_chunks": [],
                "files": [],
                "score": 0.9,
            }
        ]
@patch("controllers.service_api.dataset.hit_testing.service_api_ns")
@patch("controllers.console.datasets.hit_testing_base.DatasetService")
@patch("controllers.console.datasets.hit_testing_base.current_user", new_callable=lambda: Mock(spec=Account))

View File

@ -323,6 +323,50 @@ class TestDeserializeResponse:
with pytest.raises(ValueError, match="Invalid status line"):
deserialize_response(raw_data)
    def test_deserialize_response_preserves_duplicate_set_cookie_headers(self):
        """Every repeated Set-Cookie header must survive deserialization."""
        # Regression test for https://github.com/langgenius/dify/issues/35722
        # Multiple Set-Cookie headers must be preserved per RFC 9110, not collapsed
        # into a single value by dict-style assignment.
        raw_data = (
            b"HTTP/1.1 200 OK\r\n"
            b"Content-Type: text/plain\r\n"
            b"Set-Cookie: session=abc; Path=/; HttpOnly\r\n"
            b"Set-Cookie: tracking=xyz; Path=/; Secure\r\n"
            b"\r\n"
            b"ok"
        )
        response = deserialize_response(raw_data)
        cookies = response.headers.getlist("Set-Cookie")
        assert cookies == [
            "session=abc; Path=/; HttpOnly",
            "tracking=xyz; Path=/; Secure",
        ]
        # Single-valued headers should still be readable normally.
        assert response.headers.get("Content-Type") == "text/plain"
def test_deserialize_response_preserves_duplicate_generic_headers(self):
# Any header name (not just Set-Cookie) may legitimately repeat; verify the
# parser preserves all values rather than overwriting earlier ones.
raw_data = b"HTTP/1.1 200 OK\r\nX-Custom: first\r\nX-Custom: second\r\n\r\n"
response = deserialize_response(raw_data)
assert response.headers.getlist("X-Custom") == ["first", "second"]
def test_deserialize_response_does_not_inject_default_content_type(self):
# Flask's Response constructor adds a default Content-Type header. When the
# raw response has no Content-Type, the parsed response should not silently
# gain one from the framework default.
raw_data = b"HTTP/1.1 204 No Content\r\nX-Trace-Id: abc\r\n\r\n"
response = deserialize_response(raw_data)
header_names = [name for name, _ in response.headers.items()]
assert "Content-Type" not in header_names
assert response.headers.get("X-Trace-Id") == "abc"
def test_roundtrip_response(self):
# Test that serialize -> deserialize produces equivalent response
original_response = Response(

View File

@ -316,6 +316,33 @@ def test_create_batches_texts_and_skips_empty_input(vector_factory_module):
vector._vector_processor.create.assert_not_called()
def test_create_skips_empty_text_documents_before_embedding(vector_factory_module):
    """Vector.create must drop blank documents before calling the embedder."""
    subject = vector_factory_module.Vector.__new__(vector_factory_module.Vector)
    subject._embeddings = MagicMock()
    subject._embeddings.embed_documents.return_value = [[0.1], [0.2]]
    subject._vector_processor = MagicMock()
    kept_first = Document(page_content="foo", metadata={"doc_id": "id-1"})
    dropped_empty = Document(page_content="", metadata={"doc_id": "id-empty"})
    dropped_blank = Document(page_content=" \n", metadata={"doc_id": "id-blank"})
    kept_last = Document(page_content="bar", metadata={"doc_id": "id-2"})
    # Mixed batch: only the two non-blank documents reach embedder and store.
    subject.create(texts=[kept_first, dropped_empty, dropped_blank, kept_last], request_id="r-1")
    subject._embeddings.embed_documents.assert_called_once_with(["foo", "bar"])
    subject._vector_processor.create.assert_called_once_with(
        texts=[kept_first, kept_last], embeddings=[[0.1], [0.2]], request_id="r-1"
    )
    # All-blank batch: neither embedding nor storage may happen at all.
    subject._embeddings.embed_documents.reset_mock()
    subject._vector_processor.create.reset_mock()
    subject.create(texts=[dropped_empty, dropped_blank])
    subject._embeddings.embed_documents.assert_not_called()
    subject._vector_processor.create.assert_not_called()
def test_create_multimodal_filters_missing_uploads(vector_factory_module, monkeypatch):
class _Field:
def in_(self, value):
@ -396,6 +423,48 @@ def test_add_texts_with_optional_duplicate_check(vector_factory_module):
vector._vector_processor.create.assert_called_once()
def test_add_texts_skips_empty_text_documents(vector_factory_module):
    """add_texts must ignore documents whose page_content is empty."""
    vector = vector_factory_module.Vector.__new__(vector_factory_module.Vector)
    vector._embeddings = MagicMock()
    vector._embeddings.embed_documents.return_value = [[0.1]]
    vector._vector_processor = MagicMock()
    docs = [
        Document(page_content="keep", metadata={"doc_id": "id-1"}),
        Document(page_content="", metadata={"doc_id": "id-empty"}),
    ]
    vector.add_texts(docs, source="api")
    # Only the non-empty document is embedded and forwarded (kwargs pass through).
    vector._embeddings.embed_documents.assert_called_once_with(["keep"])
    vector._vector_processor.create.assert_called_once_with(texts=[docs[0]], embeddings=[[0.1]], source="api")
    # A batch of only empty documents must be a no-op.
    vector._embeddings.embed_documents.reset_mock()
    vector._vector_processor.create.reset_mock()
    vector.add_texts([docs[1]])
    vector._embeddings.embed_documents.assert_not_called()
    vector._vector_processor.create.assert_not_called()
def test_add_texts_filters_empty_documents_before_duplicate_check(vector_factory_module):
    """Blank documents must already be removed when the duplicate check runs."""
    subject = vector_factory_module.Vector.__new__(vector_factory_module.Vector)
    subject._embeddings = MagicMock()
    subject._embeddings.embed_documents.return_value = [[0.1]]
    subject._vector_processor = MagicMock()
    # The duplicate filter reports everything as duplicate -> nothing to embed.
    subject._filter_duplicate_texts = MagicMock(return_value=[])
    kept = Document(page_content="keep", metadata={"doc_id": "id-1"})
    blank = Document(page_content=" ", metadata={"doc_id": "id-empty"})
    subject.add_texts([kept, blank], duplicate_check=True)
    # Only the non-blank document is offered to the duplicate filter; since the
    # filter returned an empty list, no embedding or storage may occur.
    subject._filter_duplicate_texts.assert_called_once_with([kept])
    subject._embeddings.embed_documents.assert_not_called()
    subject._vector_processor.create.assert_not_called()
def test_vector_delegation_methods(vector_factory_module):
vector = vector_factory_module.Vector.__new__(vector_factory_module.Vector)
vector._embeddings = MagicMock()

View File

@ -1,14 +1,12 @@
"""Primarily used for testing merged cell scenarios"""
import gc
import io
import os
import tempfile
import warnings
from collections import UserDict
from pathlib import Path
from types import SimpleNamespace
from unittest.mock import AsyncMock, MagicMock
from unittest.mock import MagicMock
import pytest
from docx import Document
@ -377,23 +375,21 @@ def test_close_is_idempotent():
extractor.temp_file.close.assert_called_once()
def test_close_handles_async_close_mock():
async def _async_close() -> None:
return None
def test_close_closes_awaitable_close_result():
extractor = object.__new__(WordExtractor)
extractor._closed = False
extractor.temp_file = MagicMock()
extractor.temp_file.close = AsyncMock()
close_result = _async_close()
extractor.temp_file.close = MagicMock(return_value=close_result)
with warnings.catch_warnings(record=True) as caught:
warnings.simplefilter("always")
extractor.close()
gc.collect()
extractor.close()
assert close_result.cr_frame is None
extractor.temp_file.close.assert_called_once()
assert not [
warning
for warning in caught
if issubclass(warning.category, RuntimeWarning) and "AsyncMockMixin._execute_mock_call" in str(warning.message)
]
def test_extract_images_handles_invalid_external_cases(monkeypatch):

View File

@ -1106,11 +1106,11 @@ class TestRetrievalService:
def test_deduplicate_documents_non_dify_provider(self):
"""
Test deduplication with non-dify provider documents.
Test deduplication with non-dify provider documents that have no doc_id.
Verifies:
- External provider documents use content-based deduplication
- Different providers are handled correctly
- External provider documents without doc_id use content-based deduplication
- Identical content from the same provider is collapsed to one result
"""
# Arrange
doc1 = Document(
@ -1131,7 +1131,96 @@ class TestRetrievalService:
# Assert
# External documents without doc_id should use content-based dedup
assert len(result) >= 1
assert len(result) == 1
    def test_deduplicate_documents_non_dify_provider_with_doc_id_different_sources(self):
        """
        Regression test for issue #35707.
        Two chunks from different source documents share identical text content but carry
        different doc_ids. Before the fix, non-dify providers were forced into content-based
        deduplication and the second chunk was silently dropped. After the fix, doc_id is used
        as the dedup key for any provider that exposes it, so both chunks must be retained.
        Verifies:
        - Non-dify provider documents with different doc_ids are NOT deduplicated even when
          their page_content is identical.
        """
        # Arrange — same content, different doc_ids, non-dify provider (e.g. Weaviate / Qdrant)
        doc_a = Document(
            page_content="Shared identical content",
            metadata={"doc_id": "doc-from-file-a", "score": 0.85},
            provider="weaviate",
        )
        doc_b = Document(
            page_content="Shared identical content",
            metadata={"doc_id": "doc-from-file-b", "score": 0.82},
            provider="weaviate",
        )
        # Act
        result = RetrievalService._deduplicate_documents([doc_a, doc_b])
        # Assert — both documents must be kept; losing either silently drops a source citation
        assert len(result) == 2
        # Order is irrelevant here; compare as a set of doc_ids.
        doc_ids = {doc.metadata["doc_id"] for doc in result}
        assert doc_ids == {"doc-from-file-a", "doc-from-file-b"}
def test_deduplicate_documents_non_dify_provider_with_same_doc_id(self):
"""
Test that non-dify provider documents sharing the same doc_id are deduplicated by
doc_id key (not by content), and the higher-scored duplicate is retained.
Verifies:
- doc_id-based deduplication now applies to any provider, not only "dify"
- The document with the highest score wins when doc_ids collide
"""
# Arrange
doc_low = Document(
page_content="Content A",
metadata={"doc_id": "chunk-1", "score": 0.5},
provider="qdrant",
)
doc_high = Document(
page_content="Content A",
metadata={"doc_id": "chunk-1", "score": 0.9},
provider="qdrant",
)
# Act
result = RetrievalService._deduplicate_documents([doc_low, doc_high])
# Assert
assert len(result) == 1
assert result[0].metadata["score"] == 0.9
    def test_deduplicate_documents_dify_provider_without_doc_id_falls_back_to_content(self):
        """
        Test that a dify provider document without doc_id still falls back to content-based
        deduplication (no regression from original behaviour).
        Verifies:
        - Absence of doc_id triggers content-based dedup regardless of provider
        - First occurrence is kept when content is identical
        """
        # Arrange — dify docs with no doc_id, same content
        doc1 = Document(
            page_content="Same content",
            metadata={"score": 0.8},
            provider="dify",
        )
        doc2 = Document(
            page_content="Same content",
            metadata={"score": 0.9},
            provider="dify",
        )
        # Act
        result = RetrievalService._deduplicate_documents([doc1, doc2])
        # Assert — collapsed to one; first-seen wins (no score comparison in content branch),
        # so the lower 0.8 score proves doc1 was the survivor.
        assert len(result) == 1
        assert result[0].metadata["score"] == 0.8
# ==================== Metadata Filtering Tests ====================

View File

@ -570,8 +570,7 @@ def test_get_all_providers_normalizes_provider_names_with_model_provider_id() ->
session.scalars.return_value = [openai_provider, gemini_provider]
with (
patch("core.provider_manager.db", SimpleNamespace(engine=object())),
patch("core.provider_manager.Session", return_value=_build_session_context(session)),
patch("core.provider_manager.session_factory.create_session", return_value=_build_session_context(session)),
):
result = ProviderManager._get_all_providers("tenant-id")
@ -595,8 +594,7 @@ def test_provider_grouping_helpers_group_records_by_provider_name(method_name: s
session.scalars.return_value = [openai_primary, openai_secondary, anthropic_record]
with (
patch("core.provider_manager.db", SimpleNamespace(engine=object())),
patch("core.provider_manager.Session", return_value=_build_session_context(session)),
patch("core.provider_manager.session_factory.create_session", return_value=_build_session_context(session)),
):
result = getattr(ProviderManager, method_name)("tenant-id")
@ -611,8 +609,7 @@ def test_get_all_preferred_model_providers_returns_mapping_by_provider_name() ->
session.scalars.return_value = [openai_preference, anthropic_preference]
with (
patch("core.provider_manager.db", SimpleNamespace(engine=object())),
patch("core.provider_manager.Session", return_value=_build_session_context(session)),
patch("core.provider_manager.session_factory.create_session", return_value=_build_session_context(session)),
):
result = ProviderManager._get_all_preferred_model_providers("tenant-id")
@ -626,13 +623,13 @@ def test_get_all_provider_load_balancing_configs_returns_empty_when_cached_flag_
with (
patch("core.provider_manager.redis_client.get", return_value=b"False"),
patch("core.provider_manager.FeatureService.get_features") as mock_get_features,
patch("core.provider_manager.Session") as mock_session_cls,
patch("core.provider_manager.session_factory.create_session") as mock_create_session,
):
result = ProviderManager._get_all_provider_load_balancing_configs("tenant-id")
assert result == {}
mock_get_features.assert_not_called()
mock_session_cls.assert_not_called()
mock_create_session.assert_not_called()
def test_get_all_provider_load_balancing_configs_populates_cache_and_groups_configs() -> None:
@ -642,14 +639,13 @@ def test_get_all_provider_load_balancing_configs_populates_cache_and_groups_conf
session.scalars.return_value = [openai_config, anthropic_config]
with (
patch("core.provider_manager.db", SimpleNamespace(engine=object())),
patch("core.provider_manager.redis_client.get", return_value=None),
patch("core.provider_manager.redis_client.setex") as mock_setex,
patch(
"core.provider_manager.FeatureService.get_features",
return_value=SimpleNamespace(model_load_balancing_enabled=True),
),
patch("core.provider_manager.Session", return_value=_build_session_context(session)),
patch("core.provider_manager.session_factory.create_session", return_value=_build_session_context(session)),
):
result = ProviderManager._get_all_provider_load_balancing_configs("tenant-id")

View File

@ -925,3 +925,78 @@ def test_convert_tool_parameters_type_constant_branch():
)
assert constant == {"text": "fixed"}
def test_convert_tool_parameters_type_model_selector_from_legacy_top_level_config():
    """Legacy configs store model-selector fields at the top level next to an
    empty "value"; conversion must lift those fields into the runtime value."""
    model_param = ToolParameter.get_simple_instance(
        name="vision_llm_model",
        llm_description="vision model",
        typ=ToolParameter.ToolParameterType.MODEL_SELECTOR,
        required=True,
    )
    model_param.form = ToolParameter.ToolParameterForm.FORM
    variable_pool = Mock()
    runtime_parameters = ToolManager._convert_tool_parameters_type(
        parameters=[model_param],
        variable_pool=variable_pool,
        tool_configurations={
            # Legacy shape: selector fields are siblings of "value", not inside it.
            "vision_llm_model": {
                "type": "constant",
                "value": "",
                "provider": "langgenius/tongyi/tongyi",
                "model": "qwen3-vl-plus",
                "model_type": "llm",
                "mode": "chat",
            }
        },
        typ="workflow",
    )
    # The wrapper keys ("type"/"value") are stripped; only selector fields remain.
    assert runtime_parameters == {
        "vision_llm_model": {
            "provider": "langgenius/tongyi/tongyi",
            "model": "qwen3-vl-plus",
            "model_type": "llm",
            "mode": "chat",
        }
    }
def test_convert_tool_parameters_type_model_selector_from_constant_value_config():
    """Modern configs nest the selector dict under "value"; conversion must
    unwrap it unchanged into the runtime parameters."""
    model_param = ToolParameter.get_simple_instance(
        name="tts_model",
        llm_description="tts model",
        typ=ToolParameter.ToolParameterType.MODEL_SELECTOR,
        required=True,
    )
    model_param.form = ToolParameter.ToolParameterForm.FORM
    variable_pool = Mock()
    runtime_parameters = ToolManager._convert_tool_parameters_type(
        parameters=[model_param],
        variable_pool=variable_pool,
        tool_configurations={
            # Canonical shape: the whole selector lives inside "value".
            "tts_model": {
                "type": "constant",
                "value": {
                    "provider": "langgenius/tongyi/tongyi",
                    "model": "qwen3-tts-flash",
                    "model_type": "tts",
                    "language": "Chinese",
                    "voice": "Cherry",
                },
            }
        },
        typ="workflow",
    )
    # The nested value is passed through verbatim as the runtime parameter.
    assert runtime_parameters == {
        "tts_model": {
            "provider": "langgenius/tongyi/tongyi",
            "model": "qwen3-tts-flash",
            "model_type": "tts",
            "language": "Chinese",
            "voice": "Cherry",
        }
    }

View File

@ -5,6 +5,7 @@ from graphon.graph_events import (
NodeRunStreamChunkEvent,
)
from .test_mock_config import MockConfigBuilder
from .test_table_runner import TableTestRunner
@ -44,3 +45,51 @@ def test_tool_in_chatflow():
assert stream_chunk_events[0].chunk == "hello, dify!", (
f"Expected chunk to be 'hello, dify!', but got {stream_chunk_events[0].chunk}"
)
def test_answer_can_render_llm_structured_output_in_chatflow():
    """An answer node referencing {{#llm.structured_output#}} should render the
    LLM's structured output as pretty-printed JSON in the final answer."""
    runner = TableTestRunner()
    fixture_data = runner.workflow_runner.load_fixture("basic_chatflow")
    # Rewrite the fixture's answer template to reference the structured output
    # instead of the plain text channel.
    nodes = fixture_data["workflow"]["graph"]["nodes"]
    answer_node = next(node for node in nodes if node["id"] == "answer")
    answer_node["data"]["answer"] = "{{#llm.structured_output#}}"
    mock_config = (
        MockConfigBuilder()
        .with_node_output(
            "llm",
            {
                "text": "plain text",
                "structured_output": {"type": "greeting"},
                "usage": {
                    "prompt_tokens": 10,
                    "completion_tokens": 5,
                    "total_tokens": 15,
                },
                "finish_reason": "stop",
            },
        )
        .build()
    )
    graph, graph_runtime_state = runner.workflow_runner.create_graph_from_fixture(
        fixture_data=fixture_data,
        query="hello",
        use_mock_factory=True,
        mock_config=mock_config,
    )
    engine = GraphEngine(
        workflow_id="test_workflow",
        graph=graph,
        graph_runtime_state=graph_runtime_state,
        command_channel=InMemoryChannel(),
        config=GraphEngineConfig(),
    )
    events = list(engine.run())
    success_events = [e for e in events if isinstance(e, GraphRunSucceededEvent)]
    assert success_events, "Workflow should complete successfully"
    # The dict is serialized as indented JSON, not str()'d into "{'type': ...}".
    assert success_events[-1].outputs["answer"] == '{\n  "type": "greeting"\n}'

View File

@ -86,3 +86,80 @@ def test_execute_answer():
assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED
assert result.outputs["answer"] == "Today's weather is sunny\nYou are a helpful AI.\n{{img}}\nFin."
def test_execute_answer_renders_structured_output_object_as_json() -> None:
    """When the referenced selector resolves to an object, the answer node must
    render it as indented JSON rather than Python repr text."""
    init_params = build_test_graph_init_params(
        workflow_id="1",
        graph_config={"nodes": [], "edges": []},
        tenant_id="1",
        app_id="1",
        user_id="1",
        user_from=UserFrom.ACCOUNT,
        invoke_from=InvokeFrom.DEBUGGER,
        call_depth=0,
    )
    variable_pool = VariablePool(
        system_variables=build_system_variables(user_id="aaa", files=[]),
        user_inputs={},
        environment_variables=[],
        conversation_variables=[],
    )
    # Seed the pool so the template's selector resolves to a dict value.
    variable_pool.add(["1777539038857", "structured_output"], {"type": "greeting"})
    graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
    node = AnswerNode(
        node_id=str(uuid.uuid4()),
        graph_init_params=init_params,
        graph_runtime_state=graph_runtime_state,
        config=AnswerNodeData(
            title="123",
            type="answer",
            answer="{{#1777539038857.structured_output#}}",
        ),
    )
    result = node._run()
    assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED
    assert result.outputs["answer"] == '{\n  "type": "greeting"\n}'
def test_execute_answer_falls_back_to_plain_selector_text_when_structured_output_missing() -> None:
    """If the selector cannot be resolved, the node falls back to emitting the
    selector path itself as plain text instead of failing."""
    init_params = build_test_graph_init_params(
        workflow_id="1",
        graph_config={"nodes": [], "edges": []},
        tenant_id="1",
        app_id="1",
        user_id="1",
        user_from=UserFrom.ACCOUNT,
        invoke_from=InvokeFrom.DEBUGGER,
        call_depth=0,
    )
    # NOTE: unlike the JSON-rendering test above, the pool is left empty on
    # purpose so the selector has nothing to resolve against.
    variable_pool = VariablePool(
        system_variables=build_system_variables(user_id="aaa", files=[]),
        user_inputs={},
        environment_variables=[],
        conversation_variables=[],
    )
    graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter())
    node = AnswerNode(
        node_id=str(uuid.uuid4()),
        graph_init_params=init_params,
        graph_runtime_state=graph_runtime_state,
        config=AnswerNodeData(
            title="123",
            type="answer",
            answer="{{#1777539038857.structured_output#}}",
        ),
    )
    result = node._run()
    assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED
    assert result.outputs["answer"] == "1777539038857.structured_output"

View File

@ -166,6 +166,71 @@ def test_adapt_node_data_for_graph_migrates_legacy_tool_configurations() -> None
}
def test_adapt_node_data_for_graph_preserves_model_selector_top_level_configurations() -> None:
    """Legacy tool configs with selector fields at the top level (and an empty
    "value") must be migrated into tool_parameters with the fields folded into
    a proper constant value."""
    normalized = adapt_node_data_for_graph(
        {
            "type": BuiltinNodeTypes.TOOL,
            "tool_configurations": {
                # Legacy shape: selector fields are siblings of an empty "value".
                "vision_llm_model": {
                    "type": "constant",
                    "value": "",
                    "provider": "langgenius/tongyi/tongyi",
                    "model": "qwen3-vl-plus",
                    "model_type": "llm",
                    "mode": "chat",
                },
            },
        }
    )
    # The config is moved out of tool_configurations entirely...
    assert normalized["tool_configurations"] == {}
    # ...and the top-level selector fields become the constant's value dict.
    assert normalized["tool_parameters"] == {
        "vision_llm_model": {
            "type": "constant",
            "value": {
                "provider": "langgenius/tongyi/tongyi",
                "model": "qwen3-vl-plus",
                "model_type": "llm",
                "mode": "chat",
            },
        }
    }
def test_adapt_node_data_for_graph_flattens_constant_model_selector_value() -> None:
    """Configs already using the nested constant-value shape must migrate to
    tool_parameters unchanged (no double-wrapping or field loss)."""
    normalized = adapt_node_data_for_graph(
        {
            "type": BuiltinNodeTypes.TOOL,
            "tool_configurations": {
                # Canonical shape: the selector dict already lives under "value".
                "tts_model": {
                    "type": "constant",
                    "value": {
                        "provider": "langgenius/tongyi/tongyi",
                        "model": "qwen3-tts-flash",
                        "model_type": "tts",
                        "language": "Chinese",
                        "voice": "Cherry",
                    },
                },
            },
        }
    )
    assert normalized["tool_configurations"] == {}
    # The entry is carried over verbatim into tool_parameters.
    assert normalized["tool_parameters"] == {
        "tts_model": {
            "type": "constant",
            "value": {
                "provider": "langgenius/tongyi/tongyi",
                "model": "qwen3-tts-flash",
                "model_type": "tts",
                "language": "Chinese",
                "voice": "Cherry",
            },
        }
    }
def test_adapt_node_config_for_graph_rewrites_nested_node_data() -> None:
normalized = adapt_node_config_for_graph(
{

View File

@ -10,14 +10,20 @@ from core.workflow.nodes.knowledge_index import KNOWLEDGE_INDEX_NODE_TYPE
from graphon.entities.base_node_data import BaseNodeData
from graphon.enums import BuiltinNodeTypes, NodeType
from graphon.nodes.code.entities import CodeLanguage
from graphon.nodes.llm.entities import LLMNodeData
from graphon.variables.segments import StringSegment
def _assert_typed_node_config(config, *, node_id: str, node_type: NodeType, version: str = "1") -> None:
_ = node_id
assert isinstance(config, BaseNodeData)
assert config.type == node_type
assert config.version == version
if isinstance(config, BaseNodeData):
assert config.type == node_type
assert config.version == version
return
assert isinstance(config, dict)
assert config["type"] == node_type
assert config["version"] == version
def _node_constructor(*, return_value):
@ -546,6 +552,84 @@ class TestDifyNodeFactoryCreateNode:
assert kwargs["unstructured_api_config"] is sentinel.unstructured_api_config
assert kwargs["http_client"] is sentinel.http_client
def test_build_llm_compatible_node_init_kwargs_preserves_structured_output_switch(self, factory):
node_data = LLMNodeData.model_validate(
{
"type": BuiltinNodeTypes.LLM,
"title": "LLM",
"model": {"provider": "provider", "name": "model", "mode": "chat", "completion_params": {}},
"prompt_template": [{"role": "system", "text": "x"}],
"context": {"enabled": False, "variable_selector": []},
"vision": {"enabled": False},
"structured_output_enabled": True,
"structured_output": {
"schema": {
"type": "object",
"properties": {"type": {"type": "string"}},
"required": ["type"],
}
},
}
)
wrapped_model_instance = sentinel.wrapped_model_instance
memory = sentinel.memory
factory._build_model_instance_for_llm_node = MagicMock(return_value=sentinel.model_instance)
factory._build_memory_for_llm_node = MagicMock(return_value=memory)
with patch.object(node_factory, "DifyPreparedLLM", return_value=wrapped_model_instance) as prepared_llm:
kwargs = factory._build_llm_compatible_node_init_kwargs(
node_class=sentinel.node_class,
node_data=node_data,
wrap_model_instance=True,
include_http_client=True,
include_llm_file_saver=True,
include_prompt_message_serializer=True,
include_retriever_attachment_loader=True,
include_jinja2_template_renderer=True,
)
assert node_data.structured_output_switch_on is True
assert node_data.structured_output_enabled is True
factory._build_model_instance_for_llm_node.assert_called_once_with(node_data)
factory._build_memory_for_llm_node.assert_called_once_with(
node_data=node_data,
model_instance=sentinel.model_instance,
)
prepared_llm.assert_called_once_with(sentinel.model_instance)
assert kwargs["model_instance"] is wrapped_model_instance
def test_create_node_passes_alias_preserving_llm_config_to_constructor(self, monkeypatch, factory):
created_node = object()
constructor = _node_constructor(return_value=created_node)
monkeypatch.setattr(factory, "_resolve_node_class", MagicMock(return_value=constructor))
monkeypatch.setattr(factory, "_build_llm_compatible_node_init_kwargs", MagicMock(return_value={}))
node_config = {
"id": "llm-node-id",
"data": {
"type": BuiltinNodeTypes.LLM,
"title": "LLM",
"model": {"provider": "provider", "name": "model", "mode": "chat", "completion_params": {}},
"prompt_template": [{"role": "system", "text": "x"}],
"context": {"enabled": False, "variable_selector": []},
"vision": {"enabled": False},
"structured_output_enabled": True,
"structured_output": {
"schema": {
"type": "object",
"properties": {"type": {"type": "string"}},
"required": ["type"],
}
},
},
}
factory.create_node(node_config)
config = constructor.call_args.kwargs["config"]
assert isinstance(config, dict)
assert config["structured_output_enabled"] is True
assert "structured_output_switch_on" not in config
@pytest.mark.parametrize(
("node_type", "constructor_name", "expected_extra_kwargs"),
[

View File

@ -22,6 +22,7 @@ from core.workflow.node_runtime import (
DifyPromptMessageSerializer,
DifyRetrieverAttachmentLoader,
DifyToolFileManager,
DifyToolNodeRuntime,
apply_dify_debug_email_recipient,
build_dify_llm_file_saver,
resolve_dify_run_context,
@ -30,6 +31,7 @@ from graphon.file import FileTransferMethod, FileType
from graphon.model_runtime.entities.common_entities import I18nObject
from graphon.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType
from graphon.nodes.human_input.entities import HumanInputNodeData
from graphon.nodes.tool.entities import ToolNodeData, ToolProviderType
from tests.workflow_test_utils import build_test_run_context
@ -334,6 +336,41 @@ def test_dify_human_input_runtime_builds_debug_repository(monkeypatch: pytest.Mo
)
def test_dify_tool_runtime_spec_prefers_tool_parameters_for_runtime_form_values() -> None:
node_data = ToolNodeData(
provider_id="video-mixcut-agent",
provider_type=ToolProviderType.PLUGIN,
provider_name="sawyer-shi/video-mixcut-agent",
tool_name="mixcut",
tool_label="MixCut",
tool_configurations={"count": 2},
tool_parameters={
"vision_llm_model": {
"type": "constant",
"value": {
"provider": "langgenius/tongyi/tongyi",
"model": "qwen3-vl-plus",
"model_type": "llm",
},
}
},
)
spec = DifyToolNodeRuntime._build_tool_runtime_spec(node_data)
assert spec.tool_configurations == {
"count": 2,
"vision_llm_model": {
"type": "constant",
"value": {
"provider": "langgenius/tongyi/tongyi",
"model": "qwen3-vl-plus",
"model_type": "llm",
},
},
}
def test_dify_human_input_runtime_create_form_filters_debugger_delivery_methods() -> None:
repository = MagicMock()
repository.create_form.return_value = sentinel.form

View File

@ -1,8 +1,11 @@
import re
from unittest.mock import MagicMock
import pytest
from factories.file_factory import builders
from factories.file_factory.remote import extract_filename, get_remote_file_info
from graphon.file import FileTransferMethod
class _FakeResponse:
@ -230,3 +233,153 @@ class TestExtractFilename:
"http://example.com/", 'attachment; filename="file%20with%20quotes%20%26%20encoding.txt"'
)
assert result == "file with quotes & encoding.txt"
def test_url_with_query_string(self):
"""Test that query strings are stripped from URL basename."""
result = extract_filename("http://example.com/path/file.txt?signature=abc123&expires=12345", None)
assert result == "file.txt"
def test_url_with_hash_fragment(self):
"""Test that hash fragments are stripped from URL basename."""
result = extract_filename("http://example.com/path/file.txt#section", None)
assert result == "file.txt"
def test_url_with_query_and_fragment(self):
"""Test that both query strings and hash fragments are stripped."""
result = extract_filename("http://example.com/path/file.txt?token=xyz#section", None)
assert result == "file.txt"
def test_signed_url_preserves_filename(self):
"""Test that signed URL parameters don't affect filename extraction."""
result = extract_filename(
"http://storage.example.com/bucket/documents/report.pdf?AWSAccessKeyId=xxx&Signature=yyy&Expires=12345",
None,
)
assert result == "report.pdf"
def test_percent_encoded_filename_with_query_string(self):
"""Test percent-encoded filename with query string is decoded correctly."""
result = extract_filename("http://example.com/path/my%20file.txt?download=true", None)
assert result == "my file.txt"
def test_percent_encoded_filename_with_fragment(self):
"""Test percent-encoded filename with fragment is decoded correctly."""
result = extract_filename("http://example.com/path/my%20file.txt#page=1", None)
assert result == "my file.txt"
def test_complex_percent_encoding_with_query(self):
"""Test complex percent-encoded filename with query parameters."""
result = extract_filename("http://example.com/docs/%E4%B8%AD%E6%96%87%E6%96%87%E4%BB%B6.pdf?v=1", None)
assert result == "中文文件.pdf"
def test_url_with_special_chars_in_query(self):
"""Test that special characters in query string don't affect filename."""
result = extract_filename("http://example.com/file.bin?name=test&path=/some/path", None)
assert result == "file.bin"
def test_malformed_percent_encoding_safe_fallback(self):
"""Test that malformed percent-encoding is handled safely."""
result = extract_filename("http://example.com/path/file%20name%GG.txt?x=1", None)
# %GG is invalid, should be replaced with replacement character
assert "file" in result
assert ".txt" in result
def test_empty_path_with_query_returns_none(self):
"""Test that empty path with query string returns None."""
result = extract_filename("http://example.com/?query=value", None)
assert result is None
def test_path_only_with_query_string(self):
"""Test bare path (not full URL) with query string."""
result = extract_filename("/path/to/file.txt?extra=params", None)
assert result == "file.txt"
class TestBuildFromDatasourceFile:
"""Tests for _build_from_datasource_file extension handling."""
@staticmethod
def _patch_session(monkeypatch: pytest.MonkeyPatch, datasource_file):
"""Stub session_factory.create_session() so it returns the given UploadFile-shaped record."""
session = MagicMock()
session.scalar.return_value = datasource_file
ctx = MagicMock()
ctx.__enter__ = MagicMock(return_value=session)
ctx.__exit__ = MagicMock(return_value=False)
monkeypatch.setattr(builders.session_factory, "create_session", lambda: ctx)
def _make_datasource_file(self, *, key: str, mime_type: str = "text/csv"):
f = MagicMock()
f.id = "file-id"
f.key = key
f.name = key.split("/")[-1]
f.mime_type = mime_type
f.size = 123
f.source_url = f"https://example.com/{key}"
return f
def test_extension_passed_without_doubled_dot(self, monkeypatch: pytest.MonkeyPatch):
"""Regression: standardize_file_type must receive the extension exactly once-prefixed.
Previously the call was ``standardize_file_type(extension="." + extension, ...)`` while
``extension`` already had a leading dot, producing ``"..csv"``. The mitigating
``lstrip(".")`` inside ``standardize_file_type`` masked the bug from end users, but the
argument shape itself was wrong and showed up in any caller that didn't strip dots.
"""
captured: dict = {}
def fake_standardize(*, extension: str = "", mime_type: str = ""):
from graphon.file import FileType
captured["extension"] = extension
captured["mime_type"] = mime_type
return FileType.DOCUMENT
monkeypatch.setattr(builders, "standardize_file_type", fake_standardize)
datasource_file = self._make_datasource_file(key="folder/data.csv", mime_type="text/csv")
self._patch_session(monkeypatch, datasource_file)
access_controller = MagicMock()
access_controller.apply_upload_file_filters = lambda stmt: stmt
file = builders._build_from_datasource_file(
mapping={"datasource_file_id": "file-id", "transfer_method": "datasource_file"},
tenant_id="tenant-id",
transfer_method=FileTransferMethod.DATASOURCE_FILE,
access_controller=access_controller,
)
assert captured["extension"] == ".csv", (
f"standardize_file_type received {captured['extension']!r}; expected single-dot '.csv'"
)
assert captured["mime_type"] == "text/csv"
assert file.extension == ".csv"
def test_extension_falls_back_to_bin_when_key_has_no_dot(self, monkeypatch: pytest.MonkeyPatch):
captured: dict = {}
def fake_standardize(*, extension: str = "", mime_type: str = ""):
from graphon.file import FileType
captured["extension"] = extension
return FileType.CUSTOM
monkeypatch.setattr(builders, "standardize_file_type", fake_standardize)
datasource_file = self._make_datasource_file(key="dotless-key", mime_type="application/octet-stream")
self._patch_session(monkeypatch, datasource_file)
access_controller = MagicMock()
access_controller.apply_upload_file_filters = lambda stmt: stmt
file = builders._build_from_datasource_file(
mapping={"datasource_file_id": "file-id", "transfer_method": "datasource_file"},
tenant_id="tenant-id",
transfer_method=FileTransferMethod.DATASOURCE_FILE,
access_controller=access_controller,
)
assert captured["extension"] == ".bin"
assert file.extension == ".bin"

View File

@ -45,7 +45,7 @@ class TestWorkflowModelValidation:
workflow = Workflow.new(
tenant_id=tenant_id,
app_id=app_id,
type=WorkflowType.WORKFLOW.value,
type=WorkflowType.WORKFLOW,
version="draft",
graph=graph,
features=features,
@ -58,7 +58,7 @@ class TestWorkflowModelValidation:
# Assert
assert workflow.tenant_id == tenant_id
assert workflow.app_id == app_id
assert workflow.type == WorkflowType.WORKFLOW.value
assert workflow.type == WorkflowType.WORKFLOW
assert workflow.version == "draft"
assert workflow.graph == graph
assert workflow.created_by == created_by
@ -68,7 +68,7 @@ class TestWorkflowModelValidation:
def test_workflow_type_enum_values(self):
"""Test WorkflowType enum values."""
# Assert
assert WorkflowType.WORKFLOW.value == "workflow"
assert WorkflowType.WORKFLOW == "workflow"
assert WorkflowType.CHAT.value == "chat"
assert WorkflowType.RAG_PIPELINE.value == "rag-pipeline"
@ -89,7 +89,7 @@ class TestWorkflowModelValidation:
workflow = Workflow.new(
tenant_id=str(uuid4()),
app_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
type=WorkflowType.WORKFLOW,
version="draft",
graph=json.dumps(graph_data),
features="{}",
@ -114,7 +114,7 @@ class TestWorkflowModelValidation:
workflow = Workflow.new(
tenant_id=str(uuid4()),
app_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
type=WorkflowType.WORKFLOW,
version="draft",
graph="{}",
features=json.dumps(features_data),
@ -138,7 +138,7 @@ class TestWorkflowModelValidation:
workflow = Workflow.new(
tenant_id=str(uuid4()),
app_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
type=WorkflowType.WORKFLOW,
version="v1.0",
graph="{}",
features="{}",
@ -176,11 +176,11 @@ class TestWorkflowRunStateTransitions:
tenant_id=tenant_id,
app_id=app_id,
workflow_id=workflow_id,
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.DEBUGGING.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
version="draft",
status=WorkflowExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=created_by,
)
@ -188,9 +188,9 @@ class TestWorkflowRunStateTransitions:
assert workflow_run.tenant_id == tenant_id
assert workflow_run.app_id == app_id
assert workflow_run.workflow_id == workflow_id
assert workflow_run.type == WorkflowType.WORKFLOW.value
assert workflow_run.triggered_from == WorkflowRunTriggeredFrom.DEBUGGING.value
assert workflow_run.status == WorkflowExecutionStatus.RUNNING.value
assert workflow_run.type == WorkflowType.WORKFLOW
assert workflow_run.triggered_from == WorkflowRunTriggeredFrom.DEBUGGING
assert workflow_run.status == WorkflowExecutionStatus.RUNNING
assert workflow_run.created_by == created_by
def test_workflow_run_state_transition_running_to_succeeded(self):
@ -200,21 +200,21 @@ class TestWorkflowRunStateTransitions:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
version="v1.0",
status=WorkflowExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.END_USER.value,
status=WorkflowExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.END_USER,
created_by=str(uuid4()),
)
# Act
workflow_run.status = WorkflowExecutionStatus.SUCCEEDED.value
workflow_run.status = WorkflowExecutionStatus.SUCCEEDED
workflow_run.finished_at = datetime.now(UTC)
workflow_run.elapsed_time = 2.5
# Assert
assert workflow_run.status == WorkflowExecutionStatus.SUCCEEDED.value
assert workflow_run.status == WorkflowExecutionStatus.SUCCEEDED
assert workflow_run.finished_at is not None
assert workflow_run.elapsed_time == 2.5
@ -225,21 +225,21 @@ class TestWorkflowRunStateTransitions:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
version="v1.0",
status=WorkflowExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
)
# Act
workflow_run.status = WorkflowExecutionStatus.FAILED.value
workflow_run.status = WorkflowExecutionStatus.FAILED
workflow_run.error = "Node execution failed: Invalid input"
workflow_run.finished_at = datetime.now(UTC)
# Assert
assert workflow_run.status == WorkflowExecutionStatus.FAILED.value
assert workflow_run.status == WorkflowExecutionStatus.FAILED
assert workflow_run.error == "Node execution failed: Invalid input"
assert workflow_run.finished_at is not None
@ -250,20 +250,20 @@ class TestWorkflowRunStateTransitions:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.DEBUGGING.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
version="draft",
status=WorkflowExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
)
# Act
workflow_run.status = WorkflowExecutionStatus.STOPPED.value
workflow_run.status = WorkflowExecutionStatus.STOPPED
workflow_run.finished_at = datetime.now(UTC)
# Assert
assert workflow_run.status == WorkflowExecutionStatus.STOPPED.value
assert workflow_run.status == WorkflowExecutionStatus.STOPPED
assert workflow_run.finished_at is not None
def test_workflow_run_state_transition_running_to_paused(self):
@ -273,19 +273,19 @@ class TestWorkflowRunStateTransitions:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
version="v1.0",
status=WorkflowExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.END_USER.value,
status=WorkflowExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.END_USER,
created_by=str(uuid4()),
)
# Act
workflow_run.status = WorkflowExecutionStatus.PAUSED.value
workflow_run.status = WorkflowExecutionStatus.PAUSED
# Assert
assert workflow_run.status == WorkflowExecutionStatus.PAUSED.value
assert workflow_run.status == WorkflowExecutionStatus.PAUSED
assert workflow_run.finished_at is None # Not finished when paused
def test_workflow_run_state_transition_paused_to_running(self):
@ -295,19 +295,19 @@ class TestWorkflowRunStateTransitions:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
version="v1.0",
status=WorkflowExecutionStatus.PAUSED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowExecutionStatus.PAUSED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
)
# Act
workflow_run.status = WorkflowExecutionStatus.RUNNING.value
workflow_run.status = WorkflowExecutionStatus.RUNNING
# Assert
assert workflow_run.status == WorkflowExecutionStatus.RUNNING.value
assert workflow_run.status == WorkflowExecutionStatus.RUNNING
def test_workflow_run_with_partial_succeeded_status(self):
"""Test workflow run with partial-succeeded status."""
@ -316,17 +316,17 @@ class TestWorkflowRunStateTransitions:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
version="v1.0",
status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
exceptions_count=2,
)
# Assert
assert workflow_run.status == WorkflowExecutionStatus.PARTIAL_SUCCEEDED.value
assert workflow_run.status == WorkflowExecutionStatus.PARTIAL_SUCCEEDED
assert workflow_run.exceptions_count == 2
def test_workflow_run_with_inputs_and_outputs(self):
@ -340,11 +340,11 @@ class TestWorkflowRunStateTransitions:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
version="v1.0",
status=WorkflowExecutionStatus.SUCCEEDED.value,
created_by_role=CreatorUserRole.END_USER.value,
status=WorkflowExecutionStatus.SUCCEEDED,
created_by_role=CreatorUserRole.END_USER,
created_by=str(uuid4()),
inputs=json.dumps(inputs),
outputs=json.dumps(outputs),
@ -362,11 +362,11 @@ class TestWorkflowRunStateTransitions:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.DEBUGGING.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
version="draft",
status=WorkflowExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
graph=json.dumps(graph),
)
@ -391,11 +391,11 @@ class TestWorkflowRunStateTransitions:
tenant_id=tenant_id,
app_id=app_id,
workflow_id=workflow_id,
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
version="v1.0",
status=WorkflowExecutionStatus.SUCCEEDED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowExecutionStatus.SUCCEEDED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=created_by,
total_tokens=1500,
total_steps=5,
@ -410,7 +410,7 @@ class TestWorkflowRunStateTransitions:
assert result["tenant_id"] == tenant_id
assert result["app_id"] == app_id
assert result["workflow_id"] == workflow_id
assert result["status"] == WorkflowExecutionStatus.SUCCEEDED.value
assert result["status"] == WorkflowExecutionStatus.SUCCEEDED
assert result["total_tokens"] == 1500
assert result["total_steps"] == 5
@ -422,18 +422,18 @@ class TestWorkflowRunStateTransitions:
"tenant_id": str(uuid4()),
"app_id": str(uuid4()),
"workflow_id": str(uuid4()),
"type": WorkflowType.WORKFLOW.value,
"triggered_from": WorkflowRunTriggeredFrom.APP_RUN.value,
"type": WorkflowType.WORKFLOW,
"triggered_from": WorkflowRunTriggeredFrom.APP_RUN,
"version": "v1.0",
"graph": {"nodes": [], "edges": []},
"inputs": {"query": "test"},
"status": WorkflowExecutionStatus.SUCCEEDED.value,
"status": WorkflowExecutionStatus.SUCCEEDED,
"outputs": {"result": "success"},
"error": None,
"elapsed_time": 3.5,
"total_tokens": 2000,
"total_steps": 10,
"created_by_role": CreatorUserRole.ACCOUNT.value,
"created_by_role": CreatorUserRole.ACCOUNT,
"created_by": str(uuid4()),
"created_at": datetime.now(UTC),
"finished_at": datetime.now(UTC),
@ -446,7 +446,7 @@ class TestWorkflowRunStateTransitions:
# Assert
assert workflow_run.id == data["id"]
assert workflow_run.workflow_id == data["workflow_id"]
assert workflow_run.status == WorkflowExecutionStatus.SUCCEEDED.value
assert workflow_run.status == WorkflowExecutionStatus.SUCCEEDED
assert workflow_run.total_tokens == 2000
@ -467,14 +467,14 @@ class TestNodeExecutionRelationships:
tenant_id=tenant_id,
app_id=app_id,
workflow_id=workflow_id,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
workflow_run_id=workflow_run_id,
index=1,
node_id="start",
node_type=BuiltinNodeTypes.START,
title="Start Node",
status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowNodeExecutionStatus.SUCCEEDED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=created_by,
)
@ -498,15 +498,15 @@ class TestNodeExecutionRelationships:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
workflow_run_id=str(uuid4()),
index=2,
predecessor_node_id=predecessor_node_id,
node_id=current_node_id,
node_type=BuiltinNodeTypes.LLM,
title="LLM Node",
status=WorkflowNodeExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowNodeExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
)
@ -528,8 +528,8 @@ class TestNodeExecutionRelationships:
node_id="llm_test",
node_type=BuiltinNodeTypes.LLM,
title="Test LLM",
status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowNodeExecutionStatus.SUCCEEDED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
)
@ -549,14 +549,14 @@ class TestNodeExecutionRelationships:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
workflow_run_id=str(uuid4()),
index=1,
node_id="llm_1",
node_type=BuiltinNodeTypes.LLM,
title="LLM Node",
status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowNodeExecutionStatus.SUCCEEDED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
inputs=json.dumps(inputs),
outputs=json.dumps(outputs),
@ -575,24 +575,24 @@ class TestNodeExecutionRelationships:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
workflow_run_id=str(uuid4()),
index=1,
node_id="code_1",
node_type=BuiltinNodeTypes.CODE,
title="Code Node",
status=WorkflowNodeExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowNodeExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
)
# Act - transition to succeeded
node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value
node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED
node_execution.elapsed_time = 1.2
node_execution.finished_at = datetime.now(UTC)
# Assert
assert node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED.value
assert node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED
assert node_execution.elapsed_time == 1.2
assert node_execution.finished_at is not None
@ -606,20 +606,20 @@ class TestNodeExecutionRelationships:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
workflow_run_id=str(uuid4()),
index=3,
node_id="code_1",
node_type=BuiltinNodeTypes.CODE,
title="Code Node",
status=WorkflowNodeExecutionStatus.FAILED.value,
status=WorkflowNodeExecutionStatus.FAILED,
error=error_message,
created_by_role=CreatorUserRole.ACCOUNT.value,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
)
# Assert
assert node_execution.status == WorkflowNodeExecutionStatus.FAILED.value
assert node_execution.status == WorkflowNodeExecutionStatus.FAILED
assert node_execution.error == error_message
def test_node_execution_with_metadata(self):
@ -637,14 +637,14 @@ class TestNodeExecutionRelationships:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
workflow_run_id=str(uuid4()),
index=1,
node_id="llm_1",
node_type=BuiltinNodeTypes.LLM,
title="LLM Node",
status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowNodeExecutionStatus.SUCCEEDED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
execution_metadata=json.dumps(metadata),
)
@ -660,14 +660,14 @@ class TestNodeExecutionRelationships:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
workflow_run_id=str(uuid4()),
index=1,
node_id="start",
node_type=BuiltinNodeTypes.START,
title="Start",
status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowNodeExecutionStatus.SUCCEEDED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
execution_metadata=None,
)
@ -696,14 +696,14 @@ class TestNodeExecutionRelationships:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
workflow_run_id=str(uuid4()),
index=1,
node_id=f"{node_type}_1",
node_type=node_type,
title=title,
status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowNodeExecutionStatus.SUCCEEDED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
)
@ -734,7 +734,7 @@ class TestGraphConfigurationValidation:
workflow = Workflow.new(
tenant_id=str(uuid4()),
app_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
type=WorkflowType.WORKFLOW,
version="draft",
graph=json.dumps(graph_config),
features="{}",
@ -761,7 +761,7 @@ class TestGraphConfigurationValidation:
workflow = Workflow.new(
tenant_id=str(uuid4()),
app_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
type=WorkflowType.WORKFLOW,
version="draft",
graph=json.dumps(graph_config),
features="{}",
@ -802,7 +802,7 @@ class TestGraphConfigurationValidation:
workflow = Workflow.new(
tenant_id=str(uuid4()),
app_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
type=WorkflowType.WORKFLOW,
version="draft",
graph=json.dumps(graph_config),
features="{}",
@ -835,11 +835,11 @@ class TestGraphConfigurationValidation:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
version="v1.0",
status=WorkflowExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
graph=json.dumps(original_graph),
)
@ -872,7 +872,7 @@ class TestGraphConfigurationValidation:
workflow = Workflow.new(
tenant_id=str(uuid4()),
app_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
type=WorkflowType.WORKFLOW,
version="draft",
graph=json.dumps(graph_config),
features="{}",
@ -912,7 +912,7 @@ class TestGraphConfigurationValidation:
workflow = Workflow.new(
tenant_id=str(uuid4()),
app_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
type=WorkflowType.WORKFLOW,
version="draft",
graph=json.dumps(graph_config),
features="{}",
@ -933,7 +933,7 @@ class TestGraphConfigurationValidation:
workflow = Workflow.new(
tenant_id=str(uuid4()),
app_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
type=WorkflowType.WORKFLOW,
version="draft",
graph=None,
features="{}",
@ -956,11 +956,11 @@ class TestGraphConfigurationValidation:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
version="v1.0",
status=WorkflowExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
inputs=None,
)
@ -978,11 +978,11 @@ class TestGraphConfigurationValidation:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
type=WorkflowType.WORKFLOW.value,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN.value,
type=WorkflowType.WORKFLOW,
triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
version="v1.0",
status=WorkflowExecutionStatus.RUNNING.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowExecutionStatus.RUNNING,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
outputs=None,
)
@ -1000,14 +1000,14 @@ class TestGraphConfigurationValidation:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
workflow_run_id=str(uuid4()),
index=1,
node_id="start",
node_type=BuiltinNodeTypes.START,
title="Start",
status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowNodeExecutionStatus.SUCCEEDED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
inputs=None,
)
@ -1025,14 +1025,14 @@ class TestGraphConfigurationValidation:
tenant_id=str(uuid4()),
app_id=str(uuid4()),
workflow_id=str(uuid4()),
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
workflow_run_id=str(uuid4()),
index=1,
node_id="start",
node_type=BuiltinNodeTypes.START,
title="Start",
status=WorkflowNodeExecutionStatus.SUCCEEDED.value,
created_by_role=CreatorUserRole.ACCOUNT.value,
status=WorkflowNodeExecutionStatus.SUCCEEDED,
created_by_role=CreatorUserRole.ACCOUNT,
created_by=str(uuid4()),
outputs=None,
)

View File

@ -0,0 +1,70 @@
import base64
import hashlib
import os
from io import BytesIO
from types import SimpleNamespace
import pytest
from _pytest.monkeypatch import MonkeyPatch
from baidubce.services.bos.bos_client import BosClient
from tests.unit_tests.oss.__mock.base import (
get_example_bucket,
get_example_data,
get_example_filename,
get_example_filepath,
)
class MockBaiduObsClass:
    """Stand-in for baidubce's BosClient used when MOCK_SWITCH is enabled.

    Every method asserts that the storage layer forwards exactly the
    bucket/key/payload supplied by the shared fixtures in __mock.base.
    """

    def __init__(self, config=None):
        # Expected values all come from the shared OSS mock fixtures.
        self.bucket_name = get_example_bucket()
        self.key = get_example_filename()
        self.content = get_example_data()
        self.filepath = get_example_filepath()

    def put_object(self, bucket_name, key, data, content_length=None, content_md5=None, **kwargs):
        # An upload must carry the exact payload plus matching length/MD5 headers.
        assert (bucket_name, key) == (self.bucket_name, self.key)
        assert data == self.content
        assert content_length == len(self.content)
        digest = hashlib.md5(self.content).digest()
        assert content_md5 == base64.standard_b64encode(digest)

    def get_object(self, bucket_name, key, **kwargs):
        # Mirror the BosClient response shape: an object whose .data
        # attribute is a readable stream over the stored bytes.
        assert (bucket_name, key) == (self.bucket_name, self.key)
        return SimpleNamespace(data=BytesIO(self.content))

    def get_object_to_file(self, bucket_name, key, file_name, **kwargs):
        assert (bucket_name, key) == (self.bucket_name, self.key)
        assert file_name == self.filepath

    def get_object_meta_data(self, bucket_name, key, **kwargs):
        # A 200 status is all the storage backend inspects for existence checks.
        assert (bucket_name, key) == (self.bucket_name, self.key)
        return SimpleNamespace(status=200)

    def delete_object(self, bucket_name, key, **kwargs):
        assert (bucket_name, key) == (self.bucket_name, self.key)
# Mocking is opt-in: only active when the MOCK_SWITCH env var is "true".
MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true"


@pytest.fixture
def setup_baidu_obs_mock(monkeypatch: MonkeyPatch):
    """Swap BosClient's methods for MockBaiduObsClass for one test's duration."""
    if MOCK:
        for method_name in (
            "__init__",
            "put_object",
            "get_object",
            "get_object_to_file",
            "get_object_meta_data",
            "delete_object",
        ):
            monkeypatch.setattr(BosClient, method_name, getattr(MockBaiduObsClass, method_name))
    yield
    if MOCK:
        # Explicit undo restores the real client before the next test.
        monkeypatch.undo()

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,60 @@
from unittest.mock import MagicMock, patch
import pytest
from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration
from extensions.storage.baidu_obs_storage import BaiduObsStorage
from tests.unit_tests.oss.__mock.base import (
BaseStorageTest,
get_example_bucket,
)
pytest_plugins = ("tests.unit_tests.oss.__mock.baidu_obs",)
class TestBaiduObs(BaseStorageTest):
    """Runs the shared BaseStorageTest suite against BaiduObsStorage."""

    @pytest.fixture(autouse=True)
    def setup_method(self, setup_baidu_obs_mock):
        """Executed before each test method."""
        # Neutralize the SDK credential/config constructors so BaiduObsStorage
        # can be instantiated without real Baidu OBS settings; BosClient
        # behavior comes from setup_baidu_obs_mock when MOCK_SWITCH is on.
        with (
            patch.object(BceCredentials, "__init__", return_value=None),
            patch.object(BceClientConfiguration, "__init__", return_value=None),
        ):
            self.storage = BaiduObsStorage()
            # Point the storage at the bucket the mock client expects.
            self.storage.bucket_name = get_example_bucket()
class TestBaiduObsConfiguration:
    """Verifies BaiduObsStorage wires dify_config values into the Baidu SDK."""

    def test_init_with_config(self):
        # Fake application settings carrying the four Baidu OBS options.
        fake_settings = MagicMock()
        fake_settings.BAIDU_OBS_BUCKET_NAME = "test-bucket"
        fake_settings.BAIDU_OBS_ACCESS_KEY = "test-access-key"
        fake_settings.BAIDU_OBS_SECRET_KEY = "test-secret-key"
        fake_settings.BAIDU_OBS_ENDPOINT = "https://bj.bcebos.com"

        # Sentinels returned by the patched SDK constructors.
        credentials_obj = MagicMock(name="credentials")
        client_config = MagicMock(name="config")
        bos_client = MagicMock(name="client")

        with (
            patch("extensions.storage.baidu_obs_storage.dify_config", fake_settings),
            patch(
                "extensions.storage.baidu_obs_storage.BceCredentials", return_value=credentials_obj
            ) as credentials,
            patch(
                "extensions.storage.baidu_obs_storage.BceClientConfiguration", return_value=client_config
            ) as configuration,
            patch("extensions.storage.baidu_obs_storage.BosClient", return_value=bos_client) as client_cls,
        ):
            storage = BaiduObsStorage()

        # The storage keeps the configured bucket name and the built client.
        assert storage.bucket_name == "test-bucket"
        assert storage.client == bos_client

        # Each SDK constructor is invoked exactly once with config-derived values.
        credentials.assert_called_once_with(
            access_key_id="test-access-key",
            secret_access_key="test-secret-key",
        )
        configuration.assert_called_once_with(
            credentials=credentials_obj,
            endpoint="https://bj.bcebos.com",
        )
        client_cls.assert_called_once_with(config=client_config)

View File

@ -33,42 +33,6 @@ class TestDraftVarLoaderSimple:
fallback_variables=[],
)
def test_load_offloaded_variable_string_type_unit(self, draft_var_loader):
"""Test _load_offloaded_variable with string type - isolated unit test."""
# Create mock objects
upload_file = Mock(spec=UploadFile)
upload_file.key = "storage/key/test.txt"
variable_file = Mock(spec=WorkflowDraftVariableFile)
variable_file.value_type = SegmentType.STRING
variable_file.upload_file = upload_file
draft_var = Mock(spec=WorkflowDraftVariable)
draft_var.id = "draft-var-id"
draft_var.node_id = "test-node-id"
draft_var.name = "test_variable"
draft_var.description = "test description"
draft_var.get_selector.return_value = ["test-node-id", "test_variable"]
draft_var.variable_file = variable_file
test_content = "This is the full string content"
with patch("services.workflow_draft_variable_service.storage") as mock_storage:
mock_storage.load.return_value = test_content.encode()
# Execute the method
selector_tuple, variable = draft_var_loader._load_offloaded_variable(draft_var)
# Verify results
assert selector_tuple == ("test-node-id", "test_variable")
assert variable.id == "draft-var-id"
assert variable.name == "test_variable"
assert variable.description == "test description"
assert variable.value == test_content
# Verify storage was called correctly
mock_storage.load.assert_called_once_with("storage/key/test.txt")
def test_load_offloaded_variable_object_type_unit(self, draft_var_loader):
"""Test _load_offloaded_variable with object type - isolated unit test."""
# Create mock objects
@ -139,47 +103,6 @@ class TestDraftVarLoaderSimple:
result = draft_var_loader._selector_to_tuple(selector)
assert result == ("node_id", "var_name")
def test_load_offloaded_variable_number_type_unit(self, draft_var_loader):
"""Test _load_offloaded_variable with number type - isolated unit test."""
# Create mock objects
upload_file = Mock(spec=UploadFile)
upload_file.key = "storage/key/test_number.json"
variable_file = Mock(spec=WorkflowDraftVariableFile)
variable_file.value_type = SegmentType.NUMBER
variable_file.upload_file = upload_file
draft_var = Mock(spec=WorkflowDraftVariable)
draft_var.id = "draft-var-id"
draft_var.node_id = "test-node-id"
draft_var.name = "test_number"
draft_var.description = "test number description"
draft_var.get_selector.return_value = ["test-node-id", "test_number"]
draft_var.variable_file = variable_file
test_number = 123.45
test_json_content = json.dumps(test_number)
with patch("services.workflow_draft_variable_service.storage") as mock_storage:
mock_storage.load.return_value = test_json_content.encode()
from graphon.variables.segments import FloatSegment
mock_segment = FloatSegment(value=test_number)
draft_var.build_segment_from_serialized_value.return_value = mock_segment
# Execute the method
selector_tuple, variable = draft_var_loader._load_offloaded_variable(draft_var)
# Verify results
assert selector_tuple == ("test-node-id", "test_number")
assert variable.id == "draft-var-id"
assert variable.name == "test_number"
assert variable.description == "test number description"
# Verify method calls
mock_storage.load.assert_called_once_with("storage/key/test_number.json")
draft_var.build_segment_from_serialized_value.assert_called_once_with(SegmentType.NUMBER, test_number)
def test_load_offloaded_variable_array_type_unit(self, draft_var_loader):
"""Test _load_offloaded_variable with array type - isolated unit test."""
# Create mock objects
@ -229,12 +152,13 @@ class TestDraftVarLoaderSimple:
variable_file.value_type = SegmentType.FILE
variable_file.upload_file = upload_file
draft_var = WorkflowDraftVariable()
draft_var.id = "draft-var-id"
draft_var.app_id = "app-1"
draft_var.node_id = "test-node-id"
draft_var.name = "test_file"
draft_var.description = "test file description"
draft_var = WorkflowDraftVariable(
id="draft-var-id",
app_id="app-1",
node_id="test-node-id",
name="test_file",
description="test file description",
)
draft_var._set_selector(["test-node-id", "test_file"])
draft_var.variable_file = variable_file

View File

@ -200,7 +200,7 @@ class TestDraftVariableSaver:
user=mock_user,
)
def test_draft_saver_with_small_variables(self, draft_saver, mock_session):
def test_draft_saver_with_small_variables(self, draft_saver: DraftVariableSaver, mock_session):
with patch(
"services.workflow_draft_variable_service.DraftVariableSaver._try_offload_large_variable", autospec=True
) as _mock_try_offload:
@ -212,18 +212,21 @@ class TestDraftVariableSaver:
assert draft_var.file_id is None
_mock_try_offload.return_value = None
def test_draft_saver_with_large_variables(self, draft_saver, mock_session):
def test_draft_saver_with_large_variables(self, draft_saver: DraftVariableSaver, mock_session):
with patch(
"services.workflow_draft_variable_service.DraftVariableSaver._try_offload_large_variable", autospec=True
) as _mock_try_offload:
mock_segment = StringSegment(value="small value")
mock_draft_var_file = WorkflowDraftVariableFile(
id=str(uuidv7()),
tenant_id=str(uuidv7()),
app_id=str(uuidv7()),
user_id=str(uuidv7()),
size=1024,
length=10,
value_type=SegmentType.ARRAY_STRING,
upload_file_id=str(uuid.uuid4()),
upload_file_id=str(uuidv7()),
)
mock_draft_var_file.id = str(uuidv7())
_mock_try_offload.return_value = mock_segment, mock_draft_var_file
draft_var = draft_saver._create_draft_variable(name="small_var", value=mock_segment, visible=True)

100
api/uv.lock generated
View File

@ -481,7 +481,7 @@ wheels = [
[[package]]
name = "bce-python-sdk"
version = "0.9.70"
version = "0.9.71"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "crc32c" },
@ -489,9 +489,9 @@ dependencies = [
{ name = "pycryptodome" },
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f7/a9/7c21a9073eb9ad7e8cacf6f8a0e47c0d01ad7bf8fd8e0dc42164b117d60b/bce_python_sdk-0.9.70.tar.gz", hash = "sha256:3b37fd7448278dd33f745a6a23198a2cc2490fded9cb8d59b72500784853df4e", size = 299967, upload-time = "2026-04-14T12:02:42.034Z" }
sdist = { url = "https://files.pythonhosted.org/packages/5a/74/72058f098b9e7184376f2b3d4c1d233ca7fdc52d0f527078f3ce4d9828b9/bce_python_sdk-0.9.71.tar.gz", hash = "sha256:7a917edaee39082694776e25a9e6556ec8072400a3be649f28eb13f9c7a0b5b5", size = 301508, upload-time = "2026-04-28T06:23:21.061Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/2d/70fc866ff98d1f6bd75b0a4235694129b3c519b014254d7bcfc02ffe1bee/bce_python_sdk-0.9.70-py3-none-any.whl", hash = "sha256:fd1f31113e4a8dca314f040662b7caf07ec11cf896c5da232627a9a2c9d2e3a1", size = 415660, upload-time = "2026-04-14T12:02:40.034Z" },
{ url = "https://files.pythonhosted.org/packages/2d/2d/821ae8878dc36b77e56bb7e5dbf9a8e73209c11d38c0ba6b38b5778668ae/bce_python_sdk-0.9.71-py3-none-any.whl", hash = "sha256:9f64a99267616456bac487983d92cc778720bf4f102c8931e8e38aea3cb63268", size = 417000, upload-time = "2026-04-28T06:23:19.078Z" },
]
[[package]]
@ -604,29 +604,29 @@ wheels = [
[[package]]
name = "boto3"
version = "1.42.96"
version = "1.43.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore" },
{ name = "jmespath" },
{ name = "s3transfer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a6/2d/69fb3acd50bab83fb295c167d33c4b653faeb5fb0f42bfca4d9b69d6fb68/boto3-1.42.96.tar.gz", hash = "sha256:b38a9e4a3fbbee9017252576f1379780d0a5814768676c08df2f539d31fcdd68", size = 113203, upload-time = "2026-04-24T19:47:18.677Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f2/50/ea184e159c4ac64fef816a72094fb8656eb071361a39ed22c0e3b15a35b4/boto3-1.43.3.tar.gz", hash = "sha256:7c7777862ffc898f05efa566032bbabfe226dbb810e35ec11125817f128bc5c5", size = 113111, upload-time = "2026-05-04T19:34:09.731Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2b/9d/b3f617d011c42eb804d993103b8fa9acdce153e181a3042f58bfe33d7cb4/boto3-1.42.96-py3-none-any.whl", hash = "sha256:2f4566da2c209a98bdbfc874d813ef231c84ad24e4f815e9bc91de5f63351a24", size = 140557, upload-time = "2026-04-24T19:47:15.824Z" },
{ url = "https://files.pythonhosted.org/packages/c8/ad/8a6946a329f0127322108e537dc1c0d9f8eea4f1d1231702c073d2e85f46/boto3-1.43.3-py3-none-any.whl", hash = "sha256:fb9fe51849ef2a78198d582756fc06f14f7de27f73e0fa90275d6aa4171eb4d0", size = 140501, upload-time = "2026-05-04T19:34:07.991Z" },
]
[[package]]
name = "boto3-stubs"
version = "1.42.96"
version = "1.43.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore-stubs" },
{ name = "types-s3transfer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/77/86/65f45f84621cccc2471871088bab8fe515b4346ba9e48d9001484ec440d6/boto3_stubs-1.42.96.tar.gz", hash = "sha256:1e7819c34d1eae8e5e3cfaf9d144fdcad65aad184b380488871de1d0b2851879", size = 102691, upload-time = "2026-04-24T20:25:13.984Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8a/7f/399bcdeaa60a89aafe5292c8364c313177d22b886dffc1bd7b56fe817900/boto3_stubs-1.43.2.tar.gz", hash = "sha256:0d46636f3e761a92070114b39a76b154c5da6c5794c890e1440a7f191bf1ff2e", size = 102658, upload-time = "2026-05-01T20:31:36.963Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/51/bdac1ff9fd4321091183776c5adffce5fc7b4d0fec7e38af9064e24a2497/boto3_stubs-1.42.96-py3-none-any.whl", hash = "sha256:2c112e257f40006147a53f6f62075804689154271973b2807f5656feaa804216", size = 70668, upload-time = "2026-04-24T20:25:09.736Z" },
{ url = "https://files.pythonhosted.org/packages/da/df/17647562444b2047ca325eaaf2fea738571822b7b4efdaa6bacf0fd4fff9/boto3_stubs-1.43.2-py3-none-any.whl", hash = "sha256:941f2907236223a1209704eaf708d3cdf1ecc8695618c558f9fb9e23e90c513b", size = 70653, upload-time = "2026-05-01T20:31:30.057Z" },
]
[package.optional-dependencies]
@ -636,16 +636,16 @@ bedrock-runtime = [
[[package]]
name = "botocore"
version = "1.42.96"
version = "1.43.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jmespath" },
{ name = "python-dateutil" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/61/77/2c333622a1d47cf5bf73cdcab0cb6c92addafbef2ec05f81b9f75687d9e5/botocore-1.42.96.tar.gz", hash = "sha256:75b3b841ffacaa944f645196655a21ca777591dd8911e732bfb6614545af0250", size = 15263344, upload-time = "2026-04-24T19:47:05.283Z" }
sdist = { url = "https://files.pythonhosted.org/packages/74/ac/cd55f886e17b6b952dbc95b792d3645a73d58586a1400ababe54406073bd/botocore-1.43.3.tar.gz", hash = "sha256:eac6da0fffccf87888ebf4d89f0b2378218a707efa748cd955b838995e944695", size = 15308705, upload-time = "2026-05-04T19:33:56.28Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/45/56/152c3a859ca1b9d77ed16deac3cf81682013677c68cf5715698781fc81bd/botocore-1.42.96-py3-none-any.whl", hash = "sha256:db2c3e2006628be6fde81a24124a6563c363d6982fb92728837cf174bad9d98a", size = 14945920, upload-time = "2026-04-24T19:47:00.323Z" },
{ url = "https://files.pythonhosted.org/packages/be/99/1d9e296edf244f47e0508032f20999f8fd40704dd3c5b601fed099424eb6/botocore-1.43.3-py3-none-any.whl", hash = "sha256:ec0769eb0f7c5034856bb406a92698dbc02a3d4be0f78a384747106b161d8ea3", size = 14989027, upload-time = "2026-05-04T19:33:50.81Z" },
]
[[package]]
@ -1289,7 +1289,7 @@ wheels = [
[[package]]
name = "dify-api"
version = "1.13.3"
version = "1.14.0"
source = { virtual = "." }
dependencies = [
{ name = "aliyun-log-python-sdk" },
@ -1578,7 +1578,7 @@ requires-dist = [
{ name = "aliyun-log-python-sdk", specifier = ">=0.9.44,<1.0.0" },
{ name = "azure-identity", specifier = ">=1.25.3,<2.0.0" },
{ name = "bleach", specifier = ">=6.3.0" },
{ name = "boto3", specifier = ">=1.42.96" },
{ name = "boto3", specifier = ">=1.43.3" },
{ name = "celery", specifier = ">=5.6.3" },
{ name = "croniter", specifier = ">=6.2.2" },
{ name = "fastopenapi", extras = ["flask"], specifier = "~=0.7.0" },
@ -1592,8 +1592,8 @@ requires-dist = [
{ name = "gevent", specifier = ">=26.4.0" },
{ name = "gevent-websocket", specifier = ">=0.10.1" },
{ name = "gmpy2", specifier = ">=2.3.0" },
{ name = "google-api-python-client", specifier = ">=2.194.0" },
{ name = "google-cloud-aiplatform", specifier = ">=1.148.1,<2.0.0" },
{ name = "google-api-python-client", specifier = ">=2.195.0" },
{ name = "google-cloud-aiplatform", specifier = ">=1.149.0,<2.0.0" },
{ name = "graphon", specifier = "~=0.2.2" },
{ name = "gunicorn", specifier = ">=25.3.0" },
{ name = "httpx", extras = ["socks"], specifier = ">=0.28.1,<1.0.0" },
@ -1619,12 +1619,12 @@ requires-dist = [
[package.metadata.requires-dev]
dev = [
{ name = "basedpyright", specifier = ">=1.39.3" },
{ name = "boto3-stubs", specifier = ">=1.42.96" },
{ name = "boto3-stubs", specifier = ">=1.43.2" },
{ name = "celery-types", specifier = ">=0.23.0" },
{ name = "coverage", specifier = ">=7.13.4" },
{ name = "dotenv-linter", specifier = ">=0.7.0" },
{ name = "faker", specifier = ">=40.15.0" },
{ name = "hypothesis", specifier = ">=6.152.3" },
{ name = "hypothesis", specifier = ">=6.152.4" },
{ name = "import-linter", specifier = ">=2.3" },
{ name = "lxml-stubs", specifier = ">=0.5.1" },
{ name = "mypy", specifier = ">=1.20.2" },
@ -1642,8 +1642,8 @@ dev = [
{ name = "testcontainers", specifier = ">=4.14.2" },
{ name = "types-aiofiles", specifier = ">=25.1.0" },
{ name = "types-beautifulsoup4", specifier = ">=4.12.0" },
{ name = "types-cachetools", specifier = ">=6.2.0" },
{ name = "types-cffi", specifier = ">=2.0.0.20260408" },
{ name = "types-cachetools", specifier = ">=7.0.0.20260503" },
{ name = "types-cffi", specifier = ">=2.0.0.20260429" },
{ name = "types-colorama", specifier = ">=0.4.15" },
{ name = "types-defusedxml", specifier = ">=0.7.0" },
{ name = "types-deprecated", specifier = ">=1.3.1" },
@ -1651,7 +1651,7 @@ dev = [
{ name = "types-flask-cors", specifier = ">=6.0.0" },
{ name = "types-flask-migrate", specifier = ">=4.1.0" },
{ name = "types-gevent", specifier = ">=26.4.0" },
{ name = "types-greenlet", specifier = ">=3.4.0" },
{ name = "types-greenlet", specifier = ">=3.5.0.20260428" },
{ name = "types-html5lib", specifier = ">=1.1.11" },
{ name = "types-jmespath", specifier = ">=1.1.0.20260408" },
{ name = "types-markdown", specifier = ">=3.10.2" },
@ -1660,7 +1660,7 @@ dev = [
{ name = "types-olefile", specifier = ">=0.47.0" },
{ name = "types-openpyxl", specifier = ">=3.1.5" },
{ name = "types-pexpect", specifier = ">=4.9.0" },
{ name = "types-protobuf", specifier = ">=7.34.1" },
{ name = "types-protobuf", specifier = ">=7.34.1.20260503" },
{ name = "types-psutil", specifier = ">=7.2.2" },
{ name = "types-psycopg2", specifier = ">=2.9.21.20260422" },
{ name = "types-pygments", specifier = ">=2.20.0" },
@ -1683,7 +1683,7 @@ dev = [
]
storage = [
{ name = "azure-storage-blob", specifier = ">=12.28.0" },
{ name = "bce-python-sdk", specifier = ">=0.9.70" },
{ name = "bce-python-sdk", specifier = ">=0.9.71" },
{ name = "cos-python-sdk-v5", specifier = ">=1.9.42" },
{ name = "esdk-obs-python", specifier = ">=3.22.2" },
{ name = "google-cloud-storage", specifier = ">=3.10.1" },
@ -2719,7 +2719,7 @@ grpc = [
[[package]]
name = "google-api-python-client"
version = "2.194.0"
version = "2.195.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "google-api-core" },
@ -2728,9 +2728,9 @@ dependencies = [
{ name = "httplib2" },
{ name = "uritemplate" },
]
sdist = { url = "https://files.pythonhosted.org/packages/60/ab/e83af0eb043e4ccc49571ca7a6a49984e9d00f4e9e6e6f1238d60bc84dce/google_api_python_client-2.194.0.tar.gz", hash = "sha256:db92647bd1a90f40b79c9618461553c2b20b6a43ce7395fa6de07132dc14f023", size = 14443469, upload-time = "2026-04-08T23:07:35.757Z" }
sdist = { url = "https://files.pythonhosted.org/packages/69/07/08d759b9cb10f48af14b25262dd0d6685ca8cda6c1f9e8a8109f57457205/google_api_python_client-2.195.0.tar.gz", hash = "sha256:c72cf2661c3addf01c880ce60541e83e1df354644b874f7f9d8d5ed2070446ae", size = 14584819, upload-time = "2026-04-30T21:51:50.638Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b0/34/5a624e49f179aa5b0cb87b2ce8093960299030ff40423bfbde09360eb908/google_api_python_client-2.194.0-py3-none-any.whl", hash = "sha256:61eaaac3b8fc8fdf11c08af87abc3d1342d1b37319cc1b57405f86ef7697e717", size = 15016514, upload-time = "2026-04-08T23:07:33.093Z" },
{ url = "https://files.pythonhosted.org/packages/21/b9/2c71095e31fff57668fec7c07ac897df065f15521d070e63229e13689590/google_api_python_client-2.195.0-py3-none-any.whl", hash = "sha256:753e62057f23049a89534bea0162b60fe391b85fb86d80bcdf884d05ec91c5bf", size = 15162418, upload-time = "2026-04-30T21:51:47.444Z" },
]
[[package]]
@ -2766,7 +2766,7 @@ wheels = [
[[package]]
name = "google-cloud-aiplatform"
version = "1.148.1"
version = "1.149.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "docstring-parser" },
@ -2782,9 +2782,9 @@ dependencies = [
{ name = "pydantic" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/f3/b2a9417014c93858a2e3266134f931eefd972c2d410b25d7b8782fc6f143/google_cloud_aiplatform-1.148.1.tar.gz", hash = "sha256:75d605fba34e68714bd08e1e482755d0a6e3ae972805f809d088e686c30879e7", size = 10278758, upload-time = "2026-04-17T23:45:26.738Z" }
sdist = { url = "https://files.pythonhosted.org/packages/42/2c/fba4adc56f74c0ee0fbd91a39d414ca2c3588dd8b71f9be8a507015ca886/google_cloud_aiplatform-1.149.0.tar.gz", hash = "sha256:a4d73485bf1d727a9e1bbbd13d08d7031490686bbf7d125eb905c1a6c1559a35", size = 10451466, upload-time = "2026-04-27T23:11:54.513Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/56/5b/e3515d7bbba602c2b0f6a0da5431785e897252443682e4735d0e6873dc8f/google_cloud_aiplatform-1.148.1-py2.py3-none-any.whl", hash = "sha256:035101e2d8e65c6a706cc3930b2452de7ddcbde50dd130320fcea0d8b03b0c5a", size = 8434481, upload-time = "2026-04-17T23:45:22.919Z" },
{ url = "https://files.pythonhosted.org/packages/bf/a0/27719ba23967ef62e52a1d54e013e0fc174bdab8dd84fb300bab9bf0d4a3/google_cloud_aiplatform-1.149.0-py2.py3-none-any.whl", hash = "sha256:e6b5299fa5d303e971cb29a19f03fdbb7b1e3b9d2faa3a788ca933341fba2f2e", size = 8570410, upload-time = "2026-04-27T23:11:50.495Z" },
]
[[package]]
@ -3319,14 +3319,14 @@ wheels = [
[[package]]
name = "hypothesis"
version = "6.152.3"
version = "6.152.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "sortedcontainers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/70/90/fc0b263b6f2622e5f8d2aa93f2e95ba79718a5faa7d2a74bfab10d6b0905/hypothesis-6.152.3.tar.gz", hash = "sha256:c4e5300d3755b6c8a270a28fe5abff40153e927328e89d2bb0229c1384618998", size = 466478, upload-time = "2026-04-26T17:31:07.657Z" }
sdist = { url = "https://files.pythonhosted.org/packages/fa/c7/3147bd903d6b18324a016d43a259cf5b4bb4545e1ead6773dc8a0374e70a/hypothesis-6.152.4.tar.gz", hash = "sha256:31c8f9ce619716f543e2710b489b1633c833586641d9e6c94cee03f109a5afc4", size = 466444, upload-time = "2026-04-27T20:18:37.594Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/90/38/15475b91a4c12721d2be3349e9d6cf8649c76ed9bc1287e2de7c8d06c261/hypothesis-6.152.3-py3-none-any.whl", hash = "sha256:4b47f00916c858ed49cf870a2f08b04e5fff5afae0bb78f3b4a6d9c74fd6c7bc", size = 532154, upload-time = "2026-04-26T17:31:04.42Z" },
{ url = "https://files.pythonhosted.org/packages/19/89/0f50dd0d92e8a7dffc24f69ab910ff81db89b2f082ba42682bd57695e4d2/hypothesis-6.152.4-py3-none-any.whl", hash = "sha256:e730fd93c7578182efadc7f90b3c5437ee4d55edf738930eb5043c81ac1d97e8", size = 532145, upload-time = "2026-04-27T20:18:35.043Z" },
]
[[package]]
@ -3969,11 +3969,11 @@ wheels = [
[[package]]
name = "mypy-boto3-bedrock-runtime"
version = "1.42.42"
version = "1.43.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/46/bb/65dc1b2c5796a6ab5f60bdb57343bd6c3ecb82251c580eca415c8548333e/mypy_boto3_bedrock_runtime-1.42.42.tar.gz", hash = "sha256:3a4088218478b6fbbc26055c03c95bee4fc04624a801090b3cce3037e8275c8d", size = 29840, upload-time = "2026-02-04T20:53:05.999Z" }
sdist = { url = "https://files.pythonhosted.org/packages/21/f2/61519c0162307b1e4d47f63ed8b25390874640934f3d2d25c5d6c5078dd8/mypy_boto3_bedrock_runtime-1.43.0.tar.gz", hash = "sha256:19fc3167de6e66dd7a0ab293adc55c93e2fd67be35e8ab4fc3a7523a380752ce", size = 29903, upload-time = "2026-04-29T22:57:57.561Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/00/43/7ea062f2228f47b5779dcfa14dab48d6e29f979b35d1a5102b0ba80b9c1b/mypy_boto3_bedrock_runtime-1.42.42-py3-none-any.whl", hash = "sha256:b2d16eae22607d0685f90796b3a0afc78c0b09d45872e00eafd634a31dd9358f", size = 36077, upload-time = "2026-02-04T20:53:01.768Z" },
{ url = "https://files.pythonhosted.org/packages/40/4d/7e4c4d55af23b2b1304d6814db8c406beab7977056963200230417c1a2db/mypy_boto3_bedrock_runtime-1.43.0-py3-none-any.whl", hash = "sha256:a125296f992093d58bdcd95176002680fa81ca8a8b8bdf02afad7e5f2d8966aa", size = 36172, upload-time = "2026-04-29T22:57:54.777Z" },
]
[[package]]
@ -5914,14 +5914,14 @@ wheels = [
[[package]]
name = "s3transfer"
version = "0.16.0"
version = "0.17.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore" },
]
sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827, upload-time = "2025-12-01T02:30:59.114Z" }
sdist = { url = "https://files.pythonhosted.org/packages/9b/ec/7c692cde9125b77e84b307354d4fb705f98b8ccad59a036d5957ca75bfc3/s3transfer-0.17.0.tar.gz", hash = "sha256:9edeb6d1c3c2f89d6050348548834ad8289610d886e5bf7b7207728bd43ce33a", size = 155337, upload-time = "2026-04-29T22:07:36.33Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" },
{ url = "https://files.pythonhosted.org/packages/87/72/c6c32d2b657fa3dad1de340254e14390b1e334ce38268b7ad51abda3c8c2/s3transfer-0.17.0-py3-none-any.whl", hash = "sha256:ce3801712acf4ad3e89fb9990df97b4972e93f4b3b0004d214be5bce12814c20", size = 86811, upload-time = "2026-04-29T22:07:34.966Z" },
]
[[package]]
@ -6585,23 +6585,23 @@ wheels = [
[[package]]
name = "types-cachetools"
version = "6.2.0.20260408"
version = "7.0.0.20260503"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ec/61/475b0e8f4a92e5e33affcc6f4e6344c6dee540824021d22f695ea170da63/types_cachetools-6.2.0.20260408.tar.gz", hash = "sha256:0d8ae2dd5ba0b4cfe6a55c34396dd0415f1be07d0033d84781cdc4ed9c2ebc6b", size = 9854, upload-time = "2026-04-08T04:31:49.665Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ec/57/5d3b8b3e66b002911ec1274e87f904eeee1d843c8713d95476c25c29cf31/types_cachetools-7.0.0.20260503.tar.gz", hash = "sha256:dfa4dcdf453f397dfc6d69fc0a57423ac1f248393f70aa56b5d05fac2df7a96c", size = 10033, upload-time = "2026-05-03T05:19:54.128Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bb/7d/579f50f4f004ee93c7d1baa95339591cac1fe02f4e3fb8fc0f900ee4a80f/types_cachetools-6.2.0.20260408-py3-none-any.whl", hash = "sha256:470e0b274737feae74beed3d764885bf4664002ecc393fba3778846b13ce92cb", size = 9350, upload-time = "2026-04-08T04:31:48.826Z" },
{ url = "https://files.pythonhosted.org/packages/3d/a8/84562723d9a3572e0851d82bdea6bed5a7dc033c6bd648f492c76b8c4ac8/types_cachetools-7.0.0.20260503-py3-none-any.whl", hash = "sha256:011b4fe0e85ef05c4a2471a4fda40254a78746b501cc1727359233872bb3a4e9", size = 9493, upload-time = "2026-05-03T05:19:53.124Z" },
]
[[package]]
name = "types-cffi"
version = "2.0.0.20260408"
version = "2.0.0.20260429"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "types-setuptools" },
]
sdist = { url = "https://files.pythonhosted.org/packages/64/67/eb4ef3408fdc0b4e5af38b30c0e6ad4663b41bdae9fb85a9f09a8db61a99/types_cffi-2.0.0.20260408.tar.gz", hash = "sha256:aa8b9c456ab715c079fc655929811f21f331bfb940f4a821987c581bf4e36230", size = 17541, upload-time = "2026-04-08T04:36:03.918Z" }
sdist = { url = "https://files.pythonhosted.org/packages/0c/7d/56b9be8b0f9dfbffb7c73e248aacf178693ff3c6cf765b77c43a1e886e04/types_cffi-2.0.0.20260429.tar.gz", hash = "sha256:afe7d9777a2921139623af0b94647637a5bd0b938b77ec125e5e5e068a1727bd", size = 17562, upload-time = "2026-04-29T05:16:43.29Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c3/a3/7fbd93ededcc7c77e9e5948b9794161733ebdbf618a27965b1bea0e728a4/types_cffi-2.0.0.20260408-py3-none-any.whl", hash = "sha256:68bd296742b4ff7c0afe3547f50bd0acc55416ecf322ffefd2b7344ef6388a42", size = 20101, upload-time = "2026-04-08T04:36:02.995Z" },
{ url = "https://files.pythonhosted.org/packages/b8/2c/79fa47a70d534f63a54b6d22e28cc842f8c6d9ebec93048355b0020bc7a9/types_cffi-2.0.0.20260429-py3-none-any.whl", hash = "sha256:6a4237bfdbd50e4d0726929070d8b9983bde541726a5a6fe0e8e24e78c1b3826", size = 20103, upload-time = "2026-04-29T05:16:42.155Z" },
]
[[package]]
@ -6680,11 +6680,11 @@ wheels = [
[[package]]
name = "types-greenlet"
version = "3.4.0.20260409"
version = "3.5.0.20260428"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/27/a6/668751bc864efe820e1eb12c2a77f9e62537f433cc002e483ad01badb04b/types_greenlet-3.4.0.20260409.tar.gz", hash = "sha256:81d2cf628934a16856bb9e54136def8de5356e934f0ad5d5474f219a0c5cb205", size = 8976, upload-time = "2026-04-09T04:22:31.693Z" }
sdist = { url = "https://files.pythonhosted.org/packages/79/50/d255c0e068679d7b9441d9408424ddf9e1f35620548e121003b3660af526/types_greenlet-3.5.0.20260428.tar.gz", hash = "sha256:6c188f5e9c5775d50bd00780a3eb1fb3cde17c396cf9703e3d417936e9e7a082", size = 9003, upload-time = "2026-04-28T05:19:43.062Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4f/3f/c8a4d8782f78fccb4b5fe91c5eae2efce6648072754bc7096b1e3b5407ad/types_greenlet-3.4.0.20260409-py3-none-any.whl", hash = "sha256:cbceadb4594eccd95b57b3f7fa8a9b851488f5e6c05026f4a3db9aac02ec8333", size = 8812, upload-time = "2026-04-09T04:22:30.734Z" },
{ url = "https://files.pythonhosted.org/packages/30/e5/5ff280f02392ced53cb5e866b660b492b4245b1395a61e57d2a6dc02977b/types_greenlet-3.5.0.20260428-py3-none-any.whl", hash = "sha256:7b0f23ce84ee93474d4aa8058920f0578181e11431be92ce9a4ad4123de2c41b", size = 8809, upload-time = "2026-04-28T05:19:41.976Z" },
]
[[package]]
@ -6764,11 +6764,11 @@ wheels = [
[[package]]
name = "types-protobuf"
version = "7.34.1.20260408"
version = "7.34.1.20260503"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5b/b1/4521e68c2cc17703d80eb42796751345376dd4c706f84007ef5e7c707774/types_protobuf-7.34.1.20260408.tar.gz", hash = "sha256:e2c0a0430e08c75b52671a6f0035abfdcc791aad12af16274282de1b721758ab", size = 68835, upload-time = "2026-04-08T04:26:43.613Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a0/31/87969cb3e62287bde7598b78b3c098d2873d54f5fb5a7cfbcaa73b8c965e/types_protobuf-7.34.1.20260503.tar.gz", hash = "sha256:effbc819aa17e02448dde99f089c6794662d66f4b2797e922f185ffe0b24e766", size = 68830, upload-time = "2026-05-03T05:19:50.739Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ef/b5/0bc9874d89c58fb0ce851e150055ce732d254dbb10b06becbc7635d0d635/types_protobuf-7.34.1.20260408-py3-none-any.whl", hash = "sha256:ebbcd4e27b145aef6a59bc0cb6c013b3528151c1ba5e7f7337aeee355d276a5e", size = 86012, upload-time = "2026-04-08T04:26:42.566Z" },
{ url = "https://files.pythonhosted.org/packages/f9/67/a33fb18090a927794a5ee4b1a30730b528ace0dad6b18932540d21258184/types_protobuf-7.34.1.20260503-py3-none-any.whl", hash = "sha256:75fd66121d56785c91828b8bf7b511f39ba847f11e682573e41847f01e9cd1de", size = 86019, upload-time = "2026-05-03T05:19:49.486Z" },
]
[[package]]

View File

@ -21,7 +21,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.13.3
image: langgenius/dify-api:1.14.0
restart: always
environment:
# Use the shared environment variables.
@ -69,7 +69,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.13.3
image: langgenius/dify-api:1.14.0
restart: always
environment:
# Use the shared environment variables.
@ -115,7 +115,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.13.3
image: langgenius/dify-api:1.14.0
restart: always
environment:
# Use the shared environment variables.
@ -152,7 +152,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.13.3
image: langgenius/dify-web:1.14.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -268,7 +268,7 @@ services:
# The DifySandbox
sandbox:
image: langgenius/dify-sandbox:0.2.14
image: langgenius/dify-sandbox:0.2.15
restart: always
environment:
# The DifySandbox configurations
@ -292,7 +292,7 @@ services:
# plugin daemon
plugin_daemon:
image: langgenius/dify-plugin-daemon:0.5.3-local
image: langgenius/dify-plugin-daemon:0.6.0-local
restart: always
environment:
# Use the shared environment variables.

View File

@ -59,19 +59,25 @@ services:
- ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}:/var/lib/mysql
ports:
- "${EXPOSE_MYSQL_PORT:-3306}:3306"
# mysqladmin ping passes during mysql:8.0's TCP-listening stage even while
# the server is still finalising init, leading to "Lost connection during
# query" on the first real query. Verify with a real SELECT instead.
healthcheck:
test:
[
"CMD",
"mysqladmin",
"ping",
"-u",
"root",
"mysql",
"-h",
"127.0.0.1",
"-uroot",
"-p${DB_PASSWORD:-difyai123456}",
"-e",
"SELECT 1",
]
interval: 1s
timeout: 3s
retries: 30
start_period: 20s
# The redis cache.
redis:
@ -97,7 +103,7 @@ services:
# The DifySandbox
sandbox:
image: langgenius/dify-sandbox:0.2.14
image: langgenius/dify-sandbox:0.2.15
restart: always
env_file:
- ./middleware.env
@ -123,7 +129,7 @@ services:
# plugin daemon
plugin_daemon:
image: langgenius/dify-plugin-daemon:0.5.3-local
image: langgenius/dify-plugin-daemon:0.6.0-local
restart: always
env_file:
- ./middleware.env

View File

@ -745,7 +745,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.13.3
image: langgenius/dify-api:1.14.0
restart: always
environment:
# Use the shared environment variables.
@ -793,7 +793,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.13.3
image: langgenius/dify-api:1.14.0
restart: always
environment:
# Use the shared environment variables.
@ -839,7 +839,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.13.3
image: langgenius/dify-api:1.14.0
restart: always
environment:
# Use the shared environment variables.
@ -876,7 +876,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.13.3
image: langgenius/dify-web:1.14.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -992,7 +992,7 @@ services:
# The DifySandbox
sandbox:
image: langgenius/dify-sandbox:0.2.14
image: langgenius/dify-sandbox:0.2.15
restart: always
environment:
# The DifySandbox configurations
@ -1016,7 +1016,7 @@ services:
# plugin daemon
plugin_daemon:
image: langgenius/dify-plugin-daemon:0.5.3-local
image: langgenius/dify-plugin-daemon:0.6.0-local
restart: always
environment:
# Use the shared environment variables.

View File

@ -17,3 +17,10 @@ Feature: Share app publicly
Given a workflow app has been published and shared via API
When I open the shared app URL
Then the shared app page should be accessible
@unauthenticated
Scenario: Run a shared workflow app without authentication
Given a workflow app has been published and shared via API
When I open the shared app URL
And I run the shared workflow app
Then the shared workflow run should succeed

View File

@ -37,3 +37,15 @@ Then('the shared app page should be accessible', async function (this: DifyWorld
await expect(this.getPage()).toHaveURL(/\/(workflow|chat)\/[a-zA-Z0-9]+/, { timeout: 15_000 })
await expect(this.getPage().locator('body')).toBeVisible({ timeout: 10_000 })
})
// Clicks the run control on a publicly shared workflow app page once it is enabled.
When('I run the shared workflow app', async function (this: DifyWorld) {
  const trigger = this.getPage().getByTestId('run-button')
  await expect(trigger).toBeEnabled({ timeout: 15_000 })
  await trigger.click()
})
// Waits for the success status icon, confirming the shared workflow run completed.
Then('the shared workflow run should succeed', async function (this: DifyWorld) {
  const successIcon = this.getPage().getByTestId('status-icon-success')
  await expect(successIcon).toBeVisible({ timeout: 55_000 })
})

View File

@ -12,8 +12,10 @@ Given('a minimal runnable workflow draft has been synced', async function (this:
When('I run the workflow', async function (this: DifyWorld) {
const page = this.getPage()
await page.getByText('Test Run').click()
await expect(page.getByText('Running').first()).toBeVisible({ timeout: 15_000 })
const testRunButton = page.getByText('Test Run')
await expect(testRunButton).toBeVisible({ timeout: 15_000 })
await testRunButton.click()
})
Then('the workflow run should succeed', async function (this: DifyWorld) {

File diff suppressed because it is too large Load Diff

View File

@ -2,11 +2,12 @@
"name": "dify",
"type": "module",
"private": true,
"packageManager": "pnpm@10.33.2",
"packageManager": "pnpm@11.0.0",
"engines": {
"node": "^22.22.1"
},
"scripts": {
"dev": "concurrently -k -n vinext,proxy \"vp run dify-web#dev:vinext\" \"vp run dify-web#dev:proxy\"",
"prepare": "vp config",
"type-check": "vp run -r type-check",
"lint": "eslint --cache --concurrency=auto",
@ -16,6 +17,7 @@
},
"devDependencies": {
"@antfu/eslint-config": "catalog:",
"concurrently": "catalog:",
"eslint": "catalog:",
"eslint-markdown": "catalog:",
"eslint-plugin-markdown-preferences": "catalog:",

View File

@ -0,0 +1,378 @@
// This file is auto-generated by @hey-api/openapi-ts
//
// NOTE(review): generated output — do not hand-edit; regenerate from the
// OpenAPI spec or changes will be overwritten. The generator exports each
// route builder with a positional suffix (post2, get3, ...); prefer
// reaching routes through the grouped objects below (e.g.
// account.changeEmail.post).
import { oc } from '@orpc/contract'
import * as z from 'zod'
import {
  zGetAccountAvatarQuery,
  zGetAccountAvatarResponse,
  zGetAccountDeleteVerifyResponse,
  zGetAccountEducationAutocompleteQuery,
  zGetAccountEducationAutocompleteResponse,
  zGetAccountEducationResponse,
  zGetAccountEducationVerifyResponse,
  zGetAccountIntegratesResponse,
  zGetAccountProfileResponse,
  zPostAccountAvatarBody,
  zPostAccountAvatarResponse,
  zPostAccountChangeEmailBody,
  zPostAccountChangeEmailCheckEmailUniqueBody,
  zPostAccountChangeEmailCheckEmailUniqueResponse,
  zPostAccountChangeEmailResetBody,
  zPostAccountChangeEmailResetResponse,
  zPostAccountChangeEmailResponse,
  zPostAccountChangeEmailValidityBody,
  zPostAccountChangeEmailValidityResponse,
  zPostAccountDeleteBody,
  zPostAccountDeleteFeedbackBody,
  zPostAccountDeleteFeedbackResponse,
  zPostAccountDeleteResponse,
  zPostAccountEducationBody,
  zPostAccountEducationResponse,
  zPostAccountInitBody,
  zPostAccountInitResponse,
  zPostAccountInterfaceLanguageBody,
  zPostAccountInterfaceLanguageResponse,
  zPostAccountInterfaceThemeBody,
  zPostAccountInterfaceThemeResponse,
  zPostAccountNameBody,
  zPostAccountNameResponse,
  zPostAccountPasswordBody,
  zPostAccountPasswordResponse,
  zPostAccountTimezoneBody,
  zPostAccountTimezoneResponse,
} from './zod.gen'
/**
 * Get account avatar url
 */
export const get = oc
  .route({
    description: 'Get account avatar url',
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAccountAvatar',
    path: '/account/avatar',
    tags: ['console'],
  })
  .input(z.object({ query: zGetAccountAvatarQuery }))
  .output(zGetAccountAvatarResponse)
export const post = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountAvatar',
    path: '/account/avatar',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountAvatarBody }))
  .output(zPostAccountAvatarResponse)
// Routes under /account/avatar.
export const avatar = {
  get,
  post,
}
export const post2 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountChangeEmailCheckEmailUnique',
    path: '/account/change-email/check-email-unique',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountChangeEmailCheckEmailUniqueBody }))
  .output(zPostAccountChangeEmailCheckEmailUniqueResponse)
// Routes under /account/change-email/check-email-unique.
export const checkEmailUnique = {
  post: post2,
}
export const post3 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountChangeEmailReset',
    path: '/account/change-email/reset',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountChangeEmailResetBody }))
  .output(zPostAccountChangeEmailResetResponse)
// Routes under /account/change-email/reset.
export const reset = {
  post: post3,
}
export const post4 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountChangeEmailValidity',
    path: '/account/change-email/validity',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountChangeEmailValidityBody }))
  .output(zPostAccountChangeEmailValidityResponse)
// Routes under /account/change-email/validity.
export const validity = {
  post: post4,
}
export const post5 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountChangeEmail',
    path: '/account/change-email',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountChangeEmailBody }))
  .output(zPostAccountChangeEmailResponse)
// Routes under /account/change-email, with its nested sub-resources.
export const changeEmail = {
  post: post5,
  checkEmailUnique,
  reset,
  validity,
}
export const post6 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountDeleteFeedback',
    path: '/account/delete/feedback',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountDeleteFeedbackBody }))
  .output(zPostAccountDeleteFeedbackResponse)
// Routes under /account/delete/feedback.
export const feedback = {
  post: post6,
}
export const get2 = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAccountDeleteVerify',
    path: '/account/delete/verify',
    tags: ['console'],
  })
  .output(zGetAccountDeleteVerifyResponse)
// Routes under /account/delete/verify.
export const verify = {
  get: get2,
}
export const post7 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountDelete',
    path: '/account/delete',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountDeleteBody }))
  .output(zPostAccountDeleteResponse)
// Routes under /account/delete ("delete_" because "delete" is a reserved word).
export const delete_ = {
  post: post7,
  feedback,
  verify,
}
export const get3 = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAccountEducationAutocomplete',
    path: '/account/education/autocomplete',
    tags: ['console'],
  })
  .input(z.object({ query: zGetAccountEducationAutocompleteQuery }))
  .output(zGetAccountEducationAutocompleteResponse)
// Routes under /account/education/autocomplete.
export const autocomplete = {
  get: get3,
}
export const get4 = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAccountEducationVerify',
    path: '/account/education/verify',
    tags: ['console'],
  })
  .output(zGetAccountEducationVerifyResponse)
// Routes under /account/education/verify.
export const verify2 = {
  get: get4,
}
export const get5 = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAccountEducation',
    path: '/account/education',
    tags: ['console'],
  })
  .output(zGetAccountEducationResponse)
export const post8 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountEducation',
    path: '/account/education',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountEducationBody }))
  .output(zPostAccountEducationResponse)
// Routes under /account/education, with its nested sub-resources.
export const education = {
  get: get5,
  post: post8,
  autocomplete,
  verify: verify2,
}
export const post9 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountInit',
    path: '/account/init',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountInitBody }))
  .output(zPostAccountInitResponse)
// Routes under /account/init.
export const init = {
  post: post9,
}
export const get6 = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAccountIntegrates',
    path: '/account/integrates',
    tags: ['console'],
  })
  .output(zGetAccountIntegratesResponse)
// Routes under /account/integrates.
export const integrates = {
  get: get6,
}
export const post10 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountInterfaceLanguage',
    path: '/account/interface-language',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountInterfaceLanguageBody }))
  .output(zPostAccountInterfaceLanguageResponse)
// Routes under /account/interface-language.
export const interfaceLanguage = {
  post: post10,
}
export const post11 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountInterfaceTheme',
    path: '/account/interface-theme',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountInterfaceThemeBody }))
  .output(zPostAccountInterfaceThemeResponse)
// Routes under /account/interface-theme.
export const interfaceTheme = {
  post: post11,
}
export const post12 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountName',
    path: '/account/name',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountNameBody }))
  .output(zPostAccountNameResponse)
// Routes under /account/name.
export const name = {
  post: post12,
}
export const post13 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountPassword',
    path: '/account/password',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountPasswordBody }))
  .output(zPostAccountPasswordResponse)
// Routes under /account/password.
export const password = {
  post: post13,
}
export const get7 = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAccountProfile',
    path: '/account/profile',
    tags: ['console'],
  })
  .output(zGetAccountProfileResponse)
// Routes under /account/profile.
export const profile = {
  get: get7,
}
export const post14 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAccountTimezone',
    path: '/account/timezone',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAccountTimezoneBody }))
  .output(zPostAccountTimezoneResponse)
// Routes under /account/timezone.
export const timezone = {
  post: post14,
}
// All /account/* route groups keyed by path segment.
export const account = {
  avatar,
  changeEmail,
  delete: delete_,
  education,
  init,
  integrates,
  interfaceLanguage,
  interfaceTheme,
  name,
  password,
  profile,
  timezone,
}
// Root oRPC contract object for this module.
export const contract = {
  account,
}

View File

@ -0,0 +1,429 @@
// This file is auto-generated by @hey-api/openapi-ts
//
// NOTE(review): generated output — do not hand-edit; regenerate from the
// OpenAPI spec instead. The *Payload/*Response types model the JSON bodies
// of the console API's account endpoints; the per-endpoint *Data types
// describe request structure (body/path/query/url) and *Responses maps
// status codes to response bodies.
export type ClientOptions = {
  // Base URL is expected to point at the console API root ("…/console/api").
  baseUrl: `${string}://${string}/console/api` | (string & {})
}
export type AccountAvatarPayload = {
  avatar: string
}
export type Account = {
  avatar?: string | null
  created_at?: number | null
  email: string
  id: string
  interface_language?: string | null
  interface_theme?: string | null
  is_password_set: boolean
  last_login_at?: number | null
  last_login_ip?: string | null
  name: string
  timezone?: string | null
}
export type ChangeEmailSendPayload = {
  email: string
  language?: string | null
  phase?: string | null
  token?: string | null
}
export type CheckEmailUniquePayload = {
  email: string
}
export type ChangeEmailResetPayload = {
  new_email: string
  token: string
}
export type ChangeEmailValidityPayload = {
  code: string
  email: string
  token: string
}
export type AccountDeletePayload = {
  code: string
  token: string
}
export type AccountDeletionFeedbackPayload = {
  email: string
  feedback: string
}
export type EducationStatusResponse = {
  allow_refresh?: boolean | null
  expire_at?: number | null
  is_student?: boolean | null
  result?: boolean | null
}
export type EducationActivatePayload = {
  institution: string
  role: string
  token: string
}
export type EducationAutocompleteResponse = {
  curr_page?: number | null
  data?: Array<string>
  has_next?: boolean | null
}
export type EducationVerifyResponse = {
  token?: string | null
}
export type AccountInitPayload = {
  interface_language: string
  invitation_code?: string | null
  timezone: string
}
export type AccountIntegrateListResponse = {
  data: Array<AccountIntegrateResponse>
}
export type AccountInterfaceLanguagePayload = {
  interface_language: string
}
export type AccountInterfaceThemePayload = {
  interface_theme: 'light' | 'dark'
}
export type AccountNamePayload = {
  name: string
}
export type AccountPasswordPayload = {
  new_password: string
  password?: string | null
  repeat_new_password: string
}
export type AccountTimezonePayload = {
  timezone: string
}
export type AccountIntegrateResponse = {
  created_at?: number | null
  is_bound: boolean
  link?: string | null
  provider: string
}
export type GetAccountAvatarData = {
  body?: never
  path?: never
  query: {
    avatar: string
  }
  url: '/account/avatar'
}
export type GetAccountAvatarResponses = {
  200: {
    [key: string]: unknown
  }
}
export type GetAccountAvatarResponse = GetAccountAvatarResponses[keyof GetAccountAvatarResponses]
export type PostAccountAvatarData = {
  body: AccountAvatarPayload
  path?: never
  query?: never
  url: '/account/avatar'
}
export type PostAccountAvatarResponses = {
  200: Account
}
export type PostAccountAvatarResponse = PostAccountAvatarResponses[keyof PostAccountAvatarResponses]
export type PostAccountChangeEmailData = {
  body: ChangeEmailSendPayload
  path?: never
  query?: never
  url: '/account/change-email'
}
export type PostAccountChangeEmailResponses = {
  200: {
    [key: string]: unknown
  }
}
export type PostAccountChangeEmailResponse
  = PostAccountChangeEmailResponses[keyof PostAccountChangeEmailResponses]
export type PostAccountChangeEmailCheckEmailUniqueData = {
  body: CheckEmailUniquePayload
  path?: never
  query?: never
  url: '/account/change-email/check-email-unique'
}
export type PostAccountChangeEmailCheckEmailUniqueResponses = {
  200: {
    [key: string]: unknown
  }
}
export type PostAccountChangeEmailCheckEmailUniqueResponse
  = PostAccountChangeEmailCheckEmailUniqueResponses[keyof PostAccountChangeEmailCheckEmailUniqueResponses]
export type PostAccountChangeEmailResetData = {
  body: ChangeEmailResetPayload
  path?: never
  query?: never
  url: '/account/change-email/reset'
}
export type PostAccountChangeEmailResetResponses = {
  200: Account
}
export type PostAccountChangeEmailResetResponse
  = PostAccountChangeEmailResetResponses[keyof PostAccountChangeEmailResetResponses]
export type PostAccountChangeEmailValidityData = {
  body: ChangeEmailValidityPayload
  path?: never
  query?: never
  url: '/account/change-email/validity'
}
export type PostAccountChangeEmailValidityResponses = {
  200: {
    [key: string]: unknown
  }
}
export type PostAccountChangeEmailValidityResponse
  = PostAccountChangeEmailValidityResponses[keyof PostAccountChangeEmailValidityResponses]
export type PostAccountDeleteData = {
  body: AccountDeletePayload
  path?: never
  query?: never
  url: '/account/delete'
}
export type PostAccountDeleteResponses = {
  200: {
    [key: string]: unknown
  }
}
export type PostAccountDeleteResponse = PostAccountDeleteResponses[keyof PostAccountDeleteResponses]
export type PostAccountDeleteFeedbackData = {
  body: AccountDeletionFeedbackPayload
  path?: never
  query?: never
  url: '/account/delete/feedback'
}
export type PostAccountDeleteFeedbackResponses = {
  200: {
    [key: string]: unknown
  }
}
export type PostAccountDeleteFeedbackResponse
  = PostAccountDeleteFeedbackResponses[keyof PostAccountDeleteFeedbackResponses]
export type GetAccountDeleteVerifyData = {
  body?: never
  path?: never
  query?: never
  url: '/account/delete/verify'
}
export type GetAccountDeleteVerifyResponses = {
  200: {
    [key: string]: unknown
  }
}
export type GetAccountDeleteVerifyResponse
  = GetAccountDeleteVerifyResponses[keyof GetAccountDeleteVerifyResponses]
export type GetAccountEducationData = {
  body?: never
  path?: never
  query?: never
  url: '/account/education'
}
export type GetAccountEducationResponses = {
  200: EducationStatusResponse
}
export type GetAccountEducationResponse
  = GetAccountEducationResponses[keyof GetAccountEducationResponses]
export type PostAccountEducationData = {
  body: EducationActivatePayload
  path?: never
  query?: never
  url: '/account/education'
}
export type PostAccountEducationResponses = {
  200: {
    [key: string]: unknown
  }
}
export type PostAccountEducationResponse
  = PostAccountEducationResponses[keyof PostAccountEducationResponses]
export type GetAccountEducationAutocompleteData = {
  body?: never
  path?: never
  query: {
    keywords: string
    limit?: number
    page?: number
  }
  url: '/account/education/autocomplete'
}
export type GetAccountEducationAutocompleteResponses = {
  200: EducationAutocompleteResponse
}
export type GetAccountEducationAutocompleteResponse
  = GetAccountEducationAutocompleteResponses[keyof GetAccountEducationAutocompleteResponses]
export type GetAccountEducationVerifyData = {
  body?: never
  path?: never
  query?: never
  url: '/account/education/verify'
}
export type GetAccountEducationVerifyResponses = {
  200: EducationVerifyResponse
}
export type GetAccountEducationVerifyResponse
  = GetAccountEducationVerifyResponses[keyof GetAccountEducationVerifyResponses]
export type PostAccountInitData = {
  body: AccountInitPayload
  path?: never
  query?: never
  url: '/account/init'
}
export type PostAccountInitResponses = {
  200: {
    [key: string]: unknown
  }
}
export type PostAccountInitResponse = PostAccountInitResponses[keyof PostAccountInitResponses]
export type GetAccountIntegratesData = {
  body?: never
  path?: never
  query?: never
  url: '/account/integrates'
}
export type GetAccountIntegratesResponses = {
  200: AccountIntegrateListResponse
}
export type GetAccountIntegratesResponse
  = GetAccountIntegratesResponses[keyof GetAccountIntegratesResponses]
export type PostAccountInterfaceLanguageData = {
  body: AccountInterfaceLanguagePayload
  path?: never
  query?: never
  url: '/account/interface-language'
}
export type PostAccountInterfaceLanguageResponses = {
  200: Account
}
export type PostAccountInterfaceLanguageResponse
  = PostAccountInterfaceLanguageResponses[keyof PostAccountInterfaceLanguageResponses]
export type PostAccountInterfaceThemeData = {
  body: AccountInterfaceThemePayload
  path?: never
  query?: never
  url: '/account/interface-theme'
}
export type PostAccountInterfaceThemeResponses = {
  200: Account
}
export type PostAccountInterfaceThemeResponse
  = PostAccountInterfaceThemeResponses[keyof PostAccountInterfaceThemeResponses]
export type PostAccountNameData = {
  body: AccountNamePayload
  path?: never
  query?: never
  url: '/account/name'
}
export type PostAccountNameResponses = {
  200: Account
}
export type PostAccountNameResponse = PostAccountNameResponses[keyof PostAccountNameResponses]
export type PostAccountPasswordData = {
  body: AccountPasswordPayload
  path?: never
  query?: never
  url: '/account/password'
}
export type PostAccountPasswordResponses = {
  200: Account
}
export type PostAccountPasswordResponse
  = PostAccountPasswordResponses[keyof PostAccountPasswordResponses]
export type GetAccountProfileData = {
  body?: never
  path?: never
  query?: never
  url: '/account/profile'
}
export type GetAccountProfileResponses = {
  200: Account
}
export type GetAccountProfileResponse = GetAccountProfileResponses[keyof GetAccountProfileResponses]
export type PostAccountTimezoneData = {
  body: AccountTimezonePayload
  path?: never
  query?: never
  url: '/account/timezone'
}
export type PostAccountTimezoneResponses = {
  200: Account
}
export type PostAccountTimezoneResponse
  = PostAccountTimezoneResponses[keyof PostAccountTimezoneResponses]

View File

@ -0,0 +1,318 @@
// This file is auto-generated by @hey-api/openapi-ts
//
// NOTE(review): generated output — do not hand-edit; regenerate from the
// OpenAPI spec instead. Runtime zod validators mirroring the account
// endpoint schemas; the z*Body / z*Response aliases at the bottom bind
// each endpoint to its payload/response schema.
import * as z from 'zod'
/**
 * AccountAvatarPayload
 */
export const zAccountAvatarPayload = z.object({
  avatar: z.string(),
})
/**
 * Account
 */
export const zAccount = z.object({
  avatar: z.string().nullish(),
  created_at: z.int().nullish(),
  email: z.string(),
  id: z.string(),
  interface_language: z.string().nullish(),
  interface_theme: z.string().nullish(),
  is_password_set: z.boolean(),
  last_login_at: z.int().nullish(),
  last_login_ip: z.string().nullish(),
  name: z.string(),
  timezone: z.string().nullish(),
})
/**
 * ChangeEmailSendPayload
 */
export const zChangeEmailSendPayload = z.object({
  email: z.string(),
  language: z.string().nullish(),
  phase: z.string().nullish(),
  token: z.string().nullish(),
})
/**
 * CheckEmailUniquePayload
 */
export const zCheckEmailUniquePayload = z.object({
  email: z.string(),
})
/**
 * ChangeEmailResetPayload
 */
export const zChangeEmailResetPayload = z.object({
  new_email: z.string(),
  token: z.string(),
})
/**
 * ChangeEmailValidityPayload
 */
export const zChangeEmailValidityPayload = z.object({
  code: z.string(),
  email: z.string(),
  token: z.string(),
})
/**
 * AccountDeletePayload
 */
export const zAccountDeletePayload = z.object({
  code: z.string(),
  token: z.string(),
})
/**
 * AccountDeletionFeedbackPayload
 */
export const zAccountDeletionFeedbackPayload = z.object({
  email: z.string(),
  feedback: z.string(),
})
/**
 * EducationStatusResponse
 */
export const zEducationStatusResponse = z.object({
  allow_refresh: z.boolean().nullish(),
  expire_at: z.int().nullish(),
  is_student: z.boolean().nullish(),
  result: z.boolean().nullish(),
})
/**
 * EducationActivatePayload
 */
export const zEducationActivatePayload = z.object({
  institution: z.string(),
  role: z.string(),
  token: z.string(),
})
/**
 * EducationAutocompleteResponse
 */
export const zEducationAutocompleteResponse = z.object({
  curr_page: z.int().nullish(),
  data: z.array(z.string()).optional(),
  has_next: z.boolean().nullish(),
})
/**
 * EducationVerifyResponse
 */
export const zEducationVerifyResponse = z.object({
  token: z.string().nullish(),
})
/**
 * AccountInitPayload
 */
export const zAccountInitPayload = z.object({
  interface_language: z.string(),
  invitation_code: z.string().nullish(),
  timezone: z.string(),
})
/**
 * AccountInterfaceLanguagePayload
 */
export const zAccountInterfaceLanguagePayload = z.object({
  interface_language: z.string(),
})
/**
 * AccountInterfaceThemePayload
 */
export const zAccountInterfaceThemePayload = z.object({
  interface_theme: z.enum(['light', 'dark']),
})
/**
 * AccountNamePayload
 */
export const zAccountNamePayload = z.object({
  // Display-name length limits come from the API schema.
  name: z.string().min(3).max(30),
})
/**
 * AccountPasswordPayload
 */
export const zAccountPasswordPayload = z.object({
  new_password: z.string(),
  password: z.string().nullish(),
  repeat_new_password: z.string(),
})
/**
 * AccountTimezonePayload
 */
export const zAccountTimezonePayload = z.object({
  timezone: z.string(),
})
/**
 * AccountIntegrateResponse
 */
export const zAccountIntegrateResponse = z.object({
  created_at: z.int().nullish(),
  is_bound: z.boolean(),
  link: z.string().nullish(),
  provider: z.string(),
})
/**
 * AccountIntegrateListResponse
 */
export const zAccountIntegrateListResponse = z.object({
  data: z.array(zAccountIntegrateResponse),
})
export const zGetAccountAvatarQuery = z.object({
  avatar: z.string(),
})
/**
 * Success
 */
export const zGetAccountAvatarResponse = z.record(z.string(), z.unknown())
export const zPostAccountAvatarBody = zAccountAvatarPayload
/**
 * Success
 */
export const zPostAccountAvatarResponse = zAccount
export const zPostAccountChangeEmailBody = zChangeEmailSendPayload
/**
 * Success
 */
export const zPostAccountChangeEmailResponse = z.record(z.string(), z.unknown())
export const zPostAccountChangeEmailCheckEmailUniqueBody = zCheckEmailUniquePayload
/**
 * Success
 */
export const zPostAccountChangeEmailCheckEmailUniqueResponse = z.record(z.string(), z.unknown())
export const zPostAccountChangeEmailResetBody = zChangeEmailResetPayload
/**
 * Success
 */
export const zPostAccountChangeEmailResetResponse = zAccount
export const zPostAccountChangeEmailValidityBody = zChangeEmailValidityPayload
/**
 * Success
 */
export const zPostAccountChangeEmailValidityResponse = z.record(z.string(), z.unknown())
export const zPostAccountDeleteBody = zAccountDeletePayload
/**
 * Success
 */
export const zPostAccountDeleteResponse = z.record(z.string(), z.unknown())
export const zPostAccountDeleteFeedbackBody = zAccountDeletionFeedbackPayload
/**
 * Success
 */
export const zPostAccountDeleteFeedbackResponse = z.record(z.string(), z.unknown())
/**
 * Success
 */
export const zGetAccountDeleteVerifyResponse = z.record(z.string(), z.unknown())
/**
 * Success
 */
export const zGetAccountEducationResponse = zEducationStatusResponse
export const zPostAccountEducationBody = zEducationActivatePayload
/**
 * Success
 */
export const zPostAccountEducationResponse = z.record(z.string(), z.unknown())
export const zGetAccountEducationAutocompleteQuery = z.object({
  keywords: z.string(),
  limit: z.int().optional().default(20),
  page: z.int().optional().default(0),
})
/**
 * Success
 */
export const zGetAccountEducationAutocompleteResponse = zEducationAutocompleteResponse
/**
 * Success
 */
export const zGetAccountEducationVerifyResponse = zEducationVerifyResponse
export const zPostAccountInitBody = zAccountInitPayload
/**
 * Success
 */
export const zPostAccountInitResponse = z.record(z.string(), z.unknown())
/**
 * Success
 */
export const zGetAccountIntegratesResponse = zAccountIntegrateListResponse
export const zPostAccountInterfaceLanguageBody = zAccountInterfaceLanguagePayload
/**
 * Success
 */
export const zPostAccountInterfaceLanguageResponse = zAccount
export const zPostAccountInterfaceThemeBody = zAccountInterfaceThemePayload
/**
 * Success
 */
export const zPostAccountInterfaceThemeResponse = zAccount
export const zPostAccountNameBody = zAccountNamePayload
/**
 * Success
 */
export const zPostAccountNameResponse = zAccount
export const zPostAccountPasswordBody = zAccountPasswordPayload
/**
 * Success
 */
export const zPostAccountPasswordResponse = zAccount
/**
 * Success
 */
export const zGetAccountProfileResponse = zAccount
export const zPostAccountTimezoneBody = zAccountTimezonePayload
/**
 * Success
 */
export const zPostAccountTimezoneResponse = zAccount

View File

@ -0,0 +1,54 @@
// This file is auto-generated by @hey-api/openapi-ts
//
// NOTE(review): generated output — do not hand-edit; regenerate from the
// OpenAPI spec or changes will be overwritten.
import { oc } from '@orpc/contract'
import * as z from 'zod'
import {
  zGetActivateCheckQuery,
  zGetActivateCheckResponse,
  zPostActivateBody,
  zPostActivateResponse,
} from './zod.gen'
/**
 * Check if activation token is valid
 */
export const get = oc
  .route({
    description: 'Check if activation token is valid',
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getActivateCheck',
    path: '/activate/check',
    tags: ['console'],
  })
  .input(z.object({ query: zGetActivateCheckQuery }))
  .output(zGetActivateCheckResponse)
// Routes under /activate/check.
export const check = {
  get,
}
/**
 * Activate account with invitation token
 */
export const post = oc
  .route({
    description: 'Activate account with invitation token',
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postActivate',
    path: '/activate',
    tags: ['console'],
  })
  .input(z.object({ body: zPostActivateBody }))
  .output(zPostActivateResponse)
// Routes under /activate, with its nested sub-resources.
export const activate = {
  post,
  check,
}
// Root oRPC contract object for this module.
export const contract = {
  activate,
}

View File

@ -0,0 +1,63 @@
// This file is auto-generated by @hey-api/openapi-ts
//
// NOTE(review): generated output — do not hand-edit; regenerate from the
// OpenAPI spec instead. The *Payload/*Response types model the JSON bodies
// of the /activate endpoints; the per-endpoint *Data types describe request
// structure and *Responses/*Errors map status codes to bodies.
export type ClientOptions = {
  // Base URL is expected to point at the console API root ("…/console/api").
  baseUrl: `${string}://${string}/console/api` | (string & {})
}
export type ActivatePayload = {
  email?: string | null
  interface_language: string
  name: string
  timezone: string
  token: string
  workspace_id?: string | null
}
export type ActivationResponse = {
  result: string
}
export type ActivationCheckResponse = {
  data?: {
    [key: string]: unknown
  } | null
  is_valid: boolean
}
export type PostActivateData = {
  body: ActivatePayload
  path?: never
  query?: never
  url: '/activate'
}
export type PostActivateErrors = {
  // 400 body shape is unspecified in the schema.
  400: {
    [key: string]: unknown
  }
}
export type PostActivateError = PostActivateErrors[keyof PostActivateErrors]
export type PostActivateResponses = {
  200: ActivationResponse
}
export type PostActivateResponse = PostActivateResponses[keyof PostActivateResponses]
export type GetActivateCheckData = {
  body?: never
  path?: never
  query: {
    email?: string | null
    token: string
    workspace_id?: string | null
  }
  url: '/activate/check'
}
export type GetActivateCheckResponses = {
  200: ActivationCheckResponse
}
export type GetActivateCheckResponse = GetActivateCheckResponses[keyof GetActivateCheckResponses]

View File

@ -0,0 +1,48 @@
// This file is auto-generated by @hey-api/openapi-ts
import * as z from 'zod'
/**
 * ActivatePayload — request body schema for POST /activate.
 */
export const zActivatePayload = z.object({
  email: z.string().nullish(),
  interface_language: z.string(),
  name: z.string().max(30),
  timezone: z.string(),
  token: z.string(),
  workspace_id: z.string().nullish(),
})
/**
 * ActivationResponse — result wrapper returned on successful activation.
 */
export const zActivationResponse = z.object({
  result: z.string(),
})
/**
 * ActivationCheckResponse — validity flag plus optional free-form data.
 */
export const zActivationCheckResponse = z.object({
  data: z.record(z.string(), z.unknown()).nullish(),
  is_valid: z.boolean(),
})
// POST /activate request body (alias of zActivatePayload).
export const zPostActivateBody = zActivatePayload
/**
 * Account activated successfully
 */
export const zPostActivateResponse = zActivationResponse
// GET /activate/check query parameters; only `token` is required.
export const zGetActivateCheckQuery = z.object({
  email: z.string().nullish(),
  token: z.string(),
  workspace_id: z.string().nullish(),
})
/**
 * Success
 */
export const zGetActivateCheckResponse = zActivationCheckResponse

View File

@ -0,0 +1,153 @@
// This file is auto-generated by @hey-api/openapi-ts
import { oc } from '@orpc/contract'
import * as z from 'zod'
import {
zDeleteAdminDeleteExploreBannerByBannerIdPath,
zDeleteAdminDeleteExploreBannerByBannerIdResponse,
zDeleteAdminInsertExploreAppsByAppIdPath,
zDeleteAdminInsertExploreAppsByAppIdResponse,
zPostAdminBatchAddNotificationAccountsResponse,
zPostAdminInsertExploreAppsBody,
zPostAdminInsertExploreAppsResponse,
zPostAdminInsertExploreBannerBody,
zPostAdminInsertExploreBannerResponse,
zPostAdminUpsertNotificationBody,
zPostAdminUpsertNotificationResponse,
} from './zod.gen'
/**
 * Register target accounts for a notification by email address. JSON body: {"notification_id": "...", "user_email": ["a@example.com", ...]}. File upload: multipart/form-data with a 'file' field (CSV or TXT, one email per line) plus a 'notification_id' field. Emails that do not match any account are silently skipped.
 */
export const post = oc
  .route({
    description:
      'Register target accounts for a notification by email address. JSON body: {"notification_id": "...", "user_email": ["a@example.com", ...]}. File upload: multipart/form-data with a \'file\' field (CSV or TXT, one email per line) plus a \'notification_id\' field. Emails that do not match any account are silently skipped.',
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAdminBatchAddNotificationAccounts',
    path: '/admin/batch_add_notification_accounts',
    tags: ['console'],
  })
  // No .input(): the generator emitted no request schema for this operation.
  .output(zPostAdminBatchAddNotificationAccountsResponse)
// Router group for /admin/batch_add_notification_accounts.
export const batchAddNotificationAccounts = {
  post,
}
/**
 * Delete an explore banner
 *
 * DELETE /admin/delete-explore-banner/{banner_id}; responds 204 on success.
 */
export const delete_ = oc
  .route({
    description: 'Delete an explore banner',
    inputStructure: 'detailed',
    method: 'DELETE',
    operationId: 'deleteAdminDeleteExploreBannerByBannerId',
    path: '/admin/delete-explore-banner/{banner_id}',
    successStatus: 204,
    tags: ['console'],
  })
  .input(z.object({ params: zDeleteAdminDeleteExploreBannerByBannerIdPath }))
  .output(zDeleteAdminDeleteExploreBannerByBannerIdResponse)
// Path-parameter group for {banner_id}. `delete` is a reserved word,
// hence the generator's `delete_` binding bound under the `delete` key.
export const byBannerId = {
  delete: delete_,
}
// Router group for /admin/delete-explore-banner.
export const deleteExploreBanner = {
  byBannerId,
}
/**
 * Remove an app from the explore list
 *
 * DELETE /admin/insert-explore-apps/{app_id}; responds 204 on success.
 */
export const delete2 = oc
  .route({
    description: 'Remove an app from the explore list',
    inputStructure: 'detailed',
    method: 'DELETE',
    operationId: 'deleteAdminInsertExploreAppsByAppId',
    path: '/admin/insert-explore-apps/{app_id}',
    successStatus: 204,
    tags: ['console'],
  })
  .input(z.object({ params: zDeleteAdminInsertExploreAppsByAppIdPath }))
  .output(zDeleteAdminInsertExploreAppsByAppIdResponse)
// Path-parameter group for {app_id}; second DELETE in this file, so the
// generator numbered the binding `delete2`.
export const byAppId = {
  delete: delete2,
}
/**
 * Insert or update an app in the explore list
 */
export const post2 = oc
  .route({
    description: 'Insert or update an app in the explore list',
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAdminInsertExploreApps',
    path: '/admin/insert-explore-apps',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAdminInsertExploreAppsBody }))
  .output(zPostAdminInsertExploreAppsResponse)
// Router group for /admin/insert-explore-apps and its {app_id} child.
export const insertExploreApps = {
  post: post2,
  byAppId,
}
/**
 * Insert an explore banner
 *
 * POST /admin/insert-explore-banner; responds 201 on success.
 */
export const post3 = oc
  .route({
    description: 'Insert an explore banner',
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAdminInsertExploreBanner',
    path: '/admin/insert-explore-banner',
    successStatus: 201,
    tags: ['console'],
  })
  .input(z.object({ body: zPostAdminInsertExploreBannerBody }))
  .output(zPostAdminInsertExploreBannerResponse)
// Router group for /admin/insert-explore-banner.
export const insertExploreBanner = {
  post: post3,
}
/**
 * Create or update an in-product notification. Supply notification_id to update an existing one; omit it to create a new one. Pass at least one language variant in contents (zh / en / jp).
 */
export const post4 = oc
  .route({
    description:
      'Create or update an in-product notification. Supply notification_id to update an existing one; omit it to create a new one. Pass at least one language variant in contents (zh / en / jp).',
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAdminUpsertNotification',
    path: '/admin/upsert_notification',
    tags: ['console'],
  })
  .input(z.object({ body: zPostAdminUpsertNotificationBody }))
  .output(zPostAdminUpsertNotificationResponse)
// Router group for /admin/upsert_notification.
export const upsertNotification = {
  post: post4,
}
// Router group for the /admin path segment.
export const admin = {
  batchAddNotificationAccounts,
  deleteExploreBanner,
  insertExploreApps,
  insertExploreBanner,
  upsertNotification,
}
// Root contract object exported from this generated file.
export const contract = {
  admin,
}

View File

@ -0,0 +1,157 @@
// This file is auto-generated by @hey-api/openapi-ts
/** Connection options for the generated client; the base URL must end in `/console/api`. */
export type ClientOptions = {
  baseUrl: `${string}://${string}/console/api` | (string & {})
}

/** Request body for inserting or updating an app in the explore list. */
export type InsertExploreAppPayload = {
  app_id: string
  can_trial?: boolean
  category: string
  copyright?: string | null
  custom_disclaimer?: string | null
  desc?: string | null
  language: string
  position: number
  privacy_policy?: string | null
  trial_limit?: number
}

/** Request body for creating an explore banner (`img-src` is not a valid identifier, so it stays quoted). */
export type InsertExploreBannerPayload = {
  'category': string
  'description': string
  'img-src': string
  'language'?: string
  'link': string
  'sort': number
  'title': string
}

/** Request body for creating or updating an in-product notification. */
export type UpsertNotificationPayload = {
  contents: Array<LangContentPayload>
  end_time?: string | null
  frequency?: string
  notification_id?: string | null
  start_time?: string | null
  status?: string
}

/** One language variant of a notification's content. */
export type LangContentPayload = {
  body: string
  lang: string
  subtitle?: string | null
  title: string
  title_pic_url?: string | null
}

/** Request descriptor for POST /admin/batch_add_notification_accounts. */
export type PostAdminBatchAddNotificationAccountsData = {
  body?: never
  path?: never
  query?: never
  url: '/admin/batch_add_notification_accounts'
}

/** Success payloads keyed by HTTP status code. */
export type PostAdminBatchAddNotificationAccountsResponses = {
  200: Record<string, unknown>
}

/** Union of all success payloads for POST /admin/batch_add_notification_accounts. */
export type PostAdminBatchAddNotificationAccountsResponse = PostAdminBatchAddNotificationAccountsResponses[keyof PostAdminBatchAddNotificationAccountsResponses]

/** Request descriptor for DELETE /admin/delete-explore-banner/{banner_id}. */
export type DeleteAdminDeleteExploreBannerByBannerIdData = {
  body?: never
  path: {
    banner_id: string
  }
  query?: never
  url: '/admin/delete-explore-banner/{banner_id}'
}

/** Success payloads keyed by HTTP status code. */
export type DeleteAdminDeleteExploreBannerByBannerIdResponses = {
  204: Record<string, unknown>
}

/** Union of all success payloads for DELETE /admin/delete-explore-banner/{banner_id}. */
export type DeleteAdminDeleteExploreBannerByBannerIdResponse = DeleteAdminDeleteExploreBannerByBannerIdResponses[keyof DeleteAdminDeleteExploreBannerByBannerIdResponses]

/** Request descriptor for POST /admin/insert-explore-apps. */
export type PostAdminInsertExploreAppsData = {
  body: InsertExploreAppPayload
  path?: never
  query?: never
  url: '/admin/insert-explore-apps'
}

/** Error payloads keyed by HTTP status code. */
export type PostAdminInsertExploreAppsErrors = {
  404: Record<string, unknown>
}

/** Union of all error payloads for POST /admin/insert-explore-apps. */
export type PostAdminInsertExploreAppsError = PostAdminInsertExploreAppsErrors[keyof PostAdminInsertExploreAppsErrors]

/** Success payloads keyed by HTTP status code. */
export type PostAdminInsertExploreAppsResponses = {
  200: Record<string, unknown>
  201: Record<string, unknown>
}

/** Union of all success payloads for POST /admin/insert-explore-apps. */
export type PostAdminInsertExploreAppsResponse = PostAdminInsertExploreAppsResponses[keyof PostAdminInsertExploreAppsResponses]

/** Request descriptor for DELETE /admin/insert-explore-apps/{app_id}. */
export type DeleteAdminInsertExploreAppsByAppIdData = {
  body?: never
  path: {
    app_id: string
  }
  query?: never
  url: '/admin/insert-explore-apps/{app_id}'
}

/** Success payloads keyed by HTTP status code. */
export type DeleteAdminInsertExploreAppsByAppIdResponses = {
  204: Record<string, unknown>
}

/** Union of all success payloads for DELETE /admin/insert-explore-apps/{app_id}. */
export type DeleteAdminInsertExploreAppsByAppIdResponse = DeleteAdminInsertExploreAppsByAppIdResponses[keyof DeleteAdminInsertExploreAppsByAppIdResponses]

/** Request descriptor for POST /admin/insert-explore-banner. */
export type PostAdminInsertExploreBannerData = {
  body: InsertExploreBannerPayload
  path?: never
  query?: never
  url: '/admin/insert-explore-banner'
}

/** Success payloads keyed by HTTP status code. */
export type PostAdminInsertExploreBannerResponses = {
  201: Record<string, unknown>
}

/** Union of all success payloads for POST /admin/insert-explore-banner. */
export type PostAdminInsertExploreBannerResponse = PostAdminInsertExploreBannerResponses[keyof PostAdminInsertExploreBannerResponses]

/** Request descriptor for POST /admin/upsert_notification. */
export type PostAdminUpsertNotificationData = {
  body: UpsertNotificationPayload
  path?: never
  query?: never
  url: '/admin/upsert_notification'
}

/** Success payloads keyed by HTTP status code. */
export type PostAdminUpsertNotificationResponses = {
  200: Record<string, unknown>
}

/** Union of all success payloads for POST /admin/upsert_notification. */
export type PostAdminUpsertNotificationResponse = PostAdminUpsertNotificationResponses[keyof PostAdminUpsertNotificationResponses]

View File

@ -0,0 +1,99 @@
// This file is auto-generated by @hey-api/openapi-ts
import * as z from 'zod'
/**
 * InsertExploreAppPayload — body for POST /admin/insert-explore-apps.
 */
export const zInsertExploreAppPayload = z.object({
  app_id: z.string(),
  can_trial: z.boolean().optional().default(false),
  category: z.string(),
  copyright: z.string().nullish(),
  custom_disclaimer: z.string().nullish(),
  desc: z.string().nullish(),
  language: z.string(),
  position: z.int(),
  privacy_policy: z.string().nullish(),
  trial_limit: z.int().optional().default(0),
})
/**
 * InsertExploreBannerPayload — body for POST /admin/insert-explore-banner.
 * Keys stay quoted because 'img-src' is not a valid identifier.
 */
export const zInsertExploreBannerPayload = z.object({
  'category': z.string(),
  'description': z.string(),
  'img-src': z.string(),
  'language': z.string().optional().default('en-US'),
  'link': z.string(),
  'sort': z.int(),
  'title': z.string(),
})
/**
 * LangContentPayload — one language variant of a notification's content.
 */
export const zLangContentPayload = z.object({
  body: z.string(),
  lang: z.string(),
  subtitle: z.string().nullish(),
  title: z.string(),
  title_pic_url: z.string().nullish(),
})
/**
 * UpsertNotificationPayload — body for POST /admin/upsert_notification.
 * Requires at least one language variant in `contents`.
 */
export const zUpsertNotificationPayload = z.object({
  contents: z.array(zLangContentPayload).min(1),
  end_time: z.string().nullish(),
  frequency: z.string().optional().default('once'),
  notification_id: z.string().nullish(),
  start_time: z.string().nullish(),
  status: z.string().optional().default('active'),
})
/**
 * Accounts added successfully
 */
export const zPostAdminBatchAddNotificationAccountsResponse = z.record(z.string(), z.unknown())
// Path parameters for DELETE /admin/delete-explore-banner/{banner_id}.
export const zDeleteAdminDeleteExploreBannerByBannerIdPath = z.object({
  banner_id: z.string(),
})
/**
 * Banner deleted successfully
 */
export const zDeleteAdminDeleteExploreBannerByBannerIdResponse = z.record(z.string(), z.unknown())
// POST /admin/insert-explore-apps request body (alias).
export const zPostAdminInsertExploreAppsBody = zInsertExploreAppPayload
/**
 * Success — the operation documents two status codes (200 and 201) that
 * share the same free-form object schema, so the generator's two-member
 * union had byte-identical options. A single record schema accepts and
 * rejects exactly the same values.
 */
export const zPostAdminInsertExploreAppsResponse = z.record(z.string(), z.unknown())
// Path parameters for DELETE /admin/insert-explore-apps/{app_id}.
export const zDeleteAdminInsertExploreAppsByAppIdPath = z.object({
  app_id: z.string(),
})
/**
 * App removed successfully
 */
export const zDeleteAdminInsertExploreAppsByAppIdResponse = z.record(z.string(), z.unknown())
// POST /admin/insert-explore-banner request body (alias).
export const zPostAdminInsertExploreBannerBody = zInsertExploreBannerPayload
/**
 * Banner inserted successfully
 */
export const zPostAdminInsertExploreBannerResponse = z.record(z.string(), z.unknown())
// POST /admin/upsert_notification request body (alias).
export const zPostAdminUpsertNotificationBody = zUpsertNotificationPayload
/**
 * Notification upserted successfully
 */
export const zPostAdminUpsertNotificationResponse = z.record(z.string(), z.unknown())

View File

@ -0,0 +1,25 @@
// This file is auto-generated by @hey-api/openapi-ts
import { oc } from '@orpc/contract'
import * as z from 'zod'
import { zGetAllWorkspacesQuery, zGetAllWorkspacesResponse } from './zod.gen'
// GET /all-workspaces — the query object is optional because both of its
// parameters carry defaults (see zGetAllWorkspacesQuery).
export const get = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAllWorkspaces',
    path: '/all-workspaces',
    tags: ['console'],
  })
  .input(z.object({ query: zGetAllWorkspacesQuery.optional() }))
  .output(zGetAllWorkspacesResponse)
// Router group for /all-workspaces.
export const allWorkspaces = {
  get,
}
// Root contract object exported from this generated file.
export const contract = {
  allWorkspaces,
}

View File

@ -0,0 +1,23 @@
// This file is auto-generated by @hey-api/openapi-ts
/** Connection options for the generated client; the base URL must end in `/console/api`. */
export type ClientOptions = {
  baseUrl: `${string}://${string}/console/api` | (string & {})
}

/** Request descriptor for GET /all-workspaces (optional pagination query). */
export type GetAllWorkspacesData = {
  body?: never
  path?: never
  query?: {
    limit?: number
    page?: number
  }
  url: '/all-workspaces'
}

/** Success payloads keyed by HTTP status code. */
export type GetAllWorkspacesResponses = {
  200: Record<string, unknown>
}

/** Union of all success payloads for GET /all-workspaces. */
export type GetAllWorkspacesResponse = GetAllWorkspacesResponses[keyof GetAllWorkspacesResponses]

View File

@ -0,0 +1,13 @@
// This file is auto-generated by @hey-api/openapi-ts
import * as z from 'zod'
// GET /all-workspaces query parameters; both are optional with defaults
// (limit: 1-100, default 20; page: 1-99999, default 1).
export const zGetAllWorkspacesQuery = z.object({
  limit: z.int().gte(1).lte(100).optional().default(20),
  page: z.int().gte(1).lte(99999).optional().default(1),
})
/**
 * Success
 */
export const zGetAllWorkspacesResponse = z.record(z.string(), z.unknown())

View File

@ -0,0 +1,109 @@
// This file is auto-generated by @hey-api/openapi-ts
import { oc } from '@orpc/contract'
import * as z from 'zod'
import {
zDeleteApiBasedExtensionByIdPath,
zDeleteApiBasedExtensionByIdResponse,
zGetApiBasedExtensionByIdPath,
zGetApiBasedExtensionByIdResponse,
zGetApiBasedExtensionResponse,
zPostApiBasedExtensionBody,
zPostApiBasedExtensionByIdBody,
zPostApiBasedExtensionByIdPath,
zPostApiBasedExtensionByIdResponse,
zPostApiBasedExtensionResponse,
} from './zod.gen'
/**
 * Delete API-based extension
 *
 * DELETE /api-based-extension/{id}; responds 204 on success.
 */
export const delete_ = oc
  .route({
    description: 'Delete API-based extension',
    inputStructure: 'detailed',
    method: 'DELETE',
    operationId: 'deleteApiBasedExtensionById',
    path: '/api-based-extension/{id}',
    successStatus: 204,
    tags: ['console'],
  })
  .input(z.object({ params: zDeleteApiBasedExtensionByIdPath }))
  .output(zDeleteApiBasedExtensionByIdResponse)
/**
 * Get API-based extension by ID
 */
export const get = oc
  .route({
    description: 'Get API-based extension by ID',
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getApiBasedExtensionById',
    path: '/api-based-extension/{id}',
    tags: ['console'],
  })
  .input(z.object({ params: zGetApiBasedExtensionByIdPath }))
  .output(zGetApiBasedExtensionByIdResponse)
/**
 * Update API-based extension
 */
export const post = oc
  .route({
    description: 'Update API-based extension',
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postApiBasedExtensionById',
    path: '/api-based-extension/{id}',
    tags: ['console'],
  })
  .input(z.object({ body: zPostApiBasedExtensionByIdBody, params: zPostApiBasedExtensionByIdPath }))
  .output(zPostApiBasedExtensionByIdResponse)
// Path-parameter group for /api-based-extension/{id}; `delete` is a
// reserved word, hence the generator's `delete_` binding.
export const byId = {
  delete: delete_,
  get,
  post,
}
/**
 * Get all API-based extensions for current tenant
 */
export const get2 = oc
  .route({
    description: 'Get all API-based extensions for current tenant',
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getApiBasedExtension',
    path: '/api-based-extension',
    tags: ['console'],
  })
  .output(zGetApiBasedExtensionResponse)
/**
 * Create a new API-based extension
 *
 * POST /api-based-extension; responds 201 on success.
 */
export const post2 = oc
  .route({
    description: 'Create a new API-based extension',
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postApiBasedExtension',
    path: '/api-based-extension',
    successStatus: 201,
    tags: ['console'],
  })
  .input(z.object({ body: zPostApiBasedExtensionBody }))
  .output(zPostApiBasedExtensionResponse)
// Router group for /api-based-extension and its {id} child.
export const apiBasedExtension = {
  get: get2,
  post: post2,
  byId,
}
// Root contract object exported from this generated file.
export const contract = {
  apiBasedExtension,
}

View File

@ -0,0 +1,99 @@
// This file is auto-generated by @hey-api/openapi-ts
/** Connection options for the generated client; the base URL must end in `/console/api`. */
export type ClientOptions = {
  baseUrl: `${string}://${string}/console/api` | (string & {})
}

/** List response: every extension record for the current tenant. */
export type ApiBasedExtensionListResponse = Array<ApiBasedExtensionResponse>

/** Request body for creating or updating an API-based extension. */
export type ApiBasedExtensionPayload = {
  api_endpoint: string
  api_key: string
  name: string
}

/** A persisted API-based extension record. */
export type ApiBasedExtensionResponse = {
  api_endpoint: string
  api_key: string
  created_at?: number | null
  id: string
  name: string
}

/** Request descriptor for GET /api-based-extension. */
export type GetApiBasedExtensionData = {
  body?: never
  path?: never
  query?: never
  url: '/api-based-extension'
}

/** Success payloads keyed by HTTP status code. */
export type GetApiBasedExtensionResponses = {
  200: ApiBasedExtensionListResponse
}

/** Union of all success payloads for GET /api-based-extension. */
export type GetApiBasedExtensionResponse = GetApiBasedExtensionResponses[keyof GetApiBasedExtensionResponses]

/** Request descriptor for POST /api-based-extension. */
export type PostApiBasedExtensionData = {
  body: ApiBasedExtensionPayload
  path?: never
  query?: never
  url: '/api-based-extension'
}

/** Success payloads keyed by HTTP status code. */
export type PostApiBasedExtensionResponses = {
  201: ApiBasedExtensionResponse
}

/** Union of all success payloads for POST /api-based-extension. */
export type PostApiBasedExtensionResponse = PostApiBasedExtensionResponses[keyof PostApiBasedExtensionResponses]

/** Request descriptor for DELETE /api-based-extension/{id}. */
export type DeleteApiBasedExtensionByIdData = {
  body?: never
  path: {
    id: string
  }
  query?: never
  url: '/api-based-extension/{id}'
}

/** Success payloads keyed by HTTP status code. */
export type DeleteApiBasedExtensionByIdResponses = {
  204: Record<string, unknown>
}

/** Union of all success payloads for DELETE /api-based-extension/{id}. */
export type DeleteApiBasedExtensionByIdResponse = DeleteApiBasedExtensionByIdResponses[keyof DeleteApiBasedExtensionByIdResponses]

/** Request descriptor for GET /api-based-extension/{id}. */
export type GetApiBasedExtensionByIdData = {
  body?: never
  path: {
    id: string
  }
  query?: never
  url: '/api-based-extension/{id}'
}

/** Success payloads keyed by HTTP status code. */
export type GetApiBasedExtensionByIdResponses = {
  200: ApiBasedExtensionResponse
}

/** Union of all success payloads for GET /api-based-extension/{id}. */
export type GetApiBasedExtensionByIdResponse = GetApiBasedExtensionByIdResponses[keyof GetApiBasedExtensionByIdResponses]

/** Request descriptor for POST /api-based-extension/{id}. */
export type PostApiBasedExtensionByIdData = {
  body: ApiBasedExtensionPayload
  path: {
    id: string
  }
  query?: never
  url: '/api-based-extension/{id}'
}

/** Success payloads keyed by HTTP status code. */
export type PostApiBasedExtensionByIdResponses = {
  200: ApiBasedExtensionResponse
}

/** Union of all success payloads for POST /api-based-extension/{id}. */
export type PostApiBasedExtensionByIdResponse = PostApiBasedExtensionByIdResponses[keyof PostApiBasedExtensionByIdResponses]

View File

@ -0,0 +1,66 @@
// This file is auto-generated by @hey-api/openapi-ts
import * as z from 'zod'
/**
 * APIBasedExtensionPayload — request body for create/update operations.
 */
export const zApiBasedExtensionPayload = z.object({
  api_endpoint: z.string(),
  api_key: z.string(),
  name: z.string(),
})
/**
 * APIBasedExtensionResponse — a persisted extension record.
 */
export const zApiBasedExtensionResponse = z.object({
  api_endpoint: z.string(),
  api_key: z.string(),
  created_at: z.int().nullish(),
  id: z.string(),
  name: z.string(),
})
// List response: array of extension records.
export const zApiBasedExtensionListResponse = z.array(zApiBasedExtensionResponse)
/**
 * Success
 */
export const zGetApiBasedExtensionResponse = zApiBasedExtensionListResponse
// POST /api-based-extension request body (alias).
export const zPostApiBasedExtensionBody = zApiBasedExtensionPayload
/**
 * Extension created successfully
 */
export const zPostApiBasedExtensionResponse = zApiBasedExtensionResponse
// Path parameters for DELETE /api-based-extension/{id}.
export const zDeleteApiBasedExtensionByIdPath = z.object({
  id: z.string(),
})
/**
 * Extension deleted successfully
 */
export const zDeleteApiBasedExtensionByIdResponse = z.record(z.string(), z.unknown())
// Path parameters for GET /api-based-extension/{id}.
export const zGetApiBasedExtensionByIdPath = z.object({
  id: z.string(),
})
/**
 * Success
 */
export const zGetApiBasedExtensionByIdResponse = zApiBasedExtensionResponse
// POST /api-based-extension/{id} request body (alias).
export const zPostApiBasedExtensionByIdBody = zApiBasedExtensionPayload
// Path parameters for POST /api-based-extension/{id}.
export const zPostApiBasedExtensionByIdPath = z.object({
  id: z.string(),
})
/**
 * Extension updated successfully
 */
export const zPostApiBasedExtensionByIdResponse = zApiBasedExtensionResponse

View File

@ -0,0 +1,66 @@
// This file is auto-generated by @hey-api/openapi-ts
import { oc } from '@orpc/contract'
import * as z from 'zod'
import {
zDeleteApiKeyAuthDataSourceByBindingIdPath,
zDeleteApiKeyAuthDataSourceByBindingIdResponse,
zGetApiKeyAuthDataSourceResponse,
zPostApiKeyAuthDataSourceBindingBody,
zPostApiKeyAuthDataSourceBindingResponse,
} from './zod.gen'
// POST /api-key-auth/data-source/binding — create an API-key binding
// (body validated by zPostApiKeyAuthDataSourceBindingBody).
export const post = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postApiKeyAuthDataSourceBinding',
    path: '/api-key-auth/data-source/binding',
    tags: ['console'],
  })
  .input(z.object({ body: zPostApiKeyAuthDataSourceBindingBody }))
  .output(zPostApiKeyAuthDataSourceBindingResponse)
// Router group for the /binding path segment.
export const binding = {
  post,
}
// DELETE /api-key-auth/data-source/{binding_id} — remove a binding.
export const delete_ = oc
  .route({
    inputStructure: 'detailed',
    method: 'DELETE',
    operationId: 'deleteApiKeyAuthDataSourceByBindingId',
    path: '/api-key-auth/data-source/{binding_id}',
    tags: ['console'],
  })
  .input(z.object({ params: zDeleteApiKeyAuthDataSourceByBindingIdPath }))
  .output(zDeleteApiKeyAuthDataSourceByBindingIdResponse)
// Path-parameter group for {binding_id}; `delete` is a reserved word,
// hence the generator's `delete_` binding.
export const byBindingId = {
  delete: delete_,
}
// GET /api-key-auth/data-source — list bindings (no request schema).
export const get = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getApiKeyAuthDataSource',
    path: '/api-key-auth/data-source',
    tags: ['console'],
  })
  .output(zGetApiKeyAuthDataSourceResponse)
// Router group for /api-key-auth/data-source and its children.
export const dataSource = {
  get,
  binding,
  byBindingId,
}
// Router group for the /api-key-auth path segment.
export const apiKeyAuth = {
  dataSource,
}
// Root contract object exported from this generated file.
export const contract = {
  apiKeyAuth,
}

View File

@ -0,0 +1,63 @@
// This file is auto-generated by @hey-api/openapi-ts
/** Connection options for the generated client; the base URL must end in `/console/api`. */
export type ClientOptions = {
  baseUrl: `${string}://${string}/console/api` | (string & {})
}

/** Request body for binding an API key to a data source provider. */
export type ApiKeyAuthBindingPayload = {
  category: string
  credentials: Record<string, unknown>
  provider: string
}

/** Request descriptor for GET /api-key-auth/data-source. */
export type GetApiKeyAuthDataSourceData = {
  body?: never
  path?: never
  query?: never
  url: '/api-key-auth/data-source'
}

/** Success payloads keyed by HTTP status code. */
export type GetApiKeyAuthDataSourceResponses = {
  200: Record<string, unknown>
}

/** Union of all success payloads for GET /api-key-auth/data-source. */
export type GetApiKeyAuthDataSourceResponse = GetApiKeyAuthDataSourceResponses[keyof GetApiKeyAuthDataSourceResponses]

/** Request descriptor for POST /api-key-auth/data-source/binding. */
export type PostApiKeyAuthDataSourceBindingData = {
  body: ApiKeyAuthBindingPayload
  path?: never
  query?: never
  url: '/api-key-auth/data-source/binding'
}

/** Success payloads keyed by HTTP status code. */
export type PostApiKeyAuthDataSourceBindingResponses = {
  200: Record<string, unknown>
}

/** Union of all success payloads for POST /api-key-auth/data-source/binding. */
export type PostApiKeyAuthDataSourceBindingResponse = PostApiKeyAuthDataSourceBindingResponses[keyof PostApiKeyAuthDataSourceBindingResponses]

/** Request descriptor for DELETE /api-key-auth/data-source/{binding_id}. */
export type DeleteApiKeyAuthDataSourceByBindingIdData = {
  body?: never
  path: {
    binding_id: string
  }
  query?: never
  url: '/api-key-auth/data-source/{binding_id}'
}

/** Success payloads keyed by HTTP status code. */
export type DeleteApiKeyAuthDataSourceByBindingIdResponses = {
  200: Record<string, unknown>
}

/** Union of all success payloads for DELETE /api-key-auth/data-source/{binding_id}. */
export type DeleteApiKeyAuthDataSourceByBindingIdResponse = DeleteApiKeyAuthDataSourceByBindingIdResponses[keyof DeleteApiKeyAuthDataSourceByBindingIdResponses]

View File

@ -0,0 +1,33 @@
// This file is auto-generated by @hey-api/openapi-ts
import * as z from 'zod'
/**
 * ApiKeyAuthBindingPayload — provider/category plus free-form credentials.
 */
export const zApiKeyAuthBindingPayload = z.object({
  category: z.string(),
  credentials: z.record(z.string(), z.unknown()),
  provider: z.string(),
})
/**
 * Success
 */
export const zGetApiKeyAuthDataSourceResponse = z.record(z.string(), z.unknown())
// POST /api-key-auth/data-source/binding request body (alias).
export const zPostApiKeyAuthDataSourceBindingBody = zApiKeyAuthBindingPayload
/**
 * Success
 */
export const zPostApiKeyAuthDataSourceBindingResponse = z.record(z.string(), z.unknown())
// Path parameters for DELETE /api-key-auth/data-source/{binding_id}.
export const zDeleteApiKeyAuthDataSourceByBindingIdPath = z.object({
  binding_id: z.string(),
})
/**
 * Success
 */
export const zDeleteApiKeyAuthDataSourceByBindingIdResponse = z.record(z.string(), z.unknown())

View File

@ -0,0 +1,33 @@
// This file is auto-generated by @hey-api/openapi-ts
import { oc } from '@orpc/contract'
import * as z from 'zod'
import { zGetAppPromptTemplatesQuery, zGetAppPromptTemplatesResponse } from './zod.gen'
/**
 * Get advanced prompt templates based on app mode and model configuration
 *
 * GET /app/prompt-templates — query validated by zGetAppPromptTemplatesQuery.
 */
export const get = oc
  .route({
    description: 'Get advanced prompt templates based on app mode and model configuration',
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAppPromptTemplates',
    path: '/app/prompt-templates',
    tags: ['console'],
  })
  .input(z.object({ query: zGetAppPromptTemplatesQuery }))
  .output(zGetAppPromptTemplatesResponse)
// Router group for the /app/prompt-templates path segment.
export const promptTemplates = {
  get,
}
// Router group for the /app path segment.
export const app = {
  promptTemplates,
}
// Root contract object exported from this generated file.
export const contract = {
  app,
}

View File

@ -0,0 +1,35 @@
// This file is auto-generated by @hey-api/openapi-ts
/** Connection options for the generated client; the base URL must end in `/console/api`. */
export type ClientOptions = {
  baseUrl: `${string}://${string}/console/api` | (string & {})
}

/** Request descriptor for GET /app/prompt-templates (query parameters only). */
export type GetAppPromptTemplatesData = {
  body?: never
  path?: never
  query: {
    app_mode: string
    has_context?: string
    model_mode: string
    model_name: string
  }
  url: '/app/prompt-templates'
}

/** Error payloads keyed by HTTP status code. */
export type GetAppPromptTemplatesErrors = {
  400: Record<string, unknown>
}

/** Union of all error payloads for GET /app/prompt-templates. */
export type GetAppPromptTemplatesError = GetAppPromptTemplatesErrors[keyof GetAppPromptTemplatesErrors]

/** Success payloads keyed by HTTP status code. */
export type GetAppPromptTemplatesResponses = {
  200: Array<Record<string, unknown>>
}

/** Union of all success payloads for GET /app/prompt-templates. */
export type GetAppPromptTemplatesResponse = GetAppPromptTemplatesResponses[keyof GetAppPromptTemplatesResponses]

View File

@ -0,0 +1,15 @@
// This file is auto-generated by @hey-api/openapi-ts
import * as z from 'zod'
// GET /app/prompt-templates query parameters; has_context is optional and
// defaults to the string 'true' (string-typed flag, not a boolean).
export const zGetAppPromptTemplatesQuery = z.object({
  app_mode: z.string(),
  has_context: z.string().optional().default('true'),
  model_mode: z.string(),
  model_name: z.string(),
})
/**
 * Prompt templates retrieved successfully
 */
export const zGetAppPromptTemplatesResponse = z.array(z.record(z.string(), z.unknown()))

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,226 @@
// This file is auto-generated by @hey-api/openapi-ts
import { oc } from '@orpc/contract'
import * as z from 'zod'
import {
zDeleteAuthPluginDatasourceByProviderIdCustomClientPath,
zDeleteAuthPluginDatasourceByProviderIdCustomClientResponse,
zGetAuthPluginDatasourceByProviderIdPath,
zGetAuthPluginDatasourceByProviderIdResponse,
zGetAuthPluginDatasourceDefaultListResponse,
zGetAuthPluginDatasourceListResponse,
zPostAuthPluginDatasourceByProviderIdBody,
zPostAuthPluginDatasourceByProviderIdCustomClientBody,
zPostAuthPluginDatasourceByProviderIdCustomClientPath,
zPostAuthPluginDatasourceByProviderIdCustomClientResponse,
zPostAuthPluginDatasourceByProviderIdDefaultBody,
zPostAuthPluginDatasourceByProviderIdDefaultPath,
zPostAuthPluginDatasourceByProviderIdDefaultResponse,
zPostAuthPluginDatasourceByProviderIdDeleteBody,
zPostAuthPluginDatasourceByProviderIdDeletePath,
zPostAuthPluginDatasourceByProviderIdDeleteResponse,
zPostAuthPluginDatasourceByProviderIdPath,
zPostAuthPluginDatasourceByProviderIdResponse,
zPostAuthPluginDatasourceByProviderIdUpdateBody,
zPostAuthPluginDatasourceByProviderIdUpdateNameBody,
zPostAuthPluginDatasourceByProviderIdUpdateNamePath,
zPostAuthPluginDatasourceByProviderIdUpdateNameResponse,
zPostAuthPluginDatasourceByProviderIdUpdatePath,
zPostAuthPluginDatasourceByProviderIdUpdateResponse,
} from './zod.gen'
// GET /auth/plugin/datasource/default-list (no request schema).
export const get = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAuthPluginDatasourceDefaultList',
    path: '/auth/plugin/datasource/default-list',
    tags: ['console'],
  })
  .output(zGetAuthPluginDatasourceDefaultListResponse)
// Router group for the /default-list path segment.
export const defaultList = {
  get,
}
// GET /auth/plugin/datasource/list (no request schema).
export const get2 = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAuthPluginDatasourceList',
    path: '/auth/plugin/datasource/list',
    tags: ['console'],
  })
  .output(zGetAuthPluginDatasourceListResponse)
// Router group for the /list path segment.
export const list = {
  get: get2,
}
// DELETE /auth/plugin/datasource/{provider_id}/custom-client.
export const delete_ = oc
  .route({
    inputStructure: 'detailed',
    method: 'DELETE',
    operationId: 'deleteAuthPluginDatasourceByProviderIdCustomClient',
    path: '/auth/plugin/datasource/{provider_id}/custom-client',
    tags: ['console'],
  })
  .input(z.object({ params: zDeleteAuthPluginDatasourceByProviderIdCustomClientPath }))
  .output(zDeleteAuthPluginDatasourceByProviderIdCustomClientResponse)
// POST /auth/plugin/datasource/{provider_id}/custom-client.
export const post = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAuthPluginDatasourceByProviderIdCustomClient',
    path: '/auth/plugin/datasource/{provider_id}/custom-client',
    tags: ['console'],
  })
  .input(
    z.object({
      body: zPostAuthPluginDatasourceByProviderIdCustomClientBody,
      params: zPostAuthPluginDatasourceByProviderIdCustomClientPath,
    }),
  )
  .output(zPostAuthPluginDatasourceByProviderIdCustomClientResponse)
// Router group for the /custom-client path segment; `delete` is a
// reserved word, hence the generator's `delete_` binding.
export const customClient = {
  delete: delete_,
  post,
}
// POST /auth/plugin/datasource/{provider_id}/default.
export const post2 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAuthPluginDatasourceByProviderIdDefault',
    path: '/auth/plugin/datasource/{provider_id}/default',
    tags: ['console'],
  })
  .input(
    z.object({
      body: zPostAuthPluginDatasourceByProviderIdDefaultBody,
      params: zPostAuthPluginDatasourceByProviderIdDefaultPath,
    }),
  )
  .output(zPostAuthPluginDatasourceByProviderIdDefaultResponse)
// `default` is a reserved word, hence the generator's `default_` binding.
export const default_ = {
  post: post2,
}
// POST /auth/plugin/datasource/{provider_id}/delete.
export const post3 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAuthPluginDatasourceByProviderIdDelete',
    path: '/auth/plugin/datasource/{provider_id}/delete',
    tags: ['console'],
  })
  .input(
    z.object({
      body: zPostAuthPluginDatasourceByProviderIdDeleteBody,
      params: zPostAuthPluginDatasourceByProviderIdDeletePath,
    }),
  )
  .output(zPostAuthPluginDatasourceByProviderIdDeleteResponse)
// Group for the /delete path segment; `delete_` is already taken above,
// so the generator numbered this binding `delete2`.
export const delete2 = {
  post: post3,
}
// POST /auth/plugin/datasource/{provider_id}/update.
export const post4 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAuthPluginDatasourceByProviderIdUpdate',
    path: '/auth/plugin/datasource/{provider_id}/update',
    tags: ['console'],
  })
  .input(
    z.object({
      body: zPostAuthPluginDatasourceByProviderIdUpdateBody,
      params: zPostAuthPluginDatasourceByProviderIdUpdatePath,
    }),
  )
  .output(zPostAuthPluginDatasourceByProviderIdUpdateResponse)
// Router group for the /update path segment.
export const update = {
  post: post4,
}
// POST /auth/plugin/datasource/{provider_id}/update-name.
export const post5 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAuthPluginDatasourceByProviderIdUpdateName',
    path: '/auth/plugin/datasource/{provider_id}/update-name',
    tags: ['console'],
  })
  .input(
    z.object({
      body: zPostAuthPluginDatasourceByProviderIdUpdateNameBody,
      params: zPostAuthPluginDatasourceByProviderIdUpdateNamePath,
    }),
  )
  .output(zPostAuthPluginDatasourceByProviderIdUpdateNameResponse)
// Router group for the /update-name path segment.
export const updateName = {
  post: post5,
}
// GET /auth/plugin/datasource/{provider_id}.
export const get3 = oc
  .route({
    inputStructure: 'detailed',
    method: 'GET',
    operationId: 'getAuthPluginDatasourceByProviderId',
    path: '/auth/plugin/datasource/{provider_id}',
    tags: ['console'],
  })
  .input(z.object({ params: zGetAuthPluginDatasourceByProviderIdPath }))
  .output(zGetAuthPluginDatasourceByProviderIdResponse)
// POST /auth/plugin/datasource/{provider_id}.
export const post6 = oc
  .route({
    inputStructure: 'detailed',
    method: 'POST',
    operationId: 'postAuthPluginDatasourceByProviderId',
    path: '/auth/plugin/datasource/{provider_id}',
    tags: ['console'],
  })
  .input(
    z.object({
      body: zPostAuthPluginDatasourceByProviderIdBody,
      params: zPostAuthPluginDatasourceByProviderIdPath,
    }),
  )
  .output(zPostAuthPluginDatasourceByProviderIdResponse)
// Path-parameter group for {provider_id} and all of its child segments.
export const byProviderId = {
  get: get3,
  post: post6,
  customClient,
  default: default_,
  delete: delete2,
  update,
  updateName,
}
// Router group for the /auth/plugin/datasource path segment.
export const datasource = {
  defaultList,
  list,
  byProviderId,
}
// Router group for the /auth/plugin path segment.
export const plugin = {
  datasource,
}
// Router group for the /auth path segment.
export const auth = {
  plugin,
}
// Root contract object exported from this generated file.
export const contract = {
  auth,
}

Some files were not shown because too many files have changed in this diff Show More