Mirror of https://github.com/langgenius/dify.git (synced 2026-04-15 09:57:03 +08:00)

Merge remote-tracking branch 'myori/main' into feat/collaboration2

This commit is contained in commit 5cac278366.

82  .github/scripts/generate-i18n-changes.mjs  (vendored, new file)
@@ -0,0 +1,82 @@
+import { execFileSync } from 'node:child_process'
+import fs from 'node:fs'
+import path from 'node:path'
+
+const repoRoot = process.cwd()
+const baseSha = process.env.BASE_SHA || ''
+const headSha = process.env.HEAD_SHA || ''
+const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)
+const outputPath = process.env.I18N_CHANGES_OUTPUT_PATH || '/tmp/i18n-changes.json'
+
+const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)
+
+const readCurrentJson = (fileStem) => {
+  const filePath = englishPath(fileStem)
+  if (!fs.existsSync(filePath))
+    return null
+
+  return JSON.parse(fs.readFileSync(filePath, 'utf8'))
+}
+
+const readBaseJson = (fileStem) => {
+  if (!baseSha)
+    return null
+
+  try {
+    const relativePath = `web/i18n/en-US/${fileStem}.json`
+    const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
+    return JSON.parse(content)
+  }
+  catch {
+    return null
+  }
+}
+
+const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)
+
+const changes = {}
+
+for (const fileStem of files) {
+  const currentJson = readCurrentJson(fileStem)
+  const beforeJson = readBaseJson(fileStem) || {}
+  const afterJson = currentJson || {}
+  const added = {}
+  const updated = {}
+  const deleted = []
+
+  for (const [key, value] of Object.entries(afterJson)) {
+    if (!(key in beforeJson)) {
+      added[key] = value
+      continue
+    }
+
+    if (!compareJson(beforeJson[key], value)) {
+      updated[key] = {
+        before: beforeJson[key],
+        after: value,
+      }
+    }
+  }
+
+  for (const key of Object.keys(beforeJson)) {
+    if (!(key in afterJson))
+      deleted.push(key)
+  }
+
+  changes[fileStem] = {
+    fileDeleted: currentJson === null,
+    added,
+    updated,
+    deleted,
+  }
+}
+
+fs.writeFileSync(
+  outputPath,
+  JSON.stringify({
+    baseSha,
+    headSha,
+    files,
+    changes,
+  })
+)
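The script writes one JSON document per run. As a rough illustration of the shape a downstream step can expect — the path and field names mirror the script above, while the summary formatting is purely illustrative — a minimal Python sketch:

import json

# Default path matches the I18N_CHANGES_OUTPUT_PATH fallback above.
with open("/tmp/i18n-changes.json", encoding="utf-8") as f:
    payload = json.load(f)

print(f"base={payload['baseSha'][:8]} head={payload['headSha'][:8]}")

for file_stem, change in payload["changes"].items():
    # Each entry carries added/updated key maps, a deleted-key list,
    # and a flag for English files that were removed entirely.
    suffix = " (file deleted)" if change["fileDeleted"] else ""
    print(
        f"{file_stem}: +{len(change['added'])} "
        f"~{len(change['updated'])} -{len(change['deleted'])}{suffix}"
    )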
101  .github/workflows/translate-i18n-claude.yml  (vendored)

@@ -68,89 +68,7 @@ jobs:
   " web/i18n-config/languages.ts | sed 's/[[:space:]]*$//')

   generate_changes_json() {
-    node <<'NODE'
-    const { execFileSync } = require('node:child_process')
-    const fs = require('node:fs')
-    const path = require('node:path')
-
-    const repoRoot = process.cwd()
-    const baseSha = process.env.BASE_SHA || ''
-    const headSha = process.env.HEAD_SHA || ''
-    const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)
-
-    const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)
-
-    const readCurrentJson = (fileStem) => {
-      const filePath = englishPath(fileStem)
-      if (!fs.existsSync(filePath))
-        return null
-
-      return JSON.parse(fs.readFileSync(filePath, 'utf8'))
-    }
-
-    const readBaseJson = (fileStem) => {
-      if (!baseSha)
-        return null
-
-      try {
-        const relativePath = `web/i18n/en-US/${fileStem}.json`
-        const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
-        return JSON.parse(content)
-      }
-      catch (error) {
-        return null
-      }
-    }
-
-    const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)
-
-    const changes = {}
-
-    for (const fileStem of files) {
-      const currentJson = readCurrentJson(fileStem)
-      const beforeJson = readBaseJson(fileStem) || {}
-      const afterJson = currentJson || {}
-      const added = {}
-      const updated = {}
-      const deleted = []
-
-      for (const [key, value] of Object.entries(afterJson)) {
-        if (!(key in beforeJson)) {
-          added[key] = value
-          continue
-        }
-
-        if (!compareJson(beforeJson[key], value)) {
-          updated[key] = {
-            before: beforeJson[key],
-            after: value,
-          }
-        }
-      }
-
-      for (const key of Object.keys(beforeJson)) {
-        if (!(key in afterJson))
-          deleted.push(key)
-      }
-
-      changes[fileStem] = {
-        fileDeleted: currentJson === null,
-        added,
-        updated,
-        deleted,
-      }
-    }
-
-    fs.writeFileSync(
-      '/tmp/i18n-changes.json',
-      JSON.stringify({
-        baseSha,
-        headSha,
-        files,
-        changes,
-      })
-    )
-    NODE
+    node .github/scripts/generate-i18n-changes.mjs
   }

   if [ "${{ github.event_name }}" = "repository_dispatch" ]; then
@@ -270,7 +188,7 @@ jobs:
   Tool rules:
   - Use Read for repository files.
   - Use Edit for JSON updates.
-  - Use Bash only for `pnpm`.
+  - Use Bash only for `vp`.
   - Do not use Bash for `git`, `gh`, or branch management.

   Required execution plan:
@@ -292,7 +210,7 @@ jobs:
   - Read the current English JSON file for any file that still exists so wording, placeholders, and surrounding terminology stay accurate.
   - If `Structured change set available` is `false`, treat this as a scoped full sync and use the current English files plus scoped checks as the source of truth.
   4. Run a scoped pre-check before editing:
-  - `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
+  - `vp run dify-web#i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
   - Use this command as the source of truth for missing and extra keys inside the current scope.
   5. Apply translations.
   - For every target language and scoped file:
@@ -300,19 +218,19 @@ jobs:
   - If the locale file does not exist yet, create it with `Write` and then continue with `Edit` as needed.
   - ADD missing keys.
   - UPDATE stale translations when the English value changed.
-  - DELETE removed keys. Prefer `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }} --auto-remove` for extra keys so deletions stay in scope.
+  - DELETE removed keys. Prefer `vp run dify-web#i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }} --auto-remove` for extra keys so deletions stay in scope.
   - Preserve placeholders exactly: `{{variable}}`, `${variable}`, HTML tags, component tags, and variable names.
   - Match the existing terminology and register used by each locale.
   - Prefer one Edit per file when stable, but prioritize correctness over batching.
   6. Verify only the edited files.
-  - Run `pnpm --dir ${{ github.workspace }}/web lint:fix --quiet -- <relative edited i18n file paths>`
-  - Run `pnpm --dir ${{ github.workspace }}/web run i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
+  - Run `vp run dify-web#lint:fix --quiet -- <relative edited i18n file paths under web/>`
+  - Run `vp run dify-web#i18n:check ${{ steps.context.outputs.FILE_ARGS }} ${{ steps.context.outputs.LANG_ARGS }}`
   - If verification fails, fix the remaining problems before continuing.
   7. Stop after the scoped locale files are updated and verification passes.
   - Do not create branches, commits, or pull requests.
   claude_args: |
     --max-turns 120
-    --allowedTools "Read,Write,Edit,Bash(pnpm *),Bash(pnpm:*),Glob,Grep"
+    --allowedTools "Read,Write,Edit,Bash(vp *),Bash(vp:*),Glob,Grep"

 - name: Prepare branch metadata
   id: pr_meta
@@ -354,6 +272,7 @@ jobs:
 - name: Create or update translation PR
   if: steps.pr_meta.outputs.has_changes == 'true'
   env:
     GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     BRANCH_NAME: ${{ steps.pr_meta.outputs.branch_name }}
     FILES_IN_SCOPE: ${{ steps.context.outputs.CHANGED_FILES }}
     TARGET_LANGS: ${{ steps.context.outputs.TARGET_LANGS }}
@@ -402,8 +321,8 @@ jobs:
   '',
   '## Verification',
   '',
-  `- \`pnpm --dir web run i18n:check --file ${process.env.FILES_IN_SCOPE} --lang ${process.env.TARGET_LANGS}\``,
-  `- \`pnpm --dir web lint:fix --quiet -- <edited i18n files>\``,
+  `- \`vp run dify-web#i18n:check --file ${process.env.FILES_IN_SCOPE} --lang ${process.env.TARGET_LANGS}\``,
+  `- \`vp run dify-web#lint:fix --quiet -- <edited i18n files under web/>\``,
   '',
   '## Notes',
   '',
83  .github/workflows/trigger-i18n-sync.yml  (vendored)

@@ -42,88 +42,7 @@ jobs:
   fi

   export BASE_SHA HEAD_SHA CHANGED_FILES
-  node <<'NODE'
-  const { execFileSync } = require('node:child_process')
-  const fs = require('node:fs')
-  const path = require('node:path')
-
-  const repoRoot = process.cwd()
-  const baseSha = process.env.BASE_SHA || ''
-  const headSha = process.env.HEAD_SHA || ''
-  const files = (process.env.CHANGED_FILES || '').split(/\s+/).filter(Boolean)
-
-  const englishPath = fileStem => path.join(repoRoot, 'web', 'i18n', 'en-US', `${fileStem}.json`)
-
-  const readCurrentJson = (fileStem) => {
-    const filePath = englishPath(fileStem)
-    if (!fs.existsSync(filePath))
-      return null
-
-    return JSON.parse(fs.readFileSync(filePath, 'utf8'))
-  }
-
-  const readBaseJson = (fileStem) => {
-    if (!baseSha)
-      return null
-
-    try {
-      const relativePath = `web/i18n/en-US/${fileStem}.json`
-      const content = execFileSync('git', ['show', `${baseSha}:${relativePath}`], { encoding: 'utf8' })
-      return JSON.parse(content)
-    }
-    catch (error) {
-      return null
-    }
-  }
-
-  const compareJson = (beforeValue, afterValue) => JSON.stringify(beforeValue) === JSON.stringify(afterValue)
-
-  const changes = {}
-
-  for (const fileStem of files) {
-    const beforeJson = readBaseJson(fileStem) || {}
-    const afterJson = readCurrentJson(fileStem) || {}
-    const added = {}
-    const updated = {}
-    const deleted = []
-
-    for (const [key, value] of Object.entries(afterJson)) {
-      if (!(key in beforeJson)) {
-        added[key] = value
-        continue
-      }
-
-      if (!compareJson(beforeJson[key], value)) {
-        updated[key] = {
-          before: beforeJson[key],
-          after: value,
-        }
-      }
-    }
-
-    for (const key of Object.keys(beforeJson)) {
-      if (!(key in afterJson))
-        deleted.push(key)
-    }
-
-    changes[fileStem] = {
-      fileDeleted: readCurrentJson(fileStem) === null,
-      added,
-      updated,
-      deleted,
-    }
-  }
-
-  fs.writeFileSync(
-    '/tmp/i18n-changes.json',
-    JSON.stringify({
-      baseSha,
-      headSha,
-      files,
-      changes,
-    })
-  )
-  NODE
+  node .github/scripts/generate-i18n-changes.mjs

   if [ -n "$CHANGED_FILES" ]; then
     echo "has_changes=true" >> "$GITHUB_OUTPUT"
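Both workflows now shell out to the same vendored script, so the only contract between them is the environment: BASE_SHA, HEAD_SHA, and a whitespace-separated CHANGED_FILES list of file stems. A hedged Python sketch of invoking it the same way locally (the SHAs and stems below are illustrative; running it requires Node and a git checkout):

import os
import subprocess

env = {
    **os.environ,
    "BASE_SHA": "abc1234",          # illustrative commit SHAs
    "HEAD_SHA": "def5678",
    "CHANGED_FILES": "app common",  # file stems, whitespace-separated
}
# Same entry point both workflows call after this change.
subprocess.run(
    ["node", ".github/scripts/generate-i18n-changes.mjs"],
    check=True,
    env=env,
)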
@@ -81,8 +81,8 @@ if $web_modified; then

   if $web_ts_modified; then
     echo "Running TypeScript type-check:tsgo"
-    if ! pnpm run type-check:tsgo; then
-      echo "Type check failed. Please run 'pnpm run type-check:tsgo' to fix the errors."
+    if ! npm run type-check:tsgo; then
+      echo "Type check failed. Please run 'npm run type-check:tsgo' to fix the errors."
       exit 1
     fi
   else
@@ -90,8 +90,8 @@ if $web_modified; then
   fi

   echo "Running knip"
-  if ! pnpm run knip; then
-    echo "Knip check failed. Please run 'pnpm run knip' to fix the errors."
+  if ! npm run knip; then
+    echo "Knip check failed. Please run 'npm run knip' to fix the errors."
     exit 1
   fi
@@ -74,6 +74,13 @@ REDIS_USE_CLUSTERS=false
 REDIS_CLUSTERS=
 REDIS_CLUSTERS_PASSWORD=

+REDIS_RETRY_RETRIES=3
+REDIS_RETRY_BACKOFF_BASE=1.0
+REDIS_RETRY_BACKOFF_CAP=10.0
+REDIS_SOCKET_TIMEOUT=5.0
+REDIS_SOCKET_CONNECT_TIMEOUT=5.0
+REDIS_HEALTH_CHECK_INTERVAL=30
+
 # celery configuration
 CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
 CELERY_BACKEND=redis
31  api/configs/middleware/cache/redis_config.py  (vendored)

@@ -117,6 +117,37 @@ class RedisConfig(BaseSettings):
         default=None,
     )

+    REDIS_RETRY_RETRIES: NonNegativeInt = Field(
+        description="Maximum number of retries per Redis command on "
+        "transient failures (ConnectionError, TimeoutError, socket.timeout)",
+        default=3,
+    )
+
+    REDIS_RETRY_BACKOFF_BASE: PositiveFloat = Field(
+        description="Base delay in seconds for exponential backoff between retries",
+        default=1.0,
+    )
+
+    REDIS_RETRY_BACKOFF_CAP: PositiveFloat = Field(
+        description="Maximum backoff delay in seconds between retries",
+        default=10.0,
+    )
+
+    REDIS_SOCKET_TIMEOUT: PositiveFloat | None = Field(
+        description="Socket timeout in seconds for Redis read/write operations",
+        default=5.0,
+    )
+
+    REDIS_SOCKET_CONNECT_TIMEOUT: PositiveFloat | None = Field(
+        description="Socket timeout in seconds for Redis connection establishment",
+        default=5.0,
+    )
+
+    REDIS_HEALTH_CHECK_INTERVAL: NonNegativeInt = Field(
+        description="Interval in seconds between Redis connection health checks (0 to disable)",
+        default=30,
+    )
+
     @field_validator("REDIS_MAX_CONNECTIONS", mode="before")
     @classmethod
     def _empty_string_to_none_for_max_conns(cls, v):
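The hunk only declares the settings; how Dify wires them into its Redis client is not shown in this diff. As a hedged sketch of how these fields map onto redis-py's retry API (the class and argument names are redis-py's; the mapping to the new config fields is an assumption):

import socket

from redis import ConnectionError, Redis, TimeoutError
from redis.backoff import ExponentialBackoff
from redis.retry import Retry

# Hypothetical wiring: values correspond to the new RedisConfig fields.
retry = Retry(
    ExponentialBackoff(cap=10.0, base=1.0),  # REDIS_RETRY_BACKOFF_CAP / _BASE
    retries=3,                               # REDIS_RETRY_RETRIES
)

client = Redis(
    host="localhost",
    retry=retry,
    # The transient errors named in the REDIS_RETRY_RETRIES description.
    retry_on_error=[ConnectionError, TimeoutError, socket.timeout],
    socket_timeout=5.0,                      # REDIS_SOCKET_TIMEOUT
    socket_connect_timeout=5.0,              # REDIS_SOCKET_CONNECT_TIMEOUT
    health_check_interval=30,                # REDIS_HEALTH_CHECK_INTERVAL
)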
@@ -48,11 +48,27 @@ class SavedMessageCreatePayload(BaseModel):
 # --- Workflow schemas ---


+class DefaultBlockConfigQuery(BaseModel):
+    q: str | None = None
+
+
+class WorkflowListQuery(BaseModel):
+    page: int = Field(default=1, ge=1, le=99999)
+    limit: int = Field(default=10, ge=1, le=100)
+    user_id: str | None = None
+    named_only: bool = False
+
+
 class WorkflowRunPayload(BaseModel):
     inputs: dict[str, Any]
     files: list[dict[str, Any]] | None = None


+class WorkflowUpdatePayload(BaseModel):
+    marked_name: str | None = Field(default=None, max_length=20)
+    marked_comment: str | None = Field(default=None, max_length=100)
+
+
 # --- Audio schemas ---
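These shared schemas replace per-controller copies that are deleted later in this diff (see the workflow controllers below). A minimal sketch of the validation behavior such a model gives the controllers, assuming stock Pydantic v2 semantics; the raw-args dict here is illustrative:

from typing import Any

from pydantic import BaseModel, Field, ValidationError


class WorkflowListQuery(BaseModel):
    page: int = Field(default=1, ge=1, le=99999)
    limit: int = Field(default=10, ge=1, le=100)
    user_id: str | None = None
    named_only: bool = False


def parse_query(args: dict[str, Any]) -> WorkflowListQuery:
    # Pydantic coerces "2" -> 2 and "true" -> True, and enforces
    # the ge/le bounds declared on each field.
    return WorkflowListQuery.model_validate(args)


print(parse_query({"page": "2", "named_only": "true"}))
try:
    parse_query({"limit": "1000"})  # exceeds le=100
except ValidationError as e:
    print(e.error_count(), "validation error")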
@@ -92,11 +92,13 @@ class AppImportApi(Resource):
             EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private")
         # Return appropriate status code based on result
         status = result.status
-        if status == ImportStatus.FAILED:
-            return result.model_dump(mode="json"), 400
-        elif status == ImportStatus.PENDING:
-            return result.model_dump(mode="json"), 202
-        return result.model_dump(mode="json"), 200
+        match status:
+            case ImportStatus.FAILED:
+                return result.model_dump(mode="json"), 400
+            case ImportStatus.PENDING:
+                return result.model_dump(mode="json"), 202
+            case ImportStatus.COMPLETED | ImportStatus.COMPLETED_WITH_WARNINGS:
+                return result.model_dump(mode="json"), 200


 @console_ns.route("/apps/imports/<string:import_id>/confirm")
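This is part of a broader sweep in the commit replacing if/elif chains with structural pattern matching. One nuance worth noting: unlike a trailing bare return, a match with no `case _` simply falls through when nothing matches, so the explicit `COMPLETED | COMPLETED_WITH_WARNINGS` alternation is what keeps the 200 path exhaustive. A standalone sketch of the pattern — the enum and its values here are illustrative, not Dify's actual definition:

from enum import Enum


class ImportStatus(Enum):
    FAILED = "failed"
    PENDING = "pending"
    COMPLETED = "completed"
    COMPLETED_WITH_WARNINGS = "completed-with-warnings"


def status_code(status: ImportStatus) -> int:
    match status:
        case ImportStatus.FAILED:
            return 400
        case ImportStatus.PENDING:
            return 202
        # Dotted names are value patterns compared with ==, not capture
        # patterns; `|` accepts either member.
        case ImportStatus.COMPLETED | ImportStatus.COMPLETED_WITH_WARNINGS:
            return 200


assert status_code(ImportStatus.COMPLETED_WITH_WARNINGS) == 200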
@@ -14,6 +14,7 @@ from sqlalchemy.orm import sessionmaker
 from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotFound

 import services
+from controllers.common.controller_schemas import DefaultBlockConfigQuery, WorkflowListQuery, WorkflowUpdatePayload
 from controllers.console import console_ns
 from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
 from controllers.console.app.workflow_run import workflow_run_node_execution_model
@@ -144,10 +145,6 @@ class PublishWorkflowPayload(BaseModel):
     marked_comment: str | None = Field(default=None, max_length=100)


-class DefaultBlockConfigQuery(BaseModel):
-    q: str | None = None
-
-
 class ConvertToWorkflowPayload(BaseModel):
     name: str | None = None
     icon_type: str | None = None
@@ -155,18 +152,6 @@ class ConvertToWorkflowPayload(BaseModel):
     icon_background: str | None = None


-class WorkflowListQuery(BaseModel):
-    page: int = Field(default=1, ge=1, le=99999)
-    limit: int = Field(default=10, ge=1, le=100)
-    user_id: str | None = None
-    named_only: bool = False
-
-
-class WorkflowUpdatePayload(BaseModel):
-    marked_name: str | None = Field(default=None, max_length=20)
-    marked_comment: str | None = Field(default=None, max_length=100)
-
-
 class WorkflowFeaturesPayload(BaseModel):
     features: dict[str, Any] = Field(..., description="Workflow feature configuration")
@@ -403,24 +403,27 @@ class VariableApi(Resource):

         new_value = None
         if raw_value is not None:
-            if variable.value_type == SegmentType.FILE:
-                if not isinstance(raw_value, dict):
-                    raise InvalidArgumentError(description=f"expected dict for file, got {type(raw_value)}")
-                raw_value = build_from_mapping(
-                    mapping=raw_value,
-                    tenant_id=app_model.tenant_id,
-                    access_controller=_file_access_controller,
-                )
-            elif variable.value_type == SegmentType.ARRAY_FILE:
-                if not isinstance(raw_value, list):
-                    raise InvalidArgumentError(description=f"expected list for files, got {type(raw_value)}")
-                if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
-                    raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
-                raw_value = build_from_mappings(
-                    mappings=raw_value,
-                    tenant_id=app_model.tenant_id,
-                    access_controller=_file_access_controller,
-                )
+            match variable.value_type:
+                case SegmentType.FILE:
+                    if not isinstance(raw_value, dict):
+                        raise InvalidArgumentError(description=f"expected dict for file, got {type(raw_value)}")
+                    raw_value = build_from_mapping(
+                        mapping=raw_value,
+                        tenant_id=app_model.tenant_id,
+                        access_controller=_file_access_controller,
+                    )
+                case SegmentType.ARRAY_FILE:
+                    if not isinstance(raw_value, list):
+                        raise InvalidArgumentError(description=f"expected list for files, got {type(raw_value)}")
+                    if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
+                        raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
+                    raw_value = build_from_mappings(
+                        mappings=raw_value,
+                        tenant_id=app_model.tenant_id,
+                        access_controller=_file_access_controller,
+                    )
+                case _:
+                    pass
             new_value = build_segment_with_type(variable.value_type, raw_value)
         draft_var_srv.update_variable(variable, name=new_name, value=new_value)
         db.session.commit()
@@ -223,24 +223,27 @@ class RagPipelineVariableApi(Resource):

         new_value = None
         if raw_value is not None:
-            if variable.value_type == SegmentType.FILE:
-                if not isinstance(raw_value, dict):
-                    raise InvalidArgumentError(description=f"expected dict for file, got {type(raw_value)}")
-                raw_value = build_from_mapping(
-                    mapping=raw_value,
-                    tenant_id=pipeline.tenant_id,
-                    access_controller=_file_access_controller,
-                )
-            elif variable.value_type == SegmentType.ARRAY_FILE:
-                if not isinstance(raw_value, list):
-                    raise InvalidArgumentError(description=f"expected list for files, got {type(raw_value)}")
-                if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
-                    raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
-                raw_value = build_from_mappings(
-                    mappings=raw_value,
-                    tenant_id=pipeline.tenant_id,
-                    access_controller=_file_access_controller,
-                )
+            match variable.value_type:
+                case SegmentType.FILE:
+                    if not isinstance(raw_value, dict):
+                        raise InvalidArgumentError(description=f"expected dict for file, got {type(raw_value)}")
+                    raw_value = build_from_mapping(
+                        mapping=raw_value,
+                        tenant_id=pipeline.tenant_id,
+                        access_controller=_file_access_controller,
+                    )
+                case SegmentType.ARRAY_FILE:
+                    if not isinstance(raw_value, list):
+                        raise InvalidArgumentError(description=f"expected list for files, got {type(raw_value)}")
+                    if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
+                        raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
+                    raw_value = build_from_mappings(
+                        mappings=raw_value,
+                        tenant_id=pipeline.tenant_id,
+                        access_controller=_file_access_controller,
+                    )
+                case _:
+                    pass
             new_value = build_segment_with_type(variable.value_type, raw_value)
         draft_var_srv.update_variable(variable, name=new_name, value=new_value)
         db.session.commit()
@@ -83,11 +83,13 @@ class RagPipelineImportApi(Resource):

         # Return appropriate status code based on result
         status = result.status
-        if status == ImportStatus.FAILED:
-            return result.model_dump(mode="json"), 400
-        elif status == ImportStatus.PENDING:
-            return result.model_dump(mode="json"), 202
-        return result.model_dump(mode="json"), 200
+        match status:
+            case ImportStatus.FAILED:
+                return result.model_dump(mode="json"), 400
+            case ImportStatus.PENDING:
+                return result.model_dump(mode="json"), 202
+            case ImportStatus.COMPLETED | ImportStatus.COMPLETED_WITH_WARNINGS:
+                return result.model_dump(mode="json"), 200


 @console_ns.route("/rag/pipelines/imports/<string:import_id>/confirm")
@@ -10,6 +10,7 @@ from sqlalchemy.orm import sessionmaker
 from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotFound

 import services
+from controllers.common.controller_schemas import DefaultBlockConfigQuery, WorkflowListQuery, WorkflowUpdatePayload
 from controllers.common.schema import register_schema_models
 from controllers.console import console_ns
 from controllers.console.app.error import (
@@ -94,22 +95,6 @@ class PublishedWorkflowRunPayload(DraftWorkflowRunPayload):
     original_document_id: str | None = None


-class DefaultBlockConfigQuery(BaseModel):
-    q: str | None = None
-
-
-class WorkflowListQuery(BaseModel):
-    page: int = Field(default=1, ge=1, le=99999)
-    limit: int = Field(default=10, ge=1, le=100)
-    user_id: str | None = None
-    named_only: bool = False
-
-
-class WorkflowUpdatePayload(BaseModel):
-    marked_name: str | None = Field(default=None, max_length=20)
-    marked_comment: str | None = Field(default=None, max_length=100)
-
-
 class NodeIdQuery(BaseModel):
     node_id: str
@@ -168,12 +168,13 @@ class ConsoleWorkflowEventsApi(Resource):
         else:
             msg_generator = MessageGenerator()
         generator: BaseAppGenerator
-        if app.mode == AppMode.ADVANCED_CHAT:
-            generator = AdvancedChatAppGenerator()
-        elif app.mode == AppMode.WORKFLOW:
-            generator = WorkflowAppGenerator()
-        else:
-            raise InvalidArgumentError(f"cannot subscribe to workflow run, workflow_run_id={workflow_run.id}")
+        match app.mode:
+            case AppMode.ADVANCED_CHAT:
+                generator = AdvancedChatAppGenerator()
+            case AppMode.WORKFLOW:
+                generator = WorkflowAppGenerator()
+            case _:
+                raise InvalidArgumentError(f"cannot subscribe to workflow run, workflow_run_id={workflow_run.id}")

         include_state_snapshot = request.args.get("include_state_snapshot", "false").lower() == "true"
@@ -138,12 +138,15 @@ def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded:
     if not app_model or app_model.status != "normal" or not app_model.enable_site:
         raise NotFound()

-    if auth_type == WebAppAuthType.PUBLIC:
-        return _exchange_for_public_app_token(app_model, site, enterprise_user_decoded)
-    elif auth_type == WebAppAuthType.EXTERNAL and user_auth_type != "external":
-        raise WebAppAuthRequiredError("Please login as external user.")
-    elif auth_type == WebAppAuthType.INTERNAL and user_auth_type != "internal":
-        raise WebAppAuthRequiredError("Please login as internal user.")
+    match auth_type:
+        case WebAppAuthType.PUBLIC:
+            return _exchange_for_public_app_token(app_model, site, enterprise_user_decoded)
+        case WebAppAuthType.EXTERNAL:
+            if user_auth_type != "external":
+                raise WebAppAuthRequiredError("Please login as external user.")
+        case WebAppAuthType.INTERNAL:
+            if user_auth_type != "internal":
+                raise WebAppAuthRequiredError("Please login as internal user.")

     end_user = None
     if end_user_id:
@@ -72,12 +72,13 @@ class WorkflowEventsApi(WebApiResource):
         app_mode = AppMode.value_of(app_model.mode)
         msg_generator = MessageGenerator()
         generator: BaseAppGenerator
-        if app_mode == AppMode.ADVANCED_CHAT:
-            generator = AdvancedChatAppGenerator()
-        elif app_mode == AppMode.WORKFLOW:
-            generator = WorkflowAppGenerator()
-        else:
-            raise InvalidArgumentError(f"cannot subscribe to workflow run, workflow_run_id={workflow_run.id}")
+        match app_mode:
+            case AppMode.ADVANCED_CHAT:
+                generator = AdvancedChatAppGenerator()
+            case AppMode.WORKFLOW:
+                generator = WorkflowAppGenerator()
+            case _:
+                raise InvalidArgumentError(f"cannot subscribe to workflow run, workflow_run_id={workflow_run.id}")

         include_state_snapshot = request.args.get("include_state_snapshot", "false").lower() == "true"
@@ -10,7 +10,7 @@ from graphon.runtime import GraphRuntimeState, VariablePool
 from graphon.variable_loader import VariableLoader
 from graphon.variables.variables import Variable
 from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session, sessionmaker

 from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfig
 from core.app.apps.base_app_queue_manager import AppQueueManager
@@ -363,7 +363,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):

         :return: List of conversation variables ready for use
         """
-        with Session(db.engine) as session:
+        with sessionmaker(bind=db.engine).begin() as session:
             existing_variables = self._load_existing_conversation_variables(session)

             if not existing_variables:
@@ -376,7 +376,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
             # Convert to Variable objects for use in the workflow
             conversation_variables = [var.to_variable() for var in existing_variables]

-            session.commit()
             return cast(list[Variable], conversation_variables)

     def _load_existing_conversation_variables(self, session: Session) -> list[ConversationVariable]:
@@ -16,7 +16,7 @@ from graphon.model_runtime.utils.encoders import jsonable_encoder
 from graphon.nodes import BuiltinNodeTypes
 from graphon.runtime import GraphRuntimeState
 from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session, sessionmaker

 from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
@@ -328,13 +328,8 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport):
     @contextmanager
     def _database_session(self):
         """Context manager for database sessions."""
-        with Session(db.engine, expire_on_commit=False) as session:
-            try:
-                yield session
-                session.commit()
-            except Exception:
-                session.rollback()
-                raise
+        with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
+            yield session

     def _ensure_workflow_initialized(self):
         """Fluent validation for workflow state."""
@@ -7,7 +7,7 @@ from typing import Union
 from graphon.entities import WorkflowStartReason
 from graphon.enums import WorkflowExecutionStatus
 from graphon.runtime import GraphRuntimeState
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session, sessionmaker

 from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
 from core.app.apps.base_app_queue_manager import AppQueueManager
@@ -252,13 +252,8 @@ class WorkflowAppGenerateTaskPipeline(GraphRuntimeStateSupport):
     @contextmanager
     def _database_session(self):
         """Context manager for database sessions."""
-        with Session(db.engine, expire_on_commit=False) as session:
-            try:
-                yield session
-                session.commit()
-            except Exception:
-                session.rollback()
-                raise
+        with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
+            yield session

     def _ensure_workflow_initialized(self):
         """Fluent validation for workflow state."""
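The recurring Session-to-sessionmaker change in this commit leans on SQLAlchemy's sessionmaker.begin() context manager, which commits on clean exit and rolls back on exception — exactly the try/commit/except/rollback boilerplate the old _database_session wrote by hand (extra kwargs such as expire_on_commit pass through sessionmaker unchanged). A minimal sketch against an in-memory SQLite engine:

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///:memory:")

with sessionmaker(bind=engine).begin() as session:
    # Commits automatically when the block exits without raising.
    session.execute(text("CREATE TABLE t (x INTEGER)"))
    session.execute(text("INSERT INTO t VALUES (1)"))

try:
    with sessionmaker(bind=engine).begin() as session:
        session.execute(text("INSERT INTO t VALUES (2)"))
        raise RuntimeError("boom")  # triggers rollback, then re-raises
except RuntimeError:
    pass

with sessionmaker(bind=engine).begin() as session:
    # Only the first insert survived; the second was rolled back.
    assert session.execute(text("SELECT COUNT(*) FROM t")).scalar() == 1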
@@ -1,6 +1,6 @@
 from graphon.model_runtime.entities.llm_entities import LLMUsage
 from sqlalchemy import update
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker

 from configs import dify_config
 from core.entities.model_entities import ModelStatus
@@ -57,37 +57,37 @@ def deduct_llm_quota(*, tenant_id: str, model_instance: ModelInstance, usage: LLMUsage):
         used_quota = 1

     if used_quota is not None and system_configuration.current_quota_type is not None:
-        if system_configuration.current_quota_type == ProviderQuotaType.TRIAL:
-            from services.credit_pool_service import CreditPoolService
-
-            CreditPoolService.check_and_deduct_credits(
-                tenant_id=tenant_id,
-                credits_required=used_quota,
-            )
-        elif system_configuration.current_quota_type == ProviderQuotaType.PAID:
-            from services.credit_pool_service import CreditPoolService
-
-            CreditPoolService.check_and_deduct_credits(
-                tenant_id=tenant_id,
-                credits_required=used_quota,
-                pool_type="paid",
-            )
-        else:
-            with Session(db.engine) as session:
-                stmt = (
-                    update(Provider)
-                    .where(
-                        Provider.tenant_id == tenant_id,
-                        # TODO: Use provider name with prefix after the data migration.
-                        Provider.provider_name == ModelProviderID(model_instance.provider).provider_name,
-                        Provider.provider_type == ProviderType.SYSTEM.value,
-                        Provider.quota_type == system_configuration.current_quota_type,
-                        Provider.quota_limit > Provider.quota_used,
-                    )
-                    .values(
-                        quota_used=Provider.quota_used + used_quota,
-                        last_used=naive_utc_now(),
-                    )
-                )
-                session.execute(stmt)
-                session.commit()
+        match system_configuration.current_quota_type:
+            case ProviderQuotaType.TRIAL:
+                from services.credit_pool_service import CreditPoolService
+
+                CreditPoolService.check_and_deduct_credits(
+                    tenant_id=tenant_id,
+                    credits_required=used_quota,
+                )
+            case ProviderQuotaType.PAID:
+                from services.credit_pool_service import CreditPoolService
+
+                CreditPoolService.check_and_deduct_credits(
+                    tenant_id=tenant_id,
+                    credits_required=used_quota,
+                    pool_type="paid",
+                )
+            case ProviderQuotaType.FREE:
+                with sessionmaker(bind=db.engine).begin() as session:
+                    stmt = (
+                        update(Provider)
+                        .where(
+                            Provider.tenant_id == tenant_id,
+                            # TODO: Use provider name with prefix after the data migration.
+                            Provider.provider_name == ModelProviderID(model_instance.provider).provider_name,
+                            Provider.provider_type == ProviderType.SYSTEM.value,
+                            Provider.quota_type == system_configuration.current_quota_type,
+                            Provider.quota_limit > Provider.quota_used,
+                        )
+                        .values(
+                            quota_used=Provider.quota_used + used_quota,
+                            last_used=naive_utc_now(),
+                        )
+                    )
+                    session.execute(stmt)
@@ -12,7 +12,7 @@ from graphon.model_runtime.entities.message_entities import (
 )
 from graphon.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
 from sqlalchemy import select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import Session, sessionmaker

 from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
@@ -266,9 +266,8 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
             event = message.event

             if isinstance(event, QueueErrorEvent):
-                with Session(db.engine) as session:
+                with sessionmaker(bind=db.engine).begin() as session:
                     err = self.handle_error(event=event, session=session, message_id=self._message_id)
-                    session.commit()
                 yield self.error_to_stream_response(err)
                 break
             elif isinstance(event, QueueStopEvent | QueueMessageEndEvent):
@@ -288,10 +287,9 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
                         answer=output_moderation_answer
                     )

-                with Session(db.engine) as session:
+                with sessionmaker(bind=db.engine).begin() as session:
                     # Save message
                     self._save_message(session=session, trace_manager=trace_manager)
-                    session.commit()
                 message_end_resp = self._message_end_to_stream_response()
                 yield message_end_resp
             elif isinstance(event, QueueRetrieverResourcesEvent):
@@ -40,41 +40,44 @@ def prepare_file_dict(message_file: MessageFile, upload_files_map: dict[str, Upl
     size = 0
     extension = ""

-    if message_file.transfer_method == FileTransferMethod.REMOTE_URL:
-        url = message_file.url
-        if message_file.url:
-            filename = message_file.url.split("/")[-1].split("?")[0]
-            if "." in filename:
-                extension = "." + filename.rsplit(".", 1)[1]
-    elif message_file.transfer_method == FileTransferMethod.LOCAL_FILE:
-        if upload_file:
-            url = file_helpers.get_signed_file_url(upload_file_id=str(upload_file.id))
-            filename = upload_file.name
-            mime_type = upload_file.mime_type or "application/octet-stream"
-            size = upload_file.size or 0
-            extension = f".{upload_file.extension}" if upload_file.extension else ""
-        elif message_file.upload_file_id:
-            url = file_helpers.get_signed_file_url(upload_file_id=str(message_file.upload_file_id))
-    elif message_file.transfer_method == FileTransferMethod.TOOL_FILE and message_file.url:
-        if message_file.url.startswith(("http://", "https://")):
-            filename = message_file.url.split("/")[-1].split("?")[0]
-            if "." in filename:
-                extension = "." + filename.rsplit(".", 1)[1]
-        else:
-            url_parts = message_file.url.split("/")
-            if url_parts:
-                file_part = url_parts[-1].split("?")[0]
-                if "." in file_part:
-                    tool_file_id, ext = file_part.rsplit(".", 1)
-                    extension = f".{ext}"
-                    if len(extension) > MAX_TOOL_FILE_EXTENSION_LENGTH:
-                        extension = ".bin"
-                else:
-                    tool_file_id = file_part
-                    extension = ".bin"
-                url = sign_tool_file(tool_file_id=tool_file_id, extension=extension)
-                filename = file_part
+    match message_file.transfer_method:
+        case FileTransferMethod.REMOTE_URL:
+            url = message_file.url
+            if message_file.url:
+                filename = message_file.url.split("/")[-1].split("?")[0]
+                if "." in filename:
+                    extension = "." + filename.rsplit(".", 1)[1]
+        case FileTransferMethod.LOCAL_FILE:
+            if upload_file:
+                url = file_helpers.get_signed_file_url(upload_file_id=str(upload_file.id))
+                filename = upload_file.name
+                mime_type = upload_file.mime_type or "application/octet-stream"
+                size = upload_file.size or 0
+                extension = f".{upload_file.extension}" if upload_file.extension else ""
+            elif message_file.upload_file_id:
+                url = file_helpers.get_signed_file_url(upload_file_id=str(message_file.upload_file_id))
+        case FileTransferMethod.TOOL_FILE if message_file.url:
+            if message_file.url.startswith(("http://", "https://")):
+                url = message_file.url
+                filename = message_file.url.split("/")[-1].split("?")[0]
+                if "." in filename:
+                    extension = "." + filename.rsplit(".", 1)[1]
+            else:
+                url_parts = message_file.url.split("/")
+                if url_parts:
+                    file_part = url_parts[-1].split("?")[0]
+                    if "." in file_part:
+                        tool_file_id, ext = file_part.rsplit(".", 1)
+                        extension = f".{ext}"
+                        if len(extension) > MAX_TOOL_FILE_EXTENSION_LENGTH:
+                            extension = ".bin"
+                    else:
+                        tool_file_id = file_part
+                        extension = ".bin"
+                    url = sign_tool_file(tool_file_id=tool_file_id, extension=extension)
+                    filename = file_part
+        case FileTransferMethod.TOOL_FILE | FileTransferMethod.DATASOURCE_FILE:
+            pass

     transfer_method_value = message_file.transfer_method.value
     remote_url = message_file.url if message_file.transfer_method == FileTransferMethod.REMOTE_URL else ""
@@ -187,15 +187,16 @@ def build_parameter_schema(

 def prepare_tool_arguments(app: App, arguments: dict[str, Any]) -> ToolArgumentsDict:
     """Prepare arguments based on app mode"""
-    if app.mode == AppMode.WORKFLOW:
-        return {"inputs": arguments}
-    elif app.mode == AppMode.COMPLETION:
-        return {"query": "", "inputs": arguments}
-    else:
-        # Chat modes - create a copy to avoid modifying original dict
-        args_copy = arguments.copy()
-        query = args_copy.pop("query", "")
-        return {"query": query, "inputs": args_copy}
+    match app.mode:
+        case AppMode.WORKFLOW:
+            return {"inputs": arguments}
+        case AppMode.COMPLETION:
+            return {"query": "", "inputs": arguments}
+        case _:
+            # Chat modes - create a copy to avoid modifying original dict
+            args_copy = arguments.copy()
+            query = args_copy.pop("query", "")
+            return {"query": query, "inputs": args_copy}


 def extract_answer_from_response(app: App, response: Any) -> str:
@@ -229,17 +230,13 @@ def process_streaming_response(response: RateLimitGenerator) -> str:

 def process_mapping_response(app: App, response: Mapping) -> str:
     """Process mapping response based on app mode"""
-    if app.mode in {
-        AppMode.ADVANCED_CHAT,
-        AppMode.COMPLETION,
-        AppMode.CHAT,
-        AppMode.AGENT_CHAT,
-    }:
-        return response.get("answer", "")
-    elif app.mode == AppMode.WORKFLOW:
-        return json.dumps(response["data"]["outputs"], ensure_ascii=False)
-    else:
-        raise ValueError("Invalid app mode: " + str(app.mode))
+    match app.mode:
+        case AppMode.ADVANCED_CHAT | AppMode.COMPLETION | AppMode.CHAT | AppMode.AGENT_CHAT:
+            return response.get("answer", "")
+        case AppMode.WORKFLOW:
+            return json.dumps(response["data"]["outputs"], ensure_ascii=False)
+        case _:
+            raise ValueError("Invalid app mode: " + str(app.mode))


 def convert_input_form_to_parameters(
@@ -72,17 +72,18 @@ class PluginAppBackwardsInvocation(BaseBackwardsInvocation):

         conversation_id = conversation_id or ""

-        if app.mode in {AppMode.ADVANCED_CHAT, AppMode.AGENT_CHAT, AppMode.CHAT}:
-            if not query:
-                raise ValueError("missing query")
-
-            return cls.invoke_chat_app(app, user, conversation_id, query, stream, inputs, files)
-        elif app.mode == AppMode.WORKFLOW:
-            return cls.invoke_workflow_app(app, user, stream, inputs, files)
-        elif app.mode == AppMode.COMPLETION:
-            return cls.invoke_completion_app(app, user, stream, inputs, files)
-
-        raise ValueError("unexpected app type")
+        match app.mode:
+            case AppMode.ADVANCED_CHAT | AppMode.AGENT_CHAT | AppMode.CHAT:
+                if not query:
+                    raise ValueError("missing query")
+
+                return cls.invoke_chat_app(app, user, conversation_id, query, stream, inputs, files)
+            case AppMode.WORKFLOW:
+                return cls.invoke_workflow_app(app, user, stream, inputs, files)
+            case AppMode.COMPLETION:
+                return cls.invoke_completion_app(app, user, stream, inputs, files)
+            case _:
+                raise ValueError("unexpected app type")

     @classmethod
     def invoke_chat_app(
@@ -98,60 +99,61 @@ class PluginAppBackwardsInvocation(BaseBackwardsInvocation):
         """
         invoke chat app
         """
-        if app.mode == AppMode.ADVANCED_CHAT:
-            workflow = app.workflow
-            if not workflow:
-                raise ValueError("unexpected app type")
-
-            pause_config = PauseStateLayerConfig(
-                session_factory=db.engine,
-                state_owner_user_id=workflow.created_by,
-            )
-
-            return AdvancedChatAppGenerator().generate(
-                app_model=app,
-                workflow=workflow,
-                user=user,
-                args={
-                    "inputs": inputs,
-                    "query": query,
-                    "files": files,
-                    "conversation_id": conversation_id,
-                },
-                invoke_from=InvokeFrom.SERVICE_API,
-                workflow_run_id=str(uuid.uuid4()),
-                streaming=stream,
-                pause_state_config=pause_config,
-            )
-        elif app.mode == AppMode.AGENT_CHAT:
-            return AgentChatAppGenerator().generate(
-                app_model=app,
-                user=user,
-                args={
-                    "inputs": inputs,
-                    "query": query,
-                    "files": files,
-                    "conversation_id": conversation_id,
-                },
-                invoke_from=InvokeFrom.SERVICE_API,
-                streaming=stream,
-            )
-        elif app.mode == AppMode.CHAT:
-            return ChatAppGenerator().generate(
-                app_model=app,
-                user=user,
-                args={
-                    "inputs": inputs,
-                    "query": query,
-                    "files": files,
-                    "conversation_id": conversation_id,
-                },
-                invoke_from=InvokeFrom.SERVICE_API,
-                streaming=stream,
-            )
-        else:
-            raise ValueError("unexpected app type")
+        match app.mode:
+            case AppMode.ADVANCED_CHAT:
+                workflow = app.workflow
+                if not workflow:
+                    raise ValueError("unexpected app type")
+
+                pause_config = PauseStateLayerConfig(
+                    session_factory=db.engine,
+                    state_owner_user_id=workflow.created_by,
+                )
+
+                return AdvancedChatAppGenerator().generate(
+                    app_model=app,
+                    workflow=workflow,
+                    user=user,
+                    args={
+                        "inputs": inputs,
+                        "query": query,
+                        "files": files,
+                        "conversation_id": conversation_id,
+                    },
+                    invoke_from=InvokeFrom.SERVICE_API,
+                    workflow_run_id=str(uuid.uuid4()),
+                    streaming=stream,
+                    pause_state_config=pause_config,
+                )
+            case AppMode.AGENT_CHAT:
+                return AgentChatAppGenerator().generate(
+                    app_model=app,
+                    user=user,
+                    args={
+                        "inputs": inputs,
+                        "query": query,
+                        "files": files,
+                        "conversation_id": conversation_id,
+                    },
+                    invoke_from=InvokeFrom.SERVICE_API,
+                    streaming=stream,
+                )
+            case AppMode.CHAT:
+                return ChatAppGenerator().generate(
+                    app_model=app,
+                    user=user,
+                    args={
+                        "inputs": inputs,
+                        "query": query,
+                        "files": files,
+                        "conversation_id": conversation_id,
+                    },
+                    invoke_from=InvokeFrom.SERVICE_API,
+                    streaming=stream,
+                )
+            case _:
+                raise ValueError("unexpected app type")

     @classmethod
     def invoke_workflow_app(
         cls,
@@ -209,7 +209,10 @@ class PluginInstaller(BasePluginClient):
             "GET",
             f"plugin/{tenant_id}/management/decode/from_identifier",
             PluginDecodeResponse,
-            params={"plugin_unique_identifier": plugin_unique_identifier},
+            params={
+                "plugin_unique_identifier": plugin_unique_identifier,
+                "PluginUniqueIdentifier": plugin_unique_identifier,  # compat with daemon <= 0.5.4
+            },
         )

     def fetch_plugin_installation_by_ids(
@@ -961,36 +961,37 @@ class ProviderManager:
                 raise ValueError("quota_used is None")
             if provider_record.quota_limit is None:
                 raise ValueError("quota_limit is None")
-            if provider_quota.quota_type == ProviderQuotaType.TRIAL and trail_pool is not None:
-                quota_configuration = QuotaConfiguration(
-                    quota_type=provider_quota.quota_type,
-                    quota_unit=provider_hosting_configuration.quota_unit or QuotaUnit.TOKENS,
-                    quota_used=trail_pool.quota_used,
-                    quota_limit=trail_pool.quota_limit,
-                    is_valid=trail_pool.quota_limit > trail_pool.quota_used or trail_pool.quota_limit == -1,
-                    restrict_models=provider_quota.restrict_models,
-                )
-
-            elif provider_quota.quota_type == ProviderQuotaType.PAID and paid_pool is not None:
-                quota_configuration = QuotaConfiguration(
-                    quota_type=provider_quota.quota_type,
-                    quota_unit=provider_hosting_configuration.quota_unit or QuotaUnit.TOKENS,
-                    quota_used=paid_pool.quota_used,
-                    quota_limit=paid_pool.quota_limit,
-                    is_valid=paid_pool.quota_limit > paid_pool.quota_used or paid_pool.quota_limit == -1,
-                    restrict_models=provider_quota.restrict_models,
-                )
-
-            else:
-                quota_configuration = QuotaConfiguration(
-                    quota_type=provider_quota.quota_type,
-                    quota_unit=provider_hosting_configuration.quota_unit or QuotaUnit.TOKENS,
-                    quota_used=provider_record.quota_used,
-                    quota_limit=provider_record.quota_limit,
-                    is_valid=provider_record.quota_limit > provider_record.quota_used
-                    or provider_record.quota_limit == -1,
-                    restrict_models=provider_quota.restrict_models,
-                )
+            match provider_quota.quota_type:
+                case ProviderQuotaType.TRIAL if trail_pool is not None:
+                    quota_configuration = QuotaConfiguration(
+                        quota_type=provider_quota.quota_type,
+                        quota_unit=provider_hosting_configuration.quota_unit or QuotaUnit.TOKENS,
+                        quota_used=trail_pool.quota_used,
+                        quota_limit=trail_pool.quota_limit,
+                        is_valid=trail_pool.quota_limit > trail_pool.quota_used or trail_pool.quota_limit == -1,
+                        restrict_models=provider_quota.restrict_models,
+                    )
+                case ProviderQuotaType.PAID if paid_pool is not None:
+                    quota_configuration = QuotaConfiguration(
+                        quota_type=provider_quota.quota_type,
+                        quota_unit=provider_hosting_configuration.quota_unit or QuotaUnit.TOKENS,
+                        quota_used=paid_pool.quota_used,
+                        quota_limit=paid_pool.quota_limit,
+                        is_valid=paid_pool.quota_limit > paid_pool.quota_used or paid_pool.quota_limit == -1,
+                        restrict_models=provider_quota.restrict_models,
+                    )
+                case _:
+                    quota_configuration = QuotaConfiguration(
+                        quota_type=provider_quota.quota_type,
+                        quota_unit=provider_hosting_configuration.quota_unit or QuotaUnit.TOKENS,
+                        quota_used=provider_record.quota_used,
+                        quota_limit=provider_record.quota_limit,
+                        is_valid=provider_record.quota_limit > provider_record.quota_used
+                        or provider_record.quota_limit == -1,
+                        restrict_models=provider_quota.restrict_models,
+                    )

             quota_configurations.append(quota_configuration)
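The `case ... if ...` form used here attaches a guard to a pattern: the case is taken only when both the pattern and the guard hold, otherwise evaluation falls through to the next case. That is what lets TRIAL with a missing pool land in `case _` (the provider-record fallback) instead of raising. A compact sketch of the fall-through behavior — the enum and pool dicts are illustrative stand-ins, not Dify's types:

from enum import Enum


class QuotaType(Enum):
    TRIAL = "trial"
    PAID = "paid"


def pick_pool(quota_type: QuotaType, trial_pool: dict | None, paid_pool: dict | None) -> str:
    match quota_type:
        case QuotaType.TRIAL if trial_pool is not None:
            return "trial pool"
        case QuotaType.PAID if paid_pool is not None:
            return "paid pool"
        case _:
            # Reached when the type matched but its guard failed,
            # or when no earlier pattern matched at all.
            return "provider record fallback"


assert pick_pool(QuotaType.TRIAL, None, None) == "provider record fallback"
assert pick_pool(QuotaType.TRIAL, {"limit": 200}, None) == "trial pool"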
@@ -37,11 +37,12 @@ class AnalyticdbVector(BaseVector):

     def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
         dimension = len(embeddings[0])
-        self.analyticdb_vector._create_collection_if_not_exists(dimension)
+        self.analyticdb_vector.create_collection_if_not_exists(dimension)
         self.analyticdb_vector.add_texts(texts, embeddings)

-    def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
+    def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs) -> list[str]:
         self.analyticdb_vector.add_texts(documents, embeddings)
+        return []

     def text_exists(self, id: str) -> bool:
         return self.analyticdb_vector.text_exists(id)
@@ -123,7 +123,7 @@ class AnalyticdbVectorOpenAPI:
         else:
             raise ValueError(f"failed to create namespace {self.config.namespace}: {e}")

-    def _create_collection_if_not_exists(self, embedding_dimension: int):
+    def create_collection_if_not_exists(self, embedding_dimension: int):
         from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
         from Tea.exceptions import TeaException
@@ -1,5 +1,6 @@
 import json
 import uuid
 from collections.abc import Iterator
 from contextlib import contextmanager
 from typing import Any
@@ -74,7 +75,7 @@ class AnalyticdbVectorBySql:
         )

     @contextmanager
-    def _get_cursor(self):
+    def _get_cursor(self) -> Iterator[Any]:
         assert self.pool is not None, "Connection pool is not initialized"
         conn = self.pool.getconn()
         cur = conn.cursor()
@@ -130,7 +131,7 @@ class AnalyticdbVectorBySql:
         )
         cur.execute(f"CREATE SCHEMA IF NOT EXISTS {self.config.namespace}")

-    def _create_collection_if_not_exists(self, embedding_dimension: int):
+    def create_collection_if_not_exists(self, embedding_dimension: int):
         cache_key = f"vector_indexing_{self._collection_name}"
         lock_name = f"{cache_key}_lock"
         with redis_client.lock(lock_name, timeout=20):
@@ -2,7 +2,7 @@ import json
 from typing import Any, TypedDict

 import chromadb
-from chromadb import QueryResult, Settings
+from chromadb import QueryResult, Settings  # pyright: ignore[reportPrivateImportUsage]
 from pydantic import BaseModel

 from configs import dify_config
@@ -106,14 +106,15 @@ class ChromaVector(BaseVector):
     def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
         collection = self._client.get_or_create_collection(self._collection_name)
         document_ids_filter = kwargs.get("document_ids_filter")
+        results: QueryResult
         if document_ids_filter:
-            results: QueryResult = collection.query(
+            results = collection.query(
                 query_embeddings=query_vector,
                 n_results=kwargs.get("top_k", 4),
                 where={"document_id": {"$in": document_ids_filter}},  # type: ignore
            )
         else:
-            results: QueryResult = collection.query(query_embeddings=query_vector, n_results=kwargs.get("top_k", 4))  # type: ignore
+            results = collection.query(query_embeddings=query_vector, n_results=kwargs.get("top_k", 4))  # type: ignore
         score_threshold = float(kwargs.get("score_threshold") or 0.0)

         # Check if results contain data
@@ -165,8 +166,8 @@ class ChromaVectorFactory(AbstractVectorFactory):
             config=ChromaConfig(
                 host=dify_config.CHROMA_HOST or "",
                 port=dify_config.CHROMA_PORT,
-                tenant=dify_config.CHROMA_TENANT or chromadb.DEFAULT_TENANT,
-                database=dify_config.CHROMA_DATABASE or chromadb.DEFAULT_DATABASE,
+                tenant=dify_config.CHROMA_TENANT or chromadb.DEFAULT_TENANT,  # pyright: ignore[reportPrivateImportUsage]
+                database=dify_config.CHROMA_DATABASE or chromadb.DEFAULT_DATABASE,  # pyright: ignore[reportPrivateImportUsage]
                 auth_provider=dify_config.CHROMA_AUTH_PROVIDER,
                 auth_credentials=dify_config.CHROMA_AUTH_CREDENTIALS,
             ),
@@ -3,7 +3,7 @@ import os
 import uuid
 from collections.abc import Generator, Iterable, Sequence
 from itertools import islice
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast

 import qdrant_client
 from flask import current_app
@@ -32,7 +32,6 @@ from extensions.ext_redis import redis_client
 from models.dataset import Dataset, DatasetCollectionBinding

 if TYPE_CHECKING:
-    from qdrant_client import grpc  # noqa
     from qdrant_client.conversions import common_types
     from qdrant_client.http import models as rest

@@ -180,7 +179,7 @@ class QdrantVector(BaseVector):
         for batch_ids, points in self._generate_rest_batches(
             texts, embeddings, filtered_metadatas, uuids, 64, self._group_id
         ):
-            self._client.upsert(collection_name=self._collection_name, points=points)
+            self._client.upsert(collection_name=self._collection_name, points=cast("common_types.Points", points))
             added_ids.extend(batch_ids)

         return added_ids
@@ -472,7 +471,7 @@ class QdrantVector(BaseVector):

     def _reload_if_needed(self):
         if isinstance(self._client, QdrantLocal):
-            self._client._load()
+            self._client._load()  # pyright: ignore[reportPrivateUsage]

     @classmethod
     def _document_from_scored_point(
@@ -26,7 +26,7 @@ from extensions.ext_redis import redis_client

 logger = logging.getLogger(__name__)

-Base = declarative_base()  # type: Any
+Base: Any = declarative_base()


 class RelytConfig(BaseModel):
@@ -19,12 +19,15 @@ class UnstructuredWordExtractor(BaseExtractor):

     def extract(self) -> list[Document]:
         from unstructured.__version__ import __version__ as __unstructured_version__
-        from unstructured.file_utils.filetype import FileType, detect_filetype
+        from unstructured.file_utils.filetype import (  # pyright: ignore[reportPrivateImportUsage]
+            FileType,
+            detect_filetype,
+        )

         unstructured_version = tuple(int(x) for x in __unstructured_version__.split("."))
         # check the file extension
         try:
-            import magic  # noqa: F401
+            import magic  # noqa: F401 # pyright: ignore[reportUnusedImport]

             is_doc = detect_filetype(self._file_path) == FileType.DOC
         except ImportError:
@@ -15,7 +15,7 @@ from graphon.model_runtime.entities.message_entities import PromptMessage, Promp
 from graphon.model_runtime.entities.model_entities import ModelFeature, ModelType
 from graphon.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
 from sqlalchemy import and_, func, literal, or_, select
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker

 from core.app.app_config.entities import (
     DatasetEntity,
@@ -884,7 +884,7 @@ class DatasetRetrieval:
             self._send_trace_task(message_id, documents, timer)
             return

-        with Session(db.engine) as session:
+        with sessionmaker(bind=db.engine).begin() as session:
             # Collect all document_ids and batch fetch DatasetDocuments
             document_ids = {
                 doc.metadata["document_id"]
@@ -975,7 +975,6 @@ class DatasetRetrieval:
                 {DocumentSegment.hit_count: DocumentSegment.hit_count + 1},
                 synchronize_session=False,
             )
-            session.commit()

         self._send_trace_task(message_id, documents, timer)
|
||||
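Note: many hunks in this commit replace `Session(db.engine)` plus an explicit `session.commit()` with `sessionmaker(bind=...).begin()`. A minimal runnable sketch of the difference, using an in-memory SQLite engine as a stand-in for db.engine:

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///:memory:")

# begin() yields a session inside a transaction that commits on successful
# exit and rolls back on exception, which is why the explicit
# session.commit() calls are removed throughout this commit.
with sessionmaker(bind=engine).begin() as session:
    session.execute(text("CREATE TABLE t (id INTEGER PRIMARY KEY)"))
    session.execute(text("INSERT INTO t (id) VALUES (1)"))

with sessionmaker(bind=engine).begin() as session:
    count = session.execute(text("SELECT COUNT(*) FROM t")).scalar_one()
assert count == 1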
@@ -205,16 +205,160 @@ class ToolManager:

:return: the tool
"""
if provider_type == ToolProviderType.BUILT_IN:
# check if the builtin tool need credentials
provider_controller = cls.get_builtin_provider(provider_id, tenant_id)
match provider_type:
case ToolProviderType.BUILT_IN:
provider_controller = cls.get_builtin_provider(provider_id, tenant_id)

builtin_tool = provider_controller.get_tool(tool_name)
if not builtin_tool:
raise ToolProviderNotFoundError(f"builtin tool {tool_name} not found")
builtin_tool = provider_controller.get_tool(tool_name)
if not builtin_tool:
raise ToolProviderNotFoundError(f"builtin tool {tool_name} not found")

if not provider_controller.need_credentials:
return builtin_tool.fork_tool_runtime(
runtime=ToolRuntime(
tenant_id=tenant_id,
user_id=user_id,
credentials={},
invoke_from=invoke_from,
tool_invoke_from=tool_invoke_from,
)
)
builtin_provider = None
if isinstance(provider_controller, PluginToolProviderController):
provider_id_entity = ToolProviderID(provider_id)
if is_valid_uuid(credential_id):
try:
builtin_provider_stmt = select(BuiltinToolProvider).where(
BuiltinToolProvider.tenant_id == tenant_id,
BuiltinToolProvider.id == credential_id,
)
builtin_provider = db.session.scalar(builtin_provider_stmt)
except Exception as e:
builtin_provider = None
logger.info("Error getting builtin provider %s:%s", credential_id, e, exc_info=True)
if builtin_provider is None:
raise ToolProviderNotFoundError(f"provider has been deleted: {credential_id}")

if builtin_provider is None:
with Session(db.engine) as session:
builtin_provider = session.scalar(
sa.select(BuiltinToolProvider)
.where(
BuiltinToolProvider.tenant_id == tenant_id,
(BuiltinToolProvider.provider == str(provider_id_entity))
| (BuiltinToolProvider.provider == provider_id_entity.provider_name),
)
.order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
)
if builtin_provider is None:
raise ToolProviderNotFoundError(f"no default provider for {provider_id}")
else:
builtin_provider = db.session.scalar(
select(BuiltinToolProvider)
.where(
BuiltinToolProvider.tenant_id == tenant_id, (BuiltinToolProvider.provider == provider_id)
)
.order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
.limit(1)
)

if builtin_provider is None:
raise ToolProviderNotFoundError(f"builtin provider {provider_id} not found")

from core.helper.credential_utils import check_credential_policy_compliance

check_credential_policy_compliance(
credential_id=builtin_provider.id,
provider=provider_id,
credential_type=PluginCredentialType.TOOL,
check_existence=False,
)

encrypter, cache = create_provider_encrypter(
tenant_id=tenant_id,
config=[
x.to_basic_provider_config()
for x in provider_controller.get_credentials_schema_by_type(builtin_provider.credential_type)
],
cache=ToolProviderCredentialsCache(
tenant_id=tenant_id, provider=provider_id, credential_id=builtin_provider.id
),
)

decrypted_credentials: Mapping[str, Any] = encrypter.decrypt(builtin_provider.credentials)

if builtin_provider.expires_at != -1 and (builtin_provider.expires_at - 60) < int(time.time()):
# TODO: circular import
from core.plugin.impl.oauth import OAuthHandler
from services.tools.builtin_tools_manage_service import BuiltinToolManageService

tool_provider = ToolProviderID(provider_id)
provider_name = tool_provider.provider_name
redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider_id}/tool/callback"
system_credentials = BuiltinToolManageService.get_oauth_client(tenant_id, provider_id)

oauth_handler = OAuthHandler()
refreshed_credentials = oauth_handler.refresh_credentials(
tenant_id=tenant_id,
user_id=builtin_provider.user_id,
plugin_id=tool_provider.plugin_id,
provider=provider_name,
redirect_uri=redirect_uri,
system_credentials=system_credentials or {},
credentials=decrypted_credentials,
)
# update the credentials
builtin_provider.encrypted_credentials = json.dumps(
encrypter.encrypt(refreshed_credentials.credentials)
)
builtin_provider.expires_at = refreshed_credentials.expires_at
db.session.commit()
decrypted_credentials = refreshed_credentials.credentials
cache.delete()

if not provider_controller.need_credentials:
return builtin_tool.fork_tool_runtime(
runtime=ToolRuntime(
tenant_id=tenant_id,
user_id=user_id,
credentials=dict(decrypted_credentials),
credential_type=builtin_provider.credential_type,
runtime_parameters={},
invoke_from=invoke_from,
tool_invoke_from=tool_invoke_from,
)
)

case ToolProviderType.API:
api_provider, credentials = cls.get_api_provider_controller(tenant_id, provider_id)
encrypter, _ = create_tool_provider_encrypter(
tenant_id=tenant_id,
controller=api_provider,
)
return api_provider.get_tool(tool_name).fork_tool_runtime(
runtime=ToolRuntime(
tenant_id=tenant_id,
user_id=user_id,
credentials=dict(encrypter.decrypt(credentials)),
invoke_from=invoke_from,
tool_invoke_from=tool_invoke_from,
)
)
case ToolProviderType.WORKFLOW:
workflow_provider_stmt = select(WorkflowToolProvider).where(
WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == provider_id
)
with Session(db.engine, expire_on_commit=False) as session, session.begin():
workflow_provider = session.scalar(workflow_provider_stmt)

if workflow_provider is None:
raise ToolProviderNotFoundError(f"workflow provider {provider_id} not found")

controller = ToolTransformService.workflow_provider_to_controller(db_provider=workflow_provider)
controller_tools: list[WorkflowTool] = controller.get_tools(tenant_id=workflow_provider.tenant_id)
if controller_tools is None or len(controller_tools) == 0:
raise ToolProviderNotFoundError(f"workflow provider {provider_id} not found")

return controller.get_tools(tenant_id=workflow_provider.tenant_id)[0].fork_tool_runtime(
runtime=ToolRuntime(
tenant_id=tenant_id,
user_id=user_id,
@@ -223,177 +367,28 @@ class ToolManager:
tool_invoke_from=tool_invoke_from,
)
)
builtin_provider = None
if isinstance(provider_controller, PluginToolProviderController):
provider_id_entity = ToolProviderID(provider_id)
# get specific credentials
if is_valid_uuid(credential_id):
try:
builtin_provider_stmt = select(BuiltinToolProvider).where(
BuiltinToolProvider.tenant_id == tenant_id,
BuiltinToolProvider.id == credential_id,
)
builtin_provider = db.session.scalar(builtin_provider_stmt)
except Exception as e:
builtin_provider = None
logger.info("Error getting builtin provider %s:%s", credential_id, e, exc_info=True)
# if the provider has been deleted, raise an error
if builtin_provider is None:
raise ToolProviderNotFoundError(f"provider has been deleted: {credential_id}")

# fallback to the default provider
if builtin_provider is None:
# use the default provider
with Session(db.engine) as session:
builtin_provider = session.scalar(
sa.select(BuiltinToolProvider)
.where(
BuiltinToolProvider.tenant_id == tenant_id,
(BuiltinToolProvider.provider == str(provider_id_entity))
| (BuiltinToolProvider.provider == provider_id_entity.provider_name),
)
.order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
)
if builtin_provider is None:
raise ToolProviderNotFoundError(f"no default provider for {provider_id}")
else:
builtin_provider = db.session.scalar(
select(BuiltinToolProvider)
.where(BuiltinToolProvider.tenant_id == tenant_id, (BuiltinToolProvider.provider == provider_id))
.order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
.limit(1)
)

if builtin_provider is None:
raise ToolProviderNotFoundError(f"builtin provider {provider_id} not found")

# check if the credential is allowed to be used
from core.helper.credential_utils import check_credential_policy_compliance

check_credential_policy_compliance(
credential_id=builtin_provider.id,
provider=provider_id,
credential_type=PluginCredentialType.TOOL,
check_existence=False,
)

encrypter, cache = create_provider_encrypter(
tenant_id=tenant_id,
config=[
x.to_basic_provider_config()
for x in provider_controller.get_credentials_schema_by_type(builtin_provider.credential_type)
],
cache=ToolProviderCredentialsCache(
tenant_id=tenant_id, provider=provider_id, credential_id=builtin_provider.id
),
)

# decrypt the credentials
decrypted_credentials: Mapping[str, Any] = encrypter.decrypt(builtin_provider.credentials)

# check if the credentials is expired
if builtin_provider.expires_at != -1 and (builtin_provider.expires_at - 60) < int(time.time()):
# TODO: circular import
from core.plugin.impl.oauth import OAuthHandler
from services.tools.builtin_tools_manage_service import BuiltinToolManageService

# refresh the credentials
tool_provider = ToolProviderID(provider_id)
provider_name = tool_provider.provider_name
redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider_id}/tool/callback"
system_credentials = BuiltinToolManageService.get_oauth_client(tenant_id, provider_id)

oauth_handler = OAuthHandler()
# refresh the credentials
refreshed_credentials = oauth_handler.refresh_credentials(
tenant_id=tenant_id,
user_id=builtin_provider.user_id,
plugin_id=tool_provider.plugin_id,
provider=provider_name,
redirect_uri=redirect_uri,
system_credentials=system_credentials or {},
credentials=decrypted_credentials,
)
# update the credentials
builtin_provider.encrypted_credentials = json.dumps(
encrypter.encrypt(refreshed_credentials.credentials)
)
builtin_provider.expires_at = refreshed_credentials.expires_at
db.session.commit()
decrypted_credentials = refreshed_credentials.credentials
cache.delete()

return builtin_tool.fork_tool_runtime(
runtime=ToolRuntime(
tenant_id=tenant_id,
user_id=user_id,
credentials=dict(decrypted_credentials),
credential_type=builtin_provider.credential_type,
runtime_parameters={},
invoke_from=invoke_from,
tool_invoke_from=tool_invoke_from,
)
)

elif provider_type == ToolProviderType.API:
api_provider, credentials = cls.get_api_provider_controller(tenant_id, provider_id)
encrypter, _ = create_tool_provider_encrypter(
tenant_id=tenant_id,
controller=api_provider,
)
return api_provider.get_tool(tool_name).fork_tool_runtime(
runtime=ToolRuntime(
tenant_id=tenant_id,
user_id=user_id,
credentials=dict(encrypter.decrypt(credentials)),
invoke_from=invoke_from,
tool_invoke_from=tool_invoke_from,
)
)
elif provider_type == ToolProviderType.WORKFLOW:
workflow_provider_stmt = select(WorkflowToolProvider).where(
WorkflowToolProvider.tenant_id == tenant_id, WorkflowToolProvider.id == provider_id
)
with Session(db.engine, expire_on_commit=False) as session, session.begin():
workflow_provider = session.scalar(workflow_provider_stmt)

if workflow_provider is None:
raise ToolProviderNotFoundError(f"workflow provider {provider_id} not found")

controller = ToolTransformService.workflow_provider_to_controller(db_provider=workflow_provider)
controller_tools: list[WorkflowTool] = controller.get_tools(tenant_id=workflow_provider.tenant_id)
if controller_tools is None or len(controller_tools) == 0:
raise ToolProviderNotFoundError(f"workflow provider {provider_id} not found")

return controller.get_tools(tenant_id=workflow_provider.tenant_id)[0].fork_tool_runtime(
runtime=ToolRuntime(
tenant_id=tenant_id,
user_id=user_id,
credentials={},
invoke_from=invoke_from,
tool_invoke_from=tool_invoke_from,
)
)
elif provider_type == ToolProviderType.APP:
raise NotImplementedError("app provider not implemented")
elif provider_type == ToolProviderType.PLUGIN:
plugin_tool = cls.get_plugin_provider(provider_id, tenant_id).get_tool(tool_name)
runtime = getattr(plugin_tool, "runtime", None)
if runtime is not None:
runtime.user_id = user_id
runtime.invoke_from = invoke_from
runtime.tool_invoke_from = tool_invoke_from
return plugin_tool
elif provider_type == ToolProviderType.MCP:
mcp_tool = cls.get_mcp_provider_controller(tenant_id, provider_id).get_tool(tool_name)
runtime = getattr(mcp_tool, "runtime", None)
if runtime is not None:
runtime.user_id = user_id
runtime.invoke_from = invoke_from
runtime.tool_invoke_from = tool_invoke_from
return mcp_tool
else:
raise ToolProviderNotFoundError(f"provider type {provider_type.value} not found")
case ToolProviderType.APP:
raise NotImplementedError("app provider not implemented")
case ToolProviderType.PLUGIN:
plugin_tool = cls.get_plugin_provider(provider_id, tenant_id).get_tool(tool_name)
runtime = getattr(plugin_tool, "runtime", None)
if runtime is not None:
runtime.user_id = user_id
runtime.invoke_from = invoke_from
runtime.tool_invoke_from = tool_invoke_from
return plugin_tool
case ToolProviderType.MCP:
mcp_tool = cls.get_mcp_provider_controller(tenant_id, provider_id).get_tool(tool_name)
runtime = getattr(mcp_tool, "runtime", None)
if runtime is not None:
runtime.user_id = user_id
runtime.invoke_from = invoke_from
runtime.tool_invoke_from = tool_invoke_from
return mcp_tool
case ToolProviderType.DATASET_RETRIEVAL:
raise ToolProviderNotFoundError(f"provider type {provider_type.value} not found")
case _:
raise ToolProviderNotFoundError(f"provider type {provider_type} not found")

@classmethod
def get_agent_tool_runtime(
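Note: the refactor above swaps an if/elif chain for a match statement over the provider-type enum. A small self-contained sketch of the pattern (the enum and handler names here are illustrative, not ToolManager's): dotted enum members act as value patterns, and a final case _ preserves the old else error path.

from enum import StrEnum

class ProviderType(StrEnum):
    BUILT_IN = "builtin"
    API = "api"

def dispatch(provider_type: ProviderType) -> str:
    # match compares the subject against each enum member; listing the
    # members explicitly plus a catch-all mirrors the if/elif/else chain
    # it replaces, while making unhandled members easy to spot.
    match provider_type:
        case ProviderType.BUILT_IN:
            return "handled builtin"
        case ProviderType.API:
            return "handled api"
        case _:
            raise ValueError(f"provider type {provider_type} not found")

assert dispatch(ProviderType.API) == "handled api"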
@@ -1027,31 +1022,31 @@ class ToolManager:
:param provider_id: the id of the provider
:return:
"""
provider_type = provider_type
provider_id = provider_id
if provider_type == ToolProviderType.BUILT_IN:
provider = ToolManager.get_builtin_provider(provider_id, tenant_id)
if isinstance(provider, PluginToolProviderController):
match provider_type:
case ToolProviderType.BUILT_IN:
provider = ToolManager.get_builtin_provider(provider_id, tenant_id)
if isinstance(provider, PluginToolProviderController):
try:
return cls.generate_plugin_tool_icon_url(tenant_id, provider.entity.identity.icon)
except Exception:
return {"background": "#252525", "content": "\ud83d\ude01"}
return cls.generate_builtin_tool_icon_url(provider_id)
case ToolProviderType.API:
return cls.generate_api_tool_icon_url(tenant_id, provider_id)
case ToolProviderType.WORKFLOW:
return cls.generate_workflow_tool_icon_url(tenant_id, provider_id)
case ToolProviderType.PLUGIN:
provider = ToolManager.get_plugin_provider(provider_id, tenant_id)
try:
return cls.generate_plugin_tool_icon_url(tenant_id, provider.entity.identity.icon)
except Exception:
return {"background": "#252525", "content": "\ud83d\ude01"}
return cls.generate_builtin_tool_icon_url(provider_id)
elif provider_type == ToolProviderType.API:
return cls.generate_api_tool_icon_url(tenant_id, provider_id)
elif provider_type == ToolProviderType.WORKFLOW:
return cls.generate_workflow_tool_icon_url(tenant_id, provider_id)
elif provider_type == ToolProviderType.PLUGIN:
provider = ToolManager.get_plugin_provider(provider_id, tenant_id)
try:
return cls.generate_plugin_tool_icon_url(tenant_id, provider.entity.identity.icon)
except Exception:
return {"background": "#252525", "content": "\ud83d\ude01"}
raise ValueError(f"plugin provider {provider_id} not found")
elif provider_type == ToolProviderType.MCP:
return cls.generate_mcp_tool_icon_url(tenant_id, provider_id)
else:
raise ValueError(f"provider type {provider_type} not found")
case ToolProviderType.MCP:
return cls.generate_mcp_tool_icon_url(tenant_id, provider_id)
case ToolProviderType.APP | ToolProviderType.DATASET_RETRIEVAL:
raise ValueError(f"provider type {provider_type} not found")
case _:
raise ValueError(f"provider type {provider_type} not found")

@classmethod
def _convert_tool_parameters_type(

@@ -305,14 +305,15 @@ class WorkflowTool(Tool):
"transfer_method": file.transfer_method.value,
"type": file.type.value,
}
if file.transfer_method == FileTransferMethod.TOOL_FILE:
file_dict["tool_file_id"] = resolve_file_record_id(file.reference)
elif file.transfer_method == FileTransferMethod.LOCAL_FILE:
file_dict["upload_file_id"] = resolve_file_record_id(file.reference)
elif file.transfer_method == FileTransferMethod.DATASOURCE_FILE:
file_dict["datasource_file_id"] = resolve_file_record_id(file.reference)
elif file.transfer_method == FileTransferMethod.REMOTE_URL:
file_dict["url"] = file.generate_url()
match file.transfer_method:
case FileTransferMethod.TOOL_FILE:
file_dict["tool_file_id"] = resolve_file_record_id(file.reference)
case FileTransferMethod.LOCAL_FILE:
file_dict["upload_file_id"] = resolve_file_record_id(file.reference)
case FileTransferMethod.DATASOURCE_FILE:
file_dict["datasource_file_id"] = resolve_file_record_id(file.reference)
case FileTransferMethod.REMOTE_URL:
file_dict["url"] = file.generate_url()

files.append(file_dict)
except Exception:
@@ -357,8 +358,11 @@ class WorkflowTool(Tool):
def _update_file_mapping(self, file_dict: dict):
file_id = resolve_file_record_id(file_dict.get("reference") or file_dict.get("related_id"))
transfer_method = FileTransferMethod.value_of(file_dict.get("transfer_method"))
if transfer_method == FileTransferMethod.TOOL_FILE:
file_dict["tool_file_id"] = file_id
elif transfer_method == FileTransferMethod.LOCAL_FILE:
file_dict["upload_file_id"] = file_id
match transfer_method:
case FileTransferMethod.TOOL_FILE:
file_dict["tool_file_id"] = file_id
case FileTransferMethod.LOCAL_FILE:
file_dict["upload_file_id"] = file_id
case FileTransferMethod.REMOTE_URL | FileTransferMethod.DATASOURCE_FILE:
pass
return file_dict

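Note: _update_file_mapping now groups two transfer methods into one arm with an or-pattern and an explicit pass, documenting that they intentionally add no extra key. A sketch of the construct with an illustrative enum:

from enum import Enum

class TransferMethod(Enum):
    TOOL_FILE = 1
    LOCAL_FILE = 2
    REMOTE_URL = 3
    DATASOURCE_FILE = 4

def annotate(method: TransferMethod, mapping: dict) -> dict:
    match method:
        case TransferMethod.TOOL_FILE:
            mapping["tool_file_id"] = "fid"
        case TransferMethod.LOCAL_FILE:
            mapping["upload_file_id"] = "fid"
        # An or-pattern matches if either alternative matches; the pass
        # makes "nothing to add" an explicit, reviewed decision instead
        # of a silent fall-through.
        case TransferMethod.REMOTE_URL | TransferMethod.DATASOURCE_FILE:
            pass
    return mapping

assert annotate(TransferMethod.REMOTE_URL, {}) == {}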
@@ -4,7 +4,7 @@ from graphon.entities.base_node_data import BaseNodeData
from graphon.enums import NodeType
from pydantic import BaseModel

from core.rag.entities import WeightedScoreConfig
from core.rag.entities.retrieval_settings import WeightedScoreConfig
from core.rag.index_processor.index_processor_base import SummaryIndexSettingDict
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from core.workflow.nodes.knowledge_index import KNOWLEDGE_INDEX_NODE_TYPE

@@ -155,24 +155,25 @@ class TriggerWebhookNode(Node[WebhookData]):
outputs[param_name] = raw_data
continue

if param_type == SegmentType.FILE:
# Get File object (already processed by webhook controller)
files = webhook_data.get("files", {})
if files and isinstance(files, dict):
file = files.get(param_name)
if file and isinstance(file, dict):
file_var = self.generate_file_var(param_name, file)
if file_var:
outputs[param_name] = file_var
match param_type:
case SegmentType.FILE:
# Get File object (already processed by webhook controller)
files = webhook_data.get("files", {})
if files and isinstance(files, dict):
file = files.get(param_name)
if file and isinstance(file, dict):
file_var = self.generate_file_var(param_name, file)
if file_var:
outputs[param_name] = file_var
else:
outputs[param_name] = files
else:
outputs[param_name] = files
else:
outputs[param_name] = files
else:
outputs[param_name] = files
else:
# Get regular body parameter
outputs[param_name] = webhook_data.get("body", {}).get(param_name)
case _:
# Get regular body parameter
outputs[param_name] = webhook_data.get("body", {}).get(param_name)

# Include raw webhook data for debugging/advanced use
outputs["_webhook_raw"] = webhook_data

@@ -7,10 +7,12 @@ from typing import TYPE_CHECKING, Any, Union

import redis
from redis import RedisError
from redis.backoff import ExponentialWithJitterBackoff # type: ignore
from redis.cache import CacheConfig
from redis.client import PubSub
from redis.cluster import ClusterNode, RedisCluster
from redis.connection import Connection, SSLConnection
from redis.retry import Retry
from redis.sentinel import Sentinel

from configs import dify_config
@@ -158,8 +160,41 @@ def _get_cache_configuration() -> CacheConfig | None:
return CacheConfig()


def _get_retry_policy() -> Retry:
"""Build the shared retry policy for Redis connections."""
return Retry(
backoff=ExponentialWithJitterBackoff(
base=dify_config.REDIS_RETRY_BACKOFF_BASE,
cap=dify_config.REDIS_RETRY_BACKOFF_CAP,
),
retries=dify_config.REDIS_RETRY_RETRIES,
)


def _get_connection_health_params() -> dict[str, Any]:
"""Get connection health and retry parameters for standalone and Sentinel Redis clients."""
return {
"retry": _get_retry_policy(),
"socket_timeout": dify_config.REDIS_SOCKET_TIMEOUT,
"socket_connect_timeout": dify_config.REDIS_SOCKET_CONNECT_TIMEOUT,
"health_check_interval": dify_config.REDIS_HEALTH_CHECK_INTERVAL,
}


def _get_cluster_connection_health_params() -> dict[str, Any]:
"""Get retry and timeout parameters for Redis Cluster clients.

RedisCluster does not support ``health_check_interval`` as a constructor
keyword (it is silently stripped by ``cleanup_kwargs``), so it is excluded
here. Only ``retry``, ``socket_timeout``, and ``socket_connect_timeout``
are passed through.
"""
params = _get_connection_health_params()
return {k: v for k, v in params.items() if k != "health_check_interval"}


def _get_base_redis_params() -> dict[str, Any]:
"""Get base Redis connection parameters."""
"""Get base Redis connection parameters including retry and health policy."""
return {
"username": dify_config.REDIS_USERNAME,
"password": dify_config.REDIS_PASSWORD or None,
@@ -169,6 +204,7 @@ def _get_base_redis_params() -> dict[str, Any]:
"decode_responses": False,
"protocol": dify_config.REDIS_SERIALIZATION_PROTOCOL,
"cache_config": _get_cache_configuration(),
**_get_connection_health_params(),
}


@@ -215,6 +251,7 @@ def _create_cluster_client() -> Union[redis.Redis, RedisCluster]:
"password": dify_config.REDIS_CLUSTERS_PASSWORD,
"protocol": dify_config.REDIS_SERIALIZATION_PROTOCOL,
"cache_config": _get_cache_configuration(),
**_get_cluster_connection_health_params(),
}
if dify_config.REDIS_MAX_CONNECTIONS:
cluster_kwargs["max_connections"] = dify_config.REDIS_MAX_CONNECTIONS
@@ -226,7 +263,8 @@ def _create_standalone_client(redis_params: dict[str, Any]) -> Union[redis.Redis
"""Create standalone Redis client."""
connection_class, ssl_kwargs = _get_ssl_configuration()

redis_params.update(
params = {**redis_params}
params.update(
{
"host": dify_config.REDIS_HOST,
"port": dify_config.REDIS_PORT,
@@ -235,28 +273,31 @@ def _create_standalone_client(redis_params: dict[str, Any]) -> Union[redis.Redis
)

if dify_config.REDIS_MAX_CONNECTIONS:
redis_params["max_connections"] = dify_config.REDIS_MAX_CONNECTIONS
params["max_connections"] = dify_config.REDIS_MAX_CONNECTIONS

if ssl_kwargs:
redis_params.update(ssl_kwargs)
params.update(ssl_kwargs)

pool = redis.ConnectionPool(**redis_params)
pool = redis.ConnectionPool(**params)
client: redis.Redis = redis.Redis(connection_pool=pool)
return client


def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> redis.Redis | RedisCluster:
max_conns = dify_config.REDIS_MAX_CONNECTIONS
if use_clusters:
if max_conns:
return RedisCluster.from_url(pubsub_url, max_connections=max_conns)
else:
return RedisCluster.from_url(pubsub_url)

if use_clusters:
health_params = _get_cluster_connection_health_params()
kwargs: dict[str, Any] = {**health_params}
if max_conns:
kwargs["max_connections"] = max_conns
return RedisCluster.from_url(pubsub_url, **kwargs)

health_params = _get_connection_health_params()
kwargs = {**health_params}
if max_conns:
return redis.Redis.from_url(pubsub_url, max_connections=max_conns)
else:
return redis.Redis.from_url(pubsub_url)
kwargs["max_connections"] = max_conns
return redis.Redis.from_url(pubsub_url, **kwargs)


def init_app(app: DifyApp):

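Note: _get_retry_policy wires redis-py's Retry onto a jittered exponential backoff. A hedged usage sketch, assuming a redis-py version that ships ExponentialWithJitterBackoff (as the import above implies); the numeric values are placeholders standing in for the dify_config settings:

import redis
from redis.backoff import ExponentialWithJitterBackoff
from redis.retry import Retry

# Placeholder values in lieu of REDIS_RETRY_BACKOFF_BASE/CAP/RETRIES.
retry = Retry(
    backoff=ExponentialWithJitterBackoff(base=1, cap=10),
    retries=3,
)

# Passing the policy to the client makes connection-level errors retry
# with jittered exponential delays instead of failing immediately.
client = redis.Redis(host="localhost", port=6379, retry=retry)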
@@ -1632,52 +1632,53 @@ class Message(Base):

files: list[File] = []
for message_file in message_files:
if message_file.transfer_method == FileTransferMethod.LOCAL_FILE:
if message_file.upload_file_id is None:
raise ValueError(f"MessageFile {message_file.id} is a local file but has no upload_file_id")
file = file_factory.build_from_mapping(
mapping={
match message_file.transfer_method:
case FileTransferMethod.LOCAL_FILE:
if message_file.upload_file_id is None:
raise ValueError(f"MessageFile {message_file.id} is a local file but has no upload_file_id")
file = file_factory.build_from_mapping(
mapping={
"id": message_file.id,
"type": message_file.type,
"transfer_method": message_file.transfer_method,
"upload_file_id": message_file.upload_file_id,
},
tenant_id=current_app.tenant_id,
access_controller=_get_file_access_controller(),
)
case FileTransferMethod.REMOTE_URL:
if message_file.url is None:
raise ValueError(f"MessageFile {message_file.id} is a remote url but has no url")
file = file_factory.build_from_mapping(
mapping={
"id": message_file.id,
"type": message_file.type,
"transfer_method": message_file.transfer_method,
"upload_file_id": message_file.upload_file_id,
"url": message_file.url,
},
tenant_id=current_app.tenant_id,
access_controller=_get_file_access_controller(),
)
case FileTransferMethod.TOOL_FILE:
if message_file.upload_file_id is None:
assert message_file.url is not None
message_file.upload_file_id = message_file.url.split("/")[-1].split(".")[0]
mapping = {
"id": message_file.id,
"type": message_file.type,
"transfer_method": message_file.transfer_method,
"upload_file_id": message_file.upload_file_id,
},
tenant_id=current_app.tenant_id,
access_controller=_get_file_access_controller(),
)
elif message_file.transfer_method == FileTransferMethod.REMOTE_URL:
if message_file.url is None:
raise ValueError(f"MessageFile {message_file.id} is a remote url but has no url")
file = file_factory.build_from_mapping(
mapping={
"id": message_file.id,
"type": message_file.type,
"transfer_method": message_file.transfer_method,
"upload_file_id": message_file.upload_file_id,
"url": message_file.url,
},
tenant_id=current_app.tenant_id,
access_controller=_get_file_access_controller(),
)
elif message_file.transfer_method == FileTransferMethod.TOOL_FILE:
if message_file.upload_file_id is None:
assert message_file.url is not None
message_file.upload_file_id = message_file.url.split("/")[-1].split(".")[0]
mapping = {
"id": message_file.id,
"type": message_file.type,
"transfer_method": message_file.transfer_method,
"tool_file_id": message_file.upload_file_id,
}
file = file_factory.build_from_mapping(
mapping=mapping,
tenant_id=current_app.tenant_id,
access_controller=_get_file_access_controller(),
)
else:
raise ValueError(
f"MessageFile {message_file.id} has an invalid transfer_method {message_file.transfer_method}"
)
"tool_file_id": message_file.upload_file_id,
}
file = file_factory.build_from_mapping(
mapping=mapping,
tenant_id=current_app.tenant_id,
access_controller=_get_file_access_controller(),
)
case FileTransferMethod.DATASOURCE_FILE:
raise ValueError(
f"MessageFile {message_file.id} has an invalid transfer_method {message_file.transfer_method}"
)
files.append(file)

result = cast(

@@ -1625,21 +1625,22 @@ class WorkflowDraftVariable(Base):
# Rebuild them through the file factory so tenant ownership, signed URLs,
# and storage-backed metadata come from canonical records instead of the
# serialized JSON blob.
if segment_type == SegmentType.FILE:
if isinstance(value, File):
return build_segment_with_type(segment_type, value)
elif isinstance(value, dict):
file = self._rebuild_file_types(value)
return build_segment_with_type(segment_type, file)
else:
raise TypeMismatchError(f"expected dict or File for FileSegment, got {type(value)}")
if segment_type == SegmentType.ARRAY_FILE:
if not isinstance(value, list):
raise TypeMismatchError(f"expected list for ArrayFileSegment, got {type(value)}")
file_list = self._rebuild_file_types(value)
return build_segment_with_type(segment_type=segment_type, value=file_list)

return build_segment_with_type(segment_type=segment_type, value=value)
match segment_type:
case SegmentType.FILE:
if isinstance(value, File):
return build_segment_with_type(segment_type, value)
elif isinstance(value, dict):
file = self._rebuild_file_types(value)
return build_segment_with_type(segment_type, file)
else:
raise TypeMismatchError(f"expected dict or File for FileSegment, got {type(value)}")
case SegmentType.ARRAY_FILE:
if not isinstance(value, list):
raise TypeMismatchError(f"expected list for ArrayFileSegment, got {type(value)}")
file_list = self._rebuild_file_types(value)
return build_segment_with_type(segment_type=segment_type, value=file_list)
case _:
return build_segment_with_type(segment_type=segment_type, value=value)

@staticmethod
def rebuild_file_types(value: Any):
@@ -1672,21 +1673,22 @@ class WorkflowDraftVariable(Base):
# Extends `variable_factory.build_segment_with_type` functionality by
# reconstructing `FileSegment`` or `ArrayFileSegment`` objects from
# their serialized dictionary or list representations, respectively.
if segment_type == SegmentType.FILE:
if isinstance(value, File):
return build_segment_with_type(segment_type, value)
elif isinstance(value, dict):
file = cls.rebuild_file_types(value)
return build_segment_with_type(segment_type, file)
else:
raise TypeMismatchError(f"expected dict or File for FileSegment, got {type(value)}")
if segment_type == SegmentType.ARRAY_FILE:
if not isinstance(value, list):
raise TypeMismatchError(f"expected list for ArrayFileSegment, got {type(value)}")
file_list = cls.rebuild_file_types(value)
return build_segment_with_type(segment_type=segment_type, value=file_list)

return build_segment_with_type(segment_type=segment_type, value=value)
match segment_type:
case SegmentType.FILE:
if isinstance(value, File):
return build_segment_with_type(segment_type, value)
elif isinstance(value, dict):
file = cls.rebuild_file_types(value)
return build_segment_with_type(segment_type, file)
else:
raise TypeMismatchError(f"expected dict or File for FileSegment, got {type(value)}")
case SegmentType.ARRAY_FILE:
if not isinstance(value, list):
raise TypeMismatchError(f"expected list for ArrayFileSegment, got {type(value)}")
file_list = cls.rebuild_file_types(value)
return build_segment_with_type(segment_type=segment_type, value=file_list)
case _:
return build_segment_with_type(segment_type=segment_type, value=value)

def get_value(self) -> Segment:
"""Decode the serialized value into its corresponding `Segment` object.

@@ -4,6 +4,7 @@ import time
from collections.abc import Sequence

import click
from sqlalchemy import delete, select
from sqlalchemy.orm import Session, sessionmaker

import app
@@ -113,11 +114,9 @@ def _delete_batch(
try:
with session.begin_nested():
workflow_run_ids = [run.id for run in workflow_runs]
message_data = (
session.query(Message.id, Message.conversation_id)
.where(Message.workflow_run_id.in_(workflow_run_ids))
.all()
)
message_data = session.execute(
select(Message.id, Message.conversation_id).where(Message.workflow_run_id.in_(workflow_run_ids))
).all()
message_id_list = [msg.id for msg in message_data]
conversation_id_list = list({msg.conversation_id for msg in message_data if msg.conversation_id})
if message_id_list:
@@ -132,23 +131,19 @@ def _delete_batch(
SavedMessage,
]
for model in message_related_models:
session.query(model).where(model.message_id.in_(message_id_list)).delete(synchronize_session=False) # type: ignore
session.execute(delete(model).where(model.message_id.in_(message_id_list))) # type: ignore
# error: "DeclarativeAttributeIntercept" has no attribute "message_id". But this type is only in lib
# and these 6 types all have the message_id field.

session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete(
synchronize_session=False
)
session.execute(delete(Message).where(Message.workflow_run_id.in_(workflow_run_ids)))

if conversation_id_list:
session.query(ConversationVariable).where(
ConversationVariable.conversation_id.in_(conversation_id_list)
).delete(synchronize_session=False)

session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete(
synchronize_session=False
session.execute(
delete(ConversationVariable).where(ConversationVariable.conversation_id.in_(conversation_id_list))
)

session.execute(delete(Conversation).where(Conversation.id.in_(conversation_id_list)))

def _delete_node_executions(active_session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]:
run_ids = [run.id for run in runs]
repo = DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(

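Note: _delete_batch moves from legacy Query.delete() to session.execute(delete(...)), the SQLAlchemy 2.0-style bulk delete. A minimal runnable sketch with an in-memory table (model and table names are illustrative):

from sqlalchemy import Column, Integer, create_engine, delete
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Item(Base):
    __tablename__ = "items"
    id = Column(Integer, primary_key=True)

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with sessionmaker(bind=engine).begin() as session:
    session.add_all([Item(id=1), Item(id=2), Item(id=3)])

with sessionmaker(bind=engine).begin() as session:
    # delete() builds one bulk DELETE statement; execute() runs it without
    # loading rows, which is why the rewritten paths above no longer pass
    # synchronize_session flags.
    session.execute(delete(Item).where(Item.id.in_([1, 2])))
    remaining = session.query(Item).count()
assert remaining == 1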
@@ -120,7 +120,7 @@ class ClearFreePlanTenantExpiredLogs:
apps = db.session.scalars(select(App).where(App.tenant_id == tenant_id)).all()
app_ids = [app.id for app in apps]
while True:
with Session(db.engine).no_autoflush as session:
with sessionmaker(bind=db.engine, autoflush=False).begin() as session:
messages = (
session.query(Message)
.where(
@@ -152,7 +152,6 @@ class ClearFreePlanTenantExpiredLogs:
).delete(synchronize_session=False)

cls._clear_message_related_tables(session, tenant_id, message_ids)
session.commit()

click.echo(
click.style(
@@ -161,7 +160,7 @@ class ClearFreePlanTenantExpiredLogs:
)

while True:
with Session(db.engine).no_autoflush as session:
with sessionmaker(bind=db.engine, autoflush=False).begin() as session:
conversations = (
session.query(Conversation)
.where(
@@ -190,7 +189,6 @@ class ClearFreePlanTenantExpiredLogs:
session.query(Conversation).where(
Conversation.id.in_(conversation_ids),
).delete(synchronize_session=False)
session.commit()

click.echo(
click.style(
@@ -294,7 +292,7 @@ class ClearFreePlanTenantExpiredLogs:
break

while True:
with Session(db.engine).no_autoflush as session:
with sessionmaker(bind=db.engine, autoflush=False).begin() as session:
workflow_app_logs = (
session.query(WorkflowAppLog)
.where(
@@ -326,7 +324,6 @@ class ClearFreePlanTenantExpiredLogs:
session.query(WorkflowAppLog).where(WorkflowAppLog.id.in_(workflow_app_log_ids)).delete(
synchronize_session=False
)
session.commit()

click.echo(
click.style(

@@ -2,7 +2,7 @@ import logging
from collections.abc import Mapping

from sqlalchemy import case, select
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker

from core.app.entities.app_invoke_entities import InvokeFrom
from extensions.ext_database import db
@@ -24,7 +24,7 @@ class EndUserService:
when an end-user ID is known.
"""

with Session(db.engine, expire_on_commit=False) as session:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
return session.scalar(
select(EndUser)
.where(
@@ -54,7 +54,7 @@ class EndUserService:
if not user_id:
user_id = DefaultEndUserSessionID.DEFAULT_SESSION_ID

with Session(db.engine, expire_on_commit=False) as session:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
# Query with ORDER BY to prioritize exact type matches while maintaining backward compatibility
# This single query approach is more efficient than separate queries
end_user = session.scalar(
@@ -82,7 +82,6 @@ class EndUserService:
user_id,
)
end_user.type = type
session.commit()
else:
# Create new end user if none exists
end_user = EndUser(
@@ -94,7 +93,6 @@ class EndUserService:
external_user_id=user_id,
)
session.add(end_user)
session.commit()

return end_user

@@ -135,7 +133,7 @@ class EndUserService:
if not unique_app_ids:
return result

with Session(db.engine, expire_on_commit=False) as session:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
# Fetch existing end users for all target apps in a single query
existing_end_users: list[EndUser] = list(
session.scalars(
@@ -174,7 +172,6 @@ class EndUserService:
)

session.add_all(new_end_users)
session.commit()

for eu in new_end_users:
result[eu.app_id] = eu

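Note: EndUserService keeps expire_on_commit=False while switching to sessionmaker().begin(). Without it, ORM objects are expired at commit, and touching their attributes after the block would try to refresh against a closed session. A runnable sketch (model names illustrative):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

# expire_on_commit=False keeps attribute values loaded after the
# transaction commits, so objects returned out of the block stay
# readable without a live session.
with sessionmaker(bind=engine, expire_on_commit=False).begin() as session:
    user = User(id=1, name="end-user")
    session.add(user)

assert user.name == "end-user"  # safe: attributes were not expired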
@@ -1,4 +1,4 @@
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker

from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
@@ -7,7 +7,7 @@ from models.account import TenantPluginAutoUpgradeStrategy
class PluginAutoUpgradeService:
@staticmethod
def get_strategy(tenant_id: str) -> TenantPluginAutoUpgradeStrategy | None:
with Session(db.engine) as session:
with sessionmaker(bind=db.engine).begin() as session:
return (
session.query(TenantPluginAutoUpgradeStrategy)
.where(TenantPluginAutoUpgradeStrategy.tenant_id == tenant_id)
@@ -23,7 +23,7 @@ class PluginAutoUpgradeService:
exclude_plugins: list[str],
include_plugins: list[str],
) -> bool:
with Session(db.engine) as session:
with sessionmaker(bind=db.engine).begin() as session:
exist_strategy = (
session.query(TenantPluginAutoUpgradeStrategy)
.where(TenantPluginAutoUpgradeStrategy.tenant_id == tenant_id)
@@ -46,12 +46,11 @@ class PluginAutoUpgradeService:
exist_strategy.exclude_plugins = exclude_plugins
exist_strategy.include_plugins = include_plugins

session.commit()
return True

@staticmethod
def exclude_plugin(tenant_id: str, plugin_id: str) -> bool:
with Session(db.engine) as session:
with sessionmaker(bind=db.engine).begin() as session:
exist_strategy = (
session.query(TenantPluginAutoUpgradeStrategy)
.where(TenantPluginAutoUpgradeStrategy.tenant_id == tenant_id)
@@ -83,5 +82,4 @@ class PluginAutoUpgradeService:
exist_strategy.upgrade_mode = TenantPluginAutoUpgradeStrategy.UpgradeMode.EXCLUDE
exist_strategy.exclude_plugins = [plugin_id]

session.commit()
return True

@@ -1,4 +1,4 @@
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker

from extensions.ext_database import db
from models.account import TenantPluginPermission
@@ -7,7 +7,7 @@ from models.account import TenantPluginPermission
class PluginPermissionService:
@staticmethod
def get_permission(tenant_id: str) -> TenantPluginPermission | None:
with Session(db.engine) as session:
with sessionmaker(bind=db.engine).begin() as session:
return session.query(TenantPluginPermission).where(TenantPluginPermission.tenant_id == tenant_id).first()

@staticmethod
@@ -16,7 +16,7 @@ class PluginPermissionService:
install_permission: TenantPluginPermission.InstallPermission,
debug_permission: TenantPluginPermission.DebugPermission,
):
with Session(db.engine) as session:
with sessionmaker(bind=db.engine).begin() as session:
permission = (
session.query(TenantPluginPermission).where(TenantPluginPermission.tenant_id == tenant_id).first()
)
@@ -30,5 +30,4 @@ class PluginPermissionService:
permission.install_permission = install_permission
permission.debug_permission = debug_permission

session.commit()
return True

@@ -5,7 +5,6 @@ import logging
import uuid
from collections.abc import Mapping
from datetime import UTC, datetime
from enum import StrEnum
from typing import cast
from urllib.parse import urlparse
from uuid import uuid4
@@ -38,6 +37,7 @@ from models import Account
from models.dataset import Dataset, DatasetCollectionBinding, Pipeline
from models.enums import CollectionBindingType, DatasetRuntimeMode
from models.workflow import Workflow, WorkflowType
from services.app_dsl_service import ImportMode, ImportStatus
from services.entities.knowledge_entities.rag_pipeline_entities import (
IconInfo,
KnowledgeConfiguration,
@@ -54,18 +54,6 @@ DSL_MAX_SIZE = 10 * 1024 * 1024 # 10MB
CURRENT_DSL_VERSION = "0.1.0"


class ImportMode(StrEnum):
YAML_CONTENT = "yaml-content"
YAML_URL = "yaml-url"


class ImportStatus(StrEnum):
COMPLETED = "completed"
COMPLETED_WITH_WARNINGS = "completed-with-warnings"
PENDING = "pending"
FAILED = "failed"


class RagPipelineImportInfo(BaseModel):
id: str
status: ImportStatus

@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, TypedDict, cast
import sqlalchemy as sa
from sqlalchemy import delete, select, tuple_
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session
from sqlalchemy.orm import Session, sessionmaker

from configs import dify_config
from extensions.ext_database import db
@@ -369,7 +369,7 @@ class MessagesCleanService:
batch_deleted_messages = 0

# Step 1: Fetch a batch of messages using cursor
with Session(db.engine, expire_on_commit=False) as session:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
fetch_messages_start = time.monotonic()
msg_stmt = (
select(Message.id, Message.app_id, Message.created_at)
@@ -477,7 +477,7 @@ class MessagesCleanService:

# Step 4: Batch delete messages and their relations
if not self._dry_run:
with Session(db.engine, expire_on_commit=False) as session:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
delete_relations_start = time.monotonic()
# Delete related records first
self._batch_delete_message_relations(session, message_ids_to_delete)
@@ -489,9 +489,7 @@ class MessagesCleanService:
delete_result = cast(CursorResult, session.execute(delete_stmt))
messages_deleted = delete_result.rowcount
delete_messages_ms = int((time.monotonic() - delete_messages_start) * 1000)
commit_start = time.monotonic()
session.commit()
commit_ms = int((time.monotonic() - commit_start) * 1000)
commit_ms = 0

stats["total_deleted"] += messages_deleted
batch_deleted_messages = messages_deleted

@@ -8,7 +8,7 @@ This service centralizes all AppTrigger-related business logic.
import logging

from sqlalchemy import update
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker

from extensions.ext_database import db
from models.enums import AppTriggerStatus
@@ -34,13 +34,12 @@ class AppTriggerService:

"""
try:
with Session(db.engine) as session:
with sessionmaker(bind=db.engine).begin() as session:
session.execute(
update(AppTrigger)
.where(AppTrigger.tenant_id == tenant_id, AppTrigger.status == AppTriggerStatus.ENABLED)
.values(status=AppTriggerStatus.RATE_LIMITED)
)
session.commit()
logger.info("Marked all enabled triggers as rate limited for tenant %s", tenant_id)
except Exception:
logger.exception("Failed to mark all enabled triggers as rate limited for tenant %s", tenant_id)

@@ -6,7 +6,7 @@ from collections.abc import Mapping
from typing import Any

from sqlalchemy import desc, func
from sqlalchemy.orm import Session
from sqlalchemy.orm import Session, sessionmaker

from configs import dify_config
from constants import HIDDEN_VALUE, UNKNOWN_VALUE
@@ -146,7 +146,7 @@ class TriggerProviderService:
"""
try:
provider_controller = TriggerManager.get_trigger_provider(tenant_id, provider_id)
with Session(db.engine, expire_on_commit=False) as session:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
# Use distributed lock to prevent race conditions
lock_key = f"trigger_provider_create_lock:{tenant_id}_{provider_id}"
with redis_client.lock(lock_key, timeout=20):
@@ -205,7 +205,6 @@ class TriggerProviderService:
subscription.id = subscription_id or str(uuid.uuid4())

session.add(subscription)
session.commit()

return {
"result": "success",
@@ -241,7 +240,7 @@ class TriggerProviderService:
:param expires_at: Optional new expiration timestamp
:return: Success response with updated subscription info
"""
with Session(db.engine, expire_on_commit=False) as session:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
# Use distributed lock to prevent race conditions on the same subscription
lock_key = f"trigger_subscription_update_lock:{tenant_id}_{subscription_id}"
with redis_client.lock(lock_key, timeout=20):
@@ -302,8 +301,6 @@ class TriggerProviderService:
if expires_at is not None:
subscription.expires_at = expires_at

session.commit()

# Clear subscription cache
delete_cache_for_subscription(
tenant_id=tenant_id,
@@ -404,7 +401,7 @@ class TriggerProviderService:
:param subscription_id: Subscription instance ID
:return: New token info
"""
with Session(db.engine) as session:
with sessionmaker(bind=db.engine).begin() as session:
subscription = session.query(TriggerSubscription).filter_by(tenant_id=tenant_id, id=subscription_id).first()

if not subscription:
@@ -448,7 +445,6 @@ class TriggerProviderService:
# Update credentials
subscription.credentials = dict(encrypter.encrypt(dict(refreshed_credentials.credentials)))
subscription.credential_expires_at = refreshed_credentials.expires_at
session.commit()

# Clear cache
cache.delete()
@@ -478,7 +474,7 @@ class TriggerProviderService:
"""
now_ts: int = int(now if now is not None else _time.time())

with Session(db.engine) as session:
with sessionmaker(bind=db.engine).begin() as session:
subscription: TriggerSubscription | None = (
session.query(TriggerSubscription).filter_by(tenant_id=tenant_id, id=subscription_id).first()
)
@@ -531,7 +527,6 @@ class TriggerProviderService:
# Persist refreshed properties and expires_at
subscription.properties = dict(properties_encrypter.encrypt(dict(refreshed.properties)))
subscription.expires_at = int(refreshed.expires_at)
session.commit()
properties_cache.delete()

logger.info(
@@ -639,7 +634,7 @@ class TriggerProviderService:
tenant_id=tenant_id, provider_id=provider_id
)

with Session(db.engine) as session:
with sessionmaker(bind=db.engine).begin() as session:
# Find existing custom client params
custom_client = (
session.query(TriggerOAuthTenantClient)
@@ -683,8 +678,6 @@ class TriggerProviderService:
if enabled is not None:
custom_client.enabled = enabled

session.commit()

return {"result": "success"}

@classmethod
@@ -733,13 +726,12 @@ class TriggerProviderService:
:param provider_id: Provider identifier
:return: Success response
"""
with Session(db.engine) as session:
with sessionmaker(bind=db.engine).begin() as session:
session.query(TriggerOAuthTenantClient).filter_by(
tenant_id=tenant_id,
provider=provider_id.provider_name,
plugin_id=provider_id.plugin_id,
).delete()
session.commit()

return {"result": "success"}


@@ -8,7 +8,7 @@ from flask import Request, Response
from graphon.entities.graph_config import NodeConfigDict
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker

from core.plugin.entities.plugin_daemon import CredentialType
from core.plugin.entities.request import TriggerDispatchResponse, TriggerInvokeEventResponse
@@ -215,7 +215,7 @@ class TriggerService:
not_found_in_cache.append(node_info)
continue

with Session(db.engine) as session:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
try:
# lock the concurrent plugin trigger creation
redis_client.lock(f"{cls.__PLUGIN_TRIGGER_NODE_CACHE_KEY__}:apps:{app.id}:lock", timeout=10)
@@ -260,7 +260,6 @@ class TriggerService:
cache.model_dump_json(),
ex=60 * 60,
)
session.commit()

# Update existing records if subscription_id changed
for node_info in nodes_in_graph:
@@ -290,14 +289,12 @@ class TriggerService:
cache.model_dump_json(),
ex=60 * 60,
)
session.commit()

# delete the nodes not found in the graph
for node_id in nodes_id_in_db:
if node_id not in nodes_id_in_graph:
session.delete(nodes_id_in_db[node_id])
redis_client.delete(f"{cls.__PLUGIN_TRIGGER_NODE_CACHE_KEY__}:{app.id}:{node_id}")
session.commit()
except Exception:
logger.exception("Failed to sync plugin trigger relationships for app %s", app.id)
raise

@ -12,7 +12,7 @@ from graphon.file import FileTransferMethod
from graphon.variables.types import ArrayValidation, SegmentType
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import Session
from sqlalchemy.orm import Session, sessionmaker
from werkzeug.datastructures import FileStorage
from werkzeug.exceptions import RequestEntityTooLarge

@ -597,21 +597,38 @@ class WebhookService:
Raises:
ValueError: If the value cannot be converted to the specified type
"""
if param_type == SegmentType.STRING:
return value
elif param_type == SegmentType.NUMBER:
if not cls._can_convert_to_number(value):
raise ValueError(f"Cannot convert '{value}' to number")
numeric_value = float(value)
return int(numeric_value) if numeric_value.is_integer() else numeric_value
elif param_type == SegmentType.BOOLEAN:
lower_value = value.lower()
bool_map = {"true": True, "false": False, "1": True, "0": False, "yes": True, "no": False}
if lower_value not in bool_map:
raise ValueError(f"Cannot convert '{value}' to boolean")
return bool_map[lower_value]
else:
raise ValueError(f"Unsupported type '{param_type}' for form data parameter '{param_name}'")
match param_type:
case SegmentType.STRING:
return value
case SegmentType.NUMBER:
if not cls._can_convert_to_number(value):
raise ValueError(f"Cannot convert '{value}' to number")
numeric_value = float(value)
return int(numeric_value) if numeric_value.is_integer() else numeric_value
case SegmentType.BOOLEAN:
lower_value = value.lower()
bool_map = {"true": True, "false": False, "1": True, "0": False, "yes": True, "no": False}
if lower_value not in bool_map:
raise ValueError(f"Cannot convert '{value}' to boolean")
return bool_map[lower_value]
case (
SegmentType.OBJECT
| SegmentType.FILE
| SegmentType.ARRAY_ANY
| SegmentType.ARRAY_STRING
| SegmentType.ARRAY_NUMBER
| SegmentType.ARRAY_OBJECT
| SegmentType.ARRAY_FILE
| SegmentType.ARRAY_BOOLEAN
| SegmentType.SECRET
| SegmentType.INTEGER
| SegmentType.FLOAT
| SegmentType.NONE
| SegmentType.GROUP
):
raise ValueError(f"Unsupported type '{param_type}' for form data parameter '{param_name}'")
case _:
raise ValueError(f"Unsupported type '{param_type}' for form data parameter '{param_name}'")

@classmethod
def _validate_json_value(cls, param_name: str, value: Any, param_type: SegmentType | str) -> Any:
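The `match` rewrite above is behavior-preserving: strings pass through, numeric strings collapse to `int` when integral, a fixed map handles booleans, and every other `SegmentType` raises. A small standalone sketch of the same coercion rules (the function name and plain-string type tags here are illustrative, not the service's API):

def coerce_form_value(value: str, param_type: str) -> object:
    # Mirrors the case arms: "3.0" -> 3, "2.5" -> 2.5, "yes" -> True.
    if param_type == "string":
        return value
    if param_type == "number":
        numeric_value = float(value)  # raises ValueError on bad input
        return int(numeric_value) if numeric_value.is_integer() else numeric_value
    if param_type == "boolean":
        bool_map = {"true": True, "false": False, "1": True, "0": False, "yes": True, "no": False}
        try:
            return bool_map[value.lower()]
        except KeyError:
            raise ValueError(f"Cannot convert {value!r} to boolean") from None
    raise ValueError(f"Unsupported type {param_type!r}")

assert coerce_form_value("3.0", "number") == 3
assert coerce_form_value("no", "boolean") is False

Listing every remaining enum member in one `case (...)` arm before `case _` lets type checkers verify the match is exhaustive while keeping a runtime guard for values added later.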
@ -912,7 +929,7 @@ class WebhookService:
logger.warning("Failed to acquire lock for webhook sync, app %s", app.id)
raise RuntimeError("Failed to acquire lock for webhook trigger synchronization")

with Session(db.engine) as session:
with sessionmaker(bind=db.engine, expire_on_commit=False).begin() as session:
# fetch the non-cached nodes from DB
all_records = session.scalars(
select(WorkflowWebhookTrigger).where(
@ -941,14 +958,12 @@ class WebhookService:
redis_client.set(
f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:{app.id}:{node_id}", cache.model_dump_json(), ex=60 * 60
)
session.commit()

# delete the nodes not found in the graph
for node_id in nodes_id_in_db:
if node_id not in nodes_id_in_graph:
session.delete(nodes_id_in_db[node_id])
redis_client.delete(f"{cls.__WEBHOOK_NODE_CACHE_KEY__}:{app.id}:{node_id}")
session.commit()
except Exception:
logger.exception("Failed to sync webhook relationships for app %s", app.id)
raise

@ -3,6 +3,7 @@ import time

import click
from celery import shared_task
from sqlalchemy import delete, select, update

from core.db.session_factory import session_factory
from core.rag.index_processor.constant.doc_type import DocType
@ -30,7 +31,9 @@ def add_document_to_index_task(dataset_document_id: str):
start_at = time.perf_counter()

with session_factory.create_session() as session:
dataset_document = session.query(DatasetDocument).where(DatasetDocument.id == dataset_document_id).first()
dataset_document = session.scalar(
select(DatasetDocument).where(DatasetDocument.id == dataset_document_id).limit(1)
)
if not dataset_document:
logger.info(click.style(f"Document not found: {dataset_document_id}", fg="red"))
return
@ -45,15 +48,14 @@ def add_document_to_index_task(dataset_document_id: str):
if not dataset:
raise Exception(f"Document {dataset_document.id} dataset {dataset_document.dataset_id} doesn't exist.")

segments = (
session.query(DocumentSegment)
segments = session.scalars(
select(DocumentSegment)
.where(
DocumentSegment.document_id == dataset_document.id,
DocumentSegment.status == SegmentStatus.COMPLETED,
)
.order_by(DocumentSegment.position.asc())
.all()
)
).all()

documents = []
multimodal_documents = []
@ -104,18 +106,15 @@ def add_document_to_index_task(dataset_document_id: str):
index_processor.load(dataset, documents, multimodal_documents=multimodal_documents)

# delete auto disable log
session.query(DatasetAutoDisableLog).where(
DatasetAutoDisableLog.document_id == dataset_document.id
).delete()
session.execute(
delete(DatasetAutoDisableLog).where(DatasetAutoDisableLog.document_id == dataset_document.id)
)

# update segment to enable
session.query(DocumentSegment).where(DocumentSegment.document_id == dataset_document.id).update(
{
DocumentSegment.enabled: True,
DocumentSegment.disabled_at: None,
DocumentSegment.disabled_by: None,
DocumentSegment.updated_at: naive_utc_now(),
}
session.execute(
update(DocumentSegment)
.where(DocumentSegment.document_id == dataset_document.id)
.values(enabled=True, disabled_at=None, disabled_by=None, updated_at=naive_utc_now())
)
session.commit()
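Several task hunks apply one translation from the legacy `Query` API to SQLAlchemy 2.0-style statements: `session.query(...).first()` becomes `session.scalar(select(...).limit(1))`, `.all()` becomes `session.scalars(select(...)).all()`, and bulk `.update()`/`.delete()` become `session.execute(update(...))`/`session.execute(delete(...))`. A self-contained sketch of the mapping (the `Item` model and SQLite engine are illustrative, not from the commit):

from sqlalchemy import Column, Integer, String, create_engine, delete, select, update
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Item(Base):  # illustrative model
    __tablename__ = "items"
    id = Column(Integer, primary_key=True)
    name = Column(String)
    enabled = Column(Integer, default=0)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Item(name="a"), Item(name="b")])
    session.commit()

    first = session.scalar(select(Item).where(Item.name == "a").limit(1))    # was .query(...).first()
    rows = session.scalars(select(Item).order_by(Item.id.asc())).all()       # was .query(...).all()
    session.execute(update(Item).where(Item.name == "a").values(enabled=1))  # was .query(...).update({...})
    session.execute(delete(Item).where(Item.name == "b"))                    # was .query(...).delete()
    session.commit()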
@ -1,9 +1,11 @@
import logging
import time
from typing import cast

import click
from celery import shared_task
from sqlalchemy import delete, select
from sqlalchemy.engine import CursorResult

from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
@ -92,14 +94,16 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
# ============ Step 3: Delete metadata binding (separate short transaction) ============
try:
with session_factory.create_session() as session:
deleted_count = int(
session.query(DatasetMetadataBinding)
.where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
)
.delete(synchronize_session=False)
result = cast(
CursorResult,
session.execute(
delete(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
)
),
)
deleted_count = result.rowcount
session.commit()
logger.debug("Deleted %d metadata bindings for dataset_id: %s", deleted_count, dataset_id)
except Exception:
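Where the old code relied on the row count returned by `Query.delete()`, the new code reads it from the `CursorResult` returned by `session.execute()`; the `cast` only narrows the static type for the checker. Continuing the illustrative `Item` sketch above:

with Session(engine) as session:
    result = session.execute(delete(Item).where(Item.enabled == 0))
    deleted_count = result.rowcount  # rows affected, what Query.delete() used to return
    session.commit()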
@ -32,7 +32,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i

with session_factory.create_session() as session:
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
dataset = session.scalar(select(Dataset).where(Dataset.id == dataset_id).limit(1))

if not dataset:
raise Exception("Document has no dataset")
@ -63,7 +63,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
if index_node_ids:
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
dataset = session.scalar(select(Dataset).where(Dataset.id == dataset_id).limit(1))
if dataset:
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
@ -94,7 +94,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i

with session_factory.create_session() as session, session.begin():
if file_id:
file = session.query(UploadFile).where(UploadFile.id == file_id).first()
file = session.scalar(select(UploadFile).where(UploadFile.id == file_id).limit(1))
if file:
try:
storage.delete(file.key)
@ -124,10 +124,12 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i

with session_factory.create_session() as session, session.begin():
# delete dataset metadata binding
session.query(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id == document_id,
).delete()
session.execute(
delete(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id == document_id,
)
)

end_at = time.perf_counter()
logger.info(
@ -3,6 +3,7 @@ import time

import click
from celery import shared_task
from sqlalchemy import delete

from core.db.session_factory import session_factory
from models import ConversationVariable
@ -29,29 +30,21 @@ def delete_conversation_related_data(conversation_id: str):

with session_factory.create_session() as session:
try:
session.query(MessageAnnotation).where(MessageAnnotation.conversation_id == conversation_id).delete(
synchronize_session=False
session.execute(delete(MessageAnnotation).where(MessageAnnotation.conversation_id == conversation_id))

session.execute(delete(MessageFeedback).where(MessageFeedback.conversation_id == conversation_id))

session.execute(
delete(ToolConversationVariables).where(ToolConversationVariables.conversation_id == conversation_id)
)

session.query(MessageFeedback).where(MessageFeedback.conversation_id == conversation_id).delete(
synchronize_session=False
)
session.execute(delete(ToolFile).where(ToolFile.conversation_id == conversation_id))

session.query(ToolConversationVariables).where(
ToolConversationVariables.conversation_id == conversation_id
).delete(synchronize_session=False)
session.execute(delete(ConversationVariable).where(ConversationVariable.conversation_id == conversation_id))

session.query(ToolFile).where(ToolFile.conversation_id == conversation_id).delete(synchronize_session=False)
session.execute(delete(Message).where(Message.conversation_id == conversation_id))

session.query(ConversationVariable).where(ConversationVariable.conversation_id == conversation_id).delete(
synchronize_session=False
)

session.query(Message).where(Message.conversation_id == conversation_id).delete(synchronize_session=False)

session.query(PinnedConversation).where(PinnedConversation.conversation_id == conversation_id).delete(
synchronize_session=False
)
session.execute(delete(PinnedConversation).where(PinnedConversation.conversation_id == conversation_id))

session.commit()
@ -3,7 +3,7 @@ import time

import click
from celery import shared_task
from sqlalchemy import delete
from sqlalchemy import delete, select

from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
@ -29,12 +29,12 @@ def delete_segment_from_index_task(
start_at = time.perf_counter()
with session_factory.create_session() as session:
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
dataset = session.scalar(select(Dataset).where(Dataset.id == dataset_id).limit(1))
if not dataset:
logging.warning("Dataset %s not found, skipping index cleanup", dataset_id)
return

dataset_document = session.query(Document).where(Document.id == document_id).first()
dataset_document = session.scalar(select(Document).where(Document.id == document_id).limit(1))
if not dataset_document:
return

@ -60,11 +60,9 @@ def delete_segment_from_index_task(
)
if dataset.is_multimodal:
# delete segment attachment binding
segment_attachment_bindings = (
session.query(SegmentAttachmentBinding)
.where(SegmentAttachmentBinding.segment_id.in_(segment_ids))
.all()
)
segment_attachment_bindings = session.scalars(
select(SegmentAttachmentBinding).where(SegmentAttachmentBinding.segment_id.in_(segment_ids))
).all()
if segment_attachment_bindings:
attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
index_processor.clean(dataset=dataset, node_ids=attachment_ids, with_keywords=False)
@ -77,7 +75,7 @@ def delete_segment_from_index_task(
session.execute(segment_attachment_bind_delete_stmt)

# delete upload file
session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).delete(synchronize_session=False)
session.execute(delete(UploadFile).where(UploadFile.id.in_(attachment_ids)))
session.commit()

end_at = time.perf_counter()
@ -3,7 +3,7 @@ import time

import click
from celery import shared_task
from sqlalchemy import select
from sqlalchemy import select, update

from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
@ -27,12 +27,12 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen
start_at = time.perf_counter()

with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
dataset = session.scalar(select(Dataset).where(Dataset.id == dataset_id).limit(1))
if not dataset:
logger.info(click.style(f"Dataset {dataset_id} not found, pass.", fg="cyan"))
return

dataset_document = session.query(DatasetDocument).where(DatasetDocument.id == document_id).first()
dataset_document = session.scalar(select(DatasetDocument).where(DatasetDocument.id == document_id).limit(1))

if not dataset_document:
logger.info(click.style(f"Document {document_id} not found, pass.", fg="cyan"))
@ -58,11 +58,9 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen
index_node_ids = [segment.index_node_id for segment in segments]
if dataset.is_multimodal:
segment_ids = [segment.id for segment in segments]
segment_attachment_bindings = (
session.query(SegmentAttachmentBinding)
.where(SegmentAttachmentBinding.segment_id.in_(segment_ids))
.all()
)
segment_attachment_bindings = session.scalars(
select(SegmentAttachmentBinding).where(SegmentAttachmentBinding.segment_id.in_(segment_ids))
).all()
if segment_attachment_bindings:
attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
index_node_ids.extend(attachment_ids)
@ -87,16 +85,14 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen
logger.info(click.style(f"Segments removed from index latency: {end_at - start_at}", fg="green"))
except Exception:
# update segment error msg
session.query(DocumentSegment).where(
DocumentSegment.id.in_(segment_ids),
DocumentSegment.dataset_id == dataset_id,
DocumentSegment.document_id == document_id,
).update(
{
"disabled_at": None,
"disabled_by": None,
"enabled": True,
}
session.execute(
update(DocumentSegment)
.where(
DocumentSegment.id.in_(segment_ids),
DocumentSegment.dataset_id == dataset_id,
DocumentSegment.document_id == document_id,
)
.values(disabled_at=None, disabled_by=None, enabled=True)
)
session.commit()
finally:
@ -47,7 +47,7 @@ def regenerate_summary_index_task(

try:
with session_factory.create_session() as session:
dataset = session.query(Dataset).filter_by(id=dataset_id).first()
dataset = session.scalar(select(Dataset).where(Dataset.id == dataset_id).limit(1))
if not dataset:
logger.error(click.style(f"Dataset not found: {dataset_id}", fg="red"))
return
@ -84,8 +84,8 @@ def regenerate_summary_index_task(
# For embedding_model change: directly query all segments with existing summaries
# Don't require document indexing_status == "completed"
# Include summaries with status "completed" or "error" (if they have content)
segments_with_summaries = (
session.query(DocumentSegment, DocumentSegmentSummary)
segments_with_summaries = session.execute(
select(DocumentSegment, DocumentSegmentSummary)
.join(
DocumentSegmentSummary,
DocumentSegment.id == DocumentSegmentSummary.chunk_id,
@ -110,8 +110,7 @@ def regenerate_summary_index_task(
DatasetDocument.doc_form != IndexStructureType.QA_INDEX, # Skip qa_model documents
)
.order_by(DocumentSegment.document_id.asc(), DocumentSegment.position.asc())
.all()
)
).all()

if not segments_with_summaries:
logger.info(
@ -215,8 +214,8 @@ def regenerate_summary_index_task(

try:
# Get all segments with existing summaries
segments = (
session.query(DocumentSegment)
segments = session.scalars(
select(DocumentSegment)
.join(
DocumentSegmentSummary,
DocumentSegment.id == DocumentSegmentSummary.chunk_id,
@ -229,8 +228,7 @@ def regenerate_summary_index_task(
DocumentSegmentSummary.dataset_id == dataset_id,
)
.order_by(DocumentSegment.position.asc())
.all()
)
).all()

if not segments:
continue
@ -245,13 +243,13 @@ def regenerate_summary_index_task(
summary_record = None
try:
# Get existing summary record
summary_record = (
session.query(DocumentSegmentSummary)
.filter_by(
chunk_id=segment.id,
dataset_id=dataset_id,
summary_record = session.scalar(
select(DocumentSegmentSummary)
.where(
DocumentSegmentSummary.chunk_id == segment.id,
DocumentSegmentSummary.dataset_id == dataset_id,
)
.first()
.limit(1)
)

if not summary_record:
@ -0,0 +1,388 @@
from __future__ import annotations

import uuid
from types import SimpleNamespace
from typing import Any, cast
from unittest.mock import MagicMock, patch

import pytest
from sqlalchemy import select
from sqlalchemy.orm import Session

from models.model import AccountTrialAppRecord, TrialApp
from services import recommended_app_service as service_module
from services.recommended_app_service import RecommendedAppService

# ── Helpers ────────────────────────────────────────────────────────────

def _apps_response(
recommended_apps: list[dict] | None = None,
categories: list[str] | None = None,
) -> dict:
if recommended_apps is None:
recommended_apps = [
{"id": "app-1", "name": "Test App 1", "description": "d1", "category": "productivity"},
{"id": "app-2", "name": "Test App 2", "description": "d2", "category": "communication"},
]
if categories is None:
categories = ["productivity", "communication", "utilities"]
return {"recommended_apps": recommended_apps, "categories": categories}

def _app_detail(
app_id: str = "app-123",
name: str = "Test App",
description: str = "Test description",
**kwargs: Any,
) -> dict:
detail: dict[str, Any] = {
"id": app_id,
"name": name,
"description": description,
"category": kwargs.get("category", "productivity"),
"icon": kwargs.get("icon", "🚀"),
"model_config": kwargs.get("model_config", {}),
}
detail.update(kwargs)
return detail

def _recommendation_detail(result: dict[str, Any] | None) -> dict[str, Any] | None:
return cast("dict[str, Any] | None", result)

def _mock_factory_for_apps(
monkeypatch: pytest.MonkeyPatch,
*,
mode: str,
result: dict[str, Any],
fallback_result: dict[str, Any] | None = None,
) -> tuple[MagicMock, MagicMock]:
retrieval_instance = MagicMock()
retrieval_instance.get_recommended_apps_and_categories.return_value = result
retrieval_factory = MagicMock(return_value=retrieval_instance)
monkeypatch.setattr(service_module.dify_config, "HOSTED_FETCH_APP_TEMPLATES_MODE", mode, raising=False)
monkeypatch.setattr(
service_module.RecommendAppRetrievalFactory,
"get_recommend_app_factory",
MagicMock(return_value=retrieval_factory),
)
builtin_instance = MagicMock()
if fallback_result is not None:
builtin_instance.fetch_recommended_apps_from_builtin.return_value = fallback_result
monkeypatch.setattr(
service_module.RecommendAppRetrievalFactory,
"get_buildin_recommend_app_retrieval",
MagicMock(return_value=builtin_instance),
)
return retrieval_instance, builtin_instance

# ── Pure logic tests: get_recommended_apps_and_categories ──────────────
class TestRecommendedAppServiceGetApps:
@patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
@patch("services.recommended_app_service.dify_config", autospec=True)
def test_success_with_apps(self, mock_config, mock_factory_class):
mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "remote"
expected = _apps_response()

mock_instance = MagicMock()
mock_instance.get_recommended_apps_and_categories.return_value = expected
mock_factory = MagicMock(return_value=mock_instance)
mock_factory_class.get_recommend_app_factory.return_value = mock_factory

result = RecommendedAppService.get_recommended_apps_and_categories("en-US")

assert result == expected
assert len(result["recommended_apps"]) == 2
assert len(result["categories"]) == 3
mock_factory_class.get_recommend_app_factory.assert_called_once_with("remote")
mock_instance.get_recommended_apps_and_categories.assert_called_once_with("en-US")

@patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
@patch("services.recommended_app_service.dify_config", autospec=True)
def test_fallback_to_builtin_when_empty(self, mock_config, mock_factory_class):
mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "remote"
empty_response = {"recommended_apps": [], "categories": []}
builtin_response = _apps_response(
recommended_apps=[{"id": "builtin-1", "name": "Builtin App", "category": "default"}]
)

mock_remote_instance = MagicMock()
mock_remote_instance.get_recommended_apps_and_categories.return_value = empty_response
mock_factory_class.get_recommend_app_factory.return_value = MagicMock(return_value=mock_remote_instance)

mock_builtin_instance = MagicMock()
mock_builtin_instance.fetch_recommended_apps_from_builtin.return_value = builtin_response
mock_factory_class.get_buildin_recommend_app_retrieval.return_value = mock_builtin_instance

result = RecommendedAppService.get_recommended_apps_and_categories("zh-CN")

assert result == builtin_response
assert result["recommended_apps"][0]["id"] == "builtin-1"
mock_builtin_instance.fetch_recommended_apps_from_builtin.assert_called_once_with("en-US")

@patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
@patch("services.recommended_app_service.dify_config", autospec=True)
def test_fallback_when_none_recommended_apps(self, mock_config, mock_factory_class):
mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "db"
none_response = {"recommended_apps": None, "categories": ["test"]}
builtin_response = _apps_response()

mock_db_instance = MagicMock()
mock_db_instance.get_recommended_apps_and_categories.return_value = none_response
mock_factory_class.get_recommend_app_factory.return_value = MagicMock(return_value=mock_db_instance)

mock_builtin_instance = MagicMock()
mock_builtin_instance.fetch_recommended_apps_from_builtin.return_value = builtin_response
mock_factory_class.get_buildin_recommend_app_retrieval.return_value = mock_builtin_instance

result = RecommendedAppService.get_recommended_apps_and_categories("en-US")

assert result == builtin_response
mock_builtin_instance.fetch_recommended_apps_from_builtin.assert_called_once()

@patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
@patch("services.recommended_app_service.dify_config", autospec=True)
def test_different_languages(self, mock_config, mock_factory_class):
mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "builtin"

for language in ["en-US", "zh-CN", "ja-JP", "fr-FR"]:
lang_response = _apps_response(
recommended_apps=[{"id": f"app-{language}", "name": f"App {language}", "category": "test"}]
)
mock_instance = MagicMock()
mock_instance.get_recommended_apps_and_categories.return_value = lang_response
mock_factory_class.get_recommend_app_factory.return_value = MagicMock(return_value=mock_instance)

result = RecommendedAppService.get_recommended_apps_and_categories(language)

assert result["recommended_apps"][0]["id"] == f"app-{language}"
mock_instance.get_recommended_apps_and_categories.assert_called_with(language)

@patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
@patch("services.recommended_app_service.dify_config", autospec=True)
def test_uses_correct_factory_mode(self, mock_config, mock_factory_class):
for mode in ["remote", "builtin", "db"]:
mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = mode
response = _apps_response()
mock_instance = MagicMock()
mock_instance.get_recommended_apps_and_categories.return_value = response
mock_factory_class.get_recommend_app_factory.return_value = MagicMock(return_value=mock_instance)

RecommendedAppService.get_recommended_apps_and_categories("en-US")

mock_factory_class.get_recommend_app_factory.assert_called_with(mode)

# ── Pure logic tests: get_recommend_app_detail ─────────────────────────
class TestRecommendedAppServiceGetDetail:
@patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
@patch("services.recommended_app_service.dify_config", autospec=True)
def test_success(self, mock_config, mock_factory_class):
mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "remote"
expected = _app_detail(app_id="app-123", name="Productivity App", description="A great app")

mock_instance = MagicMock()
mock_instance.get_recommend_app_detail.return_value = expected
mock_factory_class.get_recommend_app_factory.return_value = MagicMock(return_value=mock_instance)

result = _recommendation_detail(RecommendedAppService.get_recommend_app_detail("app-123"))

assert result == expected
assert result["id"] == "app-123"
mock_instance.get_recommend_app_detail.assert_called_once_with("app-123")

@patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
@patch("services.recommended_app_service.dify_config", autospec=True)
def test_different_modes(self, mock_config, mock_factory_class):
for mode in ["remote", "builtin", "db"]:
mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = mode
detail = _app_detail(app_id="test-app", name=f"App from {mode}")
mock_instance = MagicMock()
mock_instance.get_recommend_app_detail.return_value = detail
mock_factory_class.get_recommend_app_factory.return_value = MagicMock(return_value=mock_instance)

result = _recommendation_detail(RecommendedAppService.get_recommend_app_detail("test-app"))

assert result["name"] == f"App from {mode}"
mock_factory_class.get_recommend_app_factory.assert_called_with(mode)

@patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
@patch("services.recommended_app_service.dify_config", autospec=True)
def test_returns_none_when_not_found(self, mock_config, mock_factory_class):
mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "remote"
mock_instance = MagicMock()
mock_instance.get_recommend_app_detail.return_value = None
mock_factory_class.get_recommend_app_factory.return_value = MagicMock(return_value=mock_instance)

result = _recommendation_detail(RecommendedAppService.get_recommend_app_detail("nonexistent"))

assert result is None
mock_instance.get_recommend_app_detail.assert_called_once_with("nonexistent")

@patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
@patch("services.recommended_app_service.dify_config", autospec=True)
def test_returns_empty_dict(self, mock_config, mock_factory_class):
mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "builtin"
mock_instance = MagicMock()
mock_instance.get_recommend_app_detail.return_value = {}
mock_factory_class.get_recommend_app_factory.return_value = MagicMock(return_value=mock_instance)

result = _recommendation_detail(RecommendedAppService.get_recommend_app_detail("app-empty"))

assert result == {}

@patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
@patch("services.recommended_app_service.dify_config", autospec=True)
def test_complex_model_config(self, mock_config, mock_factory_class):
mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "remote"
complex_config = {
"provider": "openai",
"model": "gpt-4",
"parameters": {"temperature": 0.7, "max_tokens": 2000, "top_p": 1.0},
}
expected = _app_detail(
app_id="complex-app",
name="Complex App",
model_config=complex_config,
workflows=["workflow-1", "workflow-2"],
tools=["tool-1", "tool-2", "tool-3"],
)
mock_instance = MagicMock()
mock_instance.get_recommend_app_detail.return_value = expected
mock_factory_class.get_recommend_app_factory.return_value = MagicMock(return_value=mock_instance)

result = _recommendation_detail(RecommendedAppService.get_recommend_app_detail("complex-app"))

assert result["model_config"] == complex_config
assert len(result["workflows"]) == 2
assert len(result["tools"]) == 3

# ── Integration tests: trial app features (real DB) ────────────────────
class TestRecommendedAppServiceTrialFeatures:
def test_get_apps_should_not_query_trial_table_when_disabled(
self, db_session_with_containers: Session, monkeypatch: pytest.MonkeyPatch
):
expected = {"recommended_apps": [{"app_id": "app-1"}], "categories": ["all"]}
retrieval_instance, builtin_instance = _mock_factory_for_apps(monkeypatch, mode="remote", result=expected)
monkeypatch.setattr(
service_module.FeatureService,
"get_system_features",
MagicMock(return_value=SimpleNamespace(enable_trial_app=False)),
)

result = RecommendedAppService.get_recommended_apps_and_categories("en-US")

assert result == expected
retrieval_instance.get_recommended_apps_and_categories.assert_called_once_with("en-US")
builtin_instance.fetch_recommended_apps_from_builtin.assert_not_called()

def test_get_apps_should_enrich_can_trial_when_enabled(
self, db_session_with_containers: Session, monkeypatch: pytest.MonkeyPatch
):
app_id_1 = str(uuid.uuid4())
app_id_2 = str(uuid.uuid4())
tenant_id = str(uuid.uuid4())

# app_id_1 has a TrialApp record; app_id_2 does not
db_session_with_containers.add(TrialApp(app_id=app_id_1, tenant_id=tenant_id))
db_session_with_containers.commit()

remote_result = {"recommended_apps": [], "categories": []}
fallback_result = {
"recommended_apps": [{"app_id": app_id_1}, {"app_id": app_id_2}],
"categories": ["all"],
}
_, builtin_instance = _mock_factory_for_apps(
monkeypatch, mode="remote", result=remote_result, fallback_result=fallback_result
)
monkeypatch.setattr(
service_module.FeatureService,
"get_system_features",
MagicMock(return_value=SimpleNamespace(enable_trial_app=True)),
)

result = RecommendedAppService.get_recommended_apps_and_categories("ja-JP")

builtin_instance.fetch_recommended_apps_from_builtin.assert_called_once_with("en-US")
assert result["recommended_apps"][0]["can_trial"] is True
assert result["recommended_apps"][1]["can_trial"] is False

@pytest.mark.parametrize("has_trial_app", [True, False])
def test_get_detail_should_set_can_trial_when_enabled(
self,
db_session_with_containers: Session,
monkeypatch: pytest.MonkeyPatch,
has_trial_app: bool,
):
app_id = str(uuid.uuid4())
tenant_id = str(uuid.uuid4())

if has_trial_app:
db_session_with_containers.add(TrialApp(app_id=app_id, tenant_id=tenant_id))
db_session_with_containers.commit()

detail = {"id": app_id, "name": "Test App"}
retrieval_instance = MagicMock()
retrieval_instance.get_recommend_app_detail.return_value = detail
retrieval_factory = MagicMock(return_value=retrieval_instance)
monkeypatch.setattr(service_module.dify_config, "HOSTED_FETCH_APP_TEMPLATES_MODE", "remote", raising=False)
monkeypatch.setattr(
service_module.RecommendAppRetrievalFactory,
"get_recommend_app_factory",
MagicMock(return_value=retrieval_factory),
)
monkeypatch.setattr(
service_module.FeatureService,
"get_system_features",
MagicMock(return_value=SimpleNamespace(enable_trial_app=True)),
)

result = cast(dict[str, Any], RecommendedAppService.get_recommend_app_detail(app_id))

assert result["id"] == app_id
assert result["can_trial"] is has_trial_app

def test_add_trial_app_record_increments_count_for_existing(self, db_session_with_containers: Session):
app_id = str(uuid.uuid4())
account_id = str(uuid.uuid4())

db_session_with_containers.add(AccountTrialAppRecord(app_id=app_id, account_id=account_id, count=3))
db_session_with_containers.commit()

RecommendedAppService.add_trial_app_record(app_id, account_id)

db_session_with_containers.expire_all()
record = db_session_with_containers.scalar(
select(AccountTrialAppRecord)
.where(AccountTrialAppRecord.app_id == app_id, AccountTrialAppRecord.account_id == account_id)
.limit(1)
)
assert record is not None
assert record.count == 4

def test_add_trial_app_record_creates_new_record(self, db_session_with_containers: Session):
app_id = str(uuid.uuid4())
account_id = str(uuid.uuid4())

RecommendedAppService.add_trial_app_record(app_id, account_id)

db_session_with_containers.expire_all()
record = db_session_with_containers.scalar(
select(AccountTrialAppRecord)
.where(AccountTrialAppRecord.app_id == app_id, AccountTrialAppRecord.account_id == account_id)
.limit(1)
)
assert record is not None
assert record.app_id == app_id
assert record.account_id == account_id
assert record.count == 1
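The test file above leans on one pytest idiom throughout: `monkeypatch.setattr` swaps a module attribute or classmethod for a `MagicMock`, and the mock's `return_value` chain stands in for whatever the factory would build. A minimal sketch of the idiom against a toy class (all names illustrative):

from unittest.mock import MagicMock

class Factory:  # toy stand-in for the service's retrieval factory
    @classmethod
    def build(cls):
        raise RuntimeError("not reached once patched")

def test_factory_is_swapped(monkeypatch):
    instance = MagicMock()
    instance.fetch.return_value = {"recommended_apps": []}
    monkeypatch.setattr(Factory, "build", MagicMock(return_value=instance))

    assert Factory.build() is instance
    assert Factory.build().fetch() == {"recommended_apps": []}
    # monkeypatch restores the original classmethod when the test ends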
@ -134,6 +134,7 @@ class TestAdvancedChatAppRunnerConversationVariables:

# Patch the necessary components
with (
patch("core.app.apps.advanced_chat.app_runner.sessionmaker") as mock_sessionmaker,
patch("core.app.apps.advanced_chat.app_runner.Session") as mock_session_class,
patch("core.app.apps.advanced_chat.app_runner.select") as mock_select,
patch("core.app.apps.advanced_chat.app_runner.db") as mock_db,
@ -150,7 +151,9 @@ class TestAdvancedChatAppRunnerConversationVariables:
patch("core.app.apps.advanced_chat.app_runner.RedisChannel") as mock_redis_channel_class,
):
# Setup mocks
mock_session_class.return_value.__enter__.return_value = mock_session
mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session
mock_sessionmaker.return_value.begin.return_value.__exit__ = MagicMock(return_value=False)
mock_session_class.return_value.__enter__.return_value = MagicMock()
mock_db.session.query.return_value.where.return_value.first.return_value = MagicMock() # App exists
mock_db.engine = MagicMock()

@ -177,7 +180,6 @@ class TestAdvancedChatAppRunnerConversationVariables:
# Note: Since we're mocking ConversationVariable.from_variable,
# we can't directly check the id, but we can verify add_all was called
assert mock_session.add_all.called, "Session add_all should have been called"
assert mock_session.commit.called, "Session commit should have been called"

def test_no_variables_creates_all(self):
"""Test that all conversation variables are created when none exist in DB."""
@ -278,6 +280,7 @@ class TestAdvancedChatAppRunnerConversationVariables:

# Patch the necessary components
with (
patch("core.app.apps.advanced_chat.app_runner.sessionmaker") as mock_sessionmaker,
patch("core.app.apps.advanced_chat.app_runner.Session") as mock_session_class,
patch("core.app.apps.advanced_chat.app_runner.select") as mock_select,
patch("core.app.apps.advanced_chat.app_runner.db") as mock_db,
@ -295,7 +298,9 @@ class TestAdvancedChatAppRunnerConversationVariables:
patch("core.app.apps.advanced_chat.app_runner.RedisChannel") as mock_redis_channel_class,
):
# Setup mocks
mock_session_class.return_value.__enter__.return_value = mock_session
mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session
mock_sessionmaker.return_value.begin.return_value.__exit__ = MagicMock(return_value=False)
mock_session_class.return_value.__enter__.return_value = MagicMock()
mock_db.session.query.return_value.where.return_value.first.return_value = MagicMock() # App exists
mock_db.engine = MagicMock()

@ -326,7 +331,6 @@ class TestAdvancedChatAppRunnerConversationVariables:
# Verify that all variables were created
assert len(added_items) == 2, "Should have added both variables"
assert mock_session.add_all.called, "Session add_all should have been called"
assert mock_session.commit.called, "Session commit should have been called"

def test_all_variables_exist_no_changes(self):
"""Test that no changes are made when all variables already exist in DB."""
@ -429,6 +433,7 @@ class TestAdvancedChatAppRunnerConversationVariables:

# Patch the necessary components
with (
patch("core.app.apps.advanced_chat.app_runner.sessionmaker") as mock_sessionmaker,
patch("core.app.apps.advanced_chat.app_runner.Session") as mock_session_class,
patch("core.app.apps.advanced_chat.app_runner.select") as mock_select,
patch("core.app.apps.advanced_chat.app_runner.db") as mock_db,
@ -445,7 +450,9 @@ class TestAdvancedChatAppRunnerConversationVariables:
patch("core.app.apps.advanced_chat.app_runner.RedisChannel") as mock_redis_channel_class,
):
# Setup mocks
mock_session_class.return_value.__enter__.return_value = mock_session
mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session
mock_sessionmaker.return_value.begin.return_value.__exit__ = MagicMock(return_value=False)
mock_session_class.return_value.__enter__.return_value = MagicMock()
mock_db.session.query.return_value.where.return_value.first.return_value = MagicMock() # App exists
mock_db.engine = MagicMock()

@ -465,4 +472,3 @@ class TestAdvancedChatAppRunnerConversationVariables:

# Verify that no variables were added
assert not mock_session.add_all.called, "Session add_all should not have been called"
assert mock_session.commit.called, "Session commit should still be called"

@ -93,6 +93,16 @@ def _patch_common_run_deps(runner: AdvancedChatAppRunner):
scalar=lambda *a, **k: MagicMock(),
),
),
sessionmaker=MagicMock(
return_value=MagicMock(
begin=MagicMock(
return_value=MagicMock(
__enter__=lambda s: MagicMock(scalars=MagicMock(return_value=MagicMock(all=lambda: []))),
__exit__=lambda *a, **k: False,
),
),
),
),
select=MagicMock(),
db=MagicMock(engine=MagicMock()),
RedisChannel=MagicMock(),
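These test hunks re-point the mocks from `Session(db.engine)` to the new call chain: `sessionmaker(...)` yields a factory, `.begin()` on the factory yields a context manager, and `__enter__` yields the session the code body sees. Each link becomes one `.return_value` hop on a `MagicMock`. A runnable sketch of the wiring, assuming nothing beyond `unittest.mock`:

from unittest.mock import MagicMock

mock_session = MagicMock()
mock_sessionmaker = MagicMock()
# sessionmaker(bind=...) -> factory; factory.begin() -> context manager;
# __enter__() -> the session bound by the `with` statement.
mock_sessionmaker.return_value.begin.return_value.__enter__.return_value = mock_session
mock_sessionmaker.return_value.begin.return_value.__exit__ = MagicMock(return_value=False)

with mock_sessionmaker(bind=object()).begin() as session:
    session.add_all(["record"])

assert session is mock_session
mock_session.add_all.assert_called_once_with(["record"])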
@ -2,6 +2,7 @@ from __future__ import annotations

from contextlib import contextmanager
from types import SimpleNamespace
from unittest.mock import MagicMock

import pytest
from graphon.enums import BuiltinNodeTypes, WorkflowExecutionStatus
@ -610,33 +611,33 @@ class TestWorkflowGenerateTaskPipeline:

def test_database_session_rolls_back_on_error(self, monkeypatch):
pipeline = _make_pipeline()
calls = {"commit": 0, "rollback": 0}

class _Session:
def __init__(self, *args, **kwargs):
_ = args, kwargs
calls = {"enter": 0, "exit_exc": None}

class _BeginContext:
def __enter__(self):
return self
calls["enter"] += 1
return MagicMock()

def __exit__(self, exc_type, exc, tb):
calls["exit_exc"] = exc_type
return False

def commit(self):
calls["commit"] += 1
class _Sessionmaker:
def __init__(self, *args, **kwargs):
pass

def rollback(self):
calls["rollback"] += 1
def begin(self):
return _BeginContext()

monkeypatch.setattr("core.app.apps.workflow.generate_task_pipeline.Session", _Session)
monkeypatch.setattr("core.app.apps.workflow.generate_task_pipeline.sessionmaker", _Sessionmaker)
monkeypatch.setattr("core.app.apps.workflow.generate_task_pipeline.db", SimpleNamespace(engine=object()))

with pytest.raises(RuntimeError, match="db error"):
with pipeline._database_session():
raise RuntimeError("db error")

assert calls["commit"] == 0
assert calls["rollback"] == 1
assert calls["enter"] == 1
assert calls["exit_exc"] is RuntimeError

def test_node_retry_and_started_handlers_cover_none_and_value(self):
pipeline = _make_pipeline()
@ -71,7 +71,7 @@ def test_vector_methods_delegate_to_underlying_implementation():
assert vector.search_by_full_text("hello", top_k=2) == runner.search_by_full_text.return_value
vector.delete()

runner._create_collection_if_not_exists.assert_called_once_with(2)
runner.create_collection_if_not_exists.assert_called_once_with(2)
runner.add_texts.assert_any_call(texts, [[0.1, 0.2]])
runner.delete_by_ids.assert_called_once_with(["d1"])
runner.delete_by_metadata_field.assert_called_once_with("document_id", "doc-1")

@ -249,7 +249,7 @@ def test_create_collection_if_not_exists_creates_when_missing(monkeypatch):
vector._client = MagicMock()
vector._client.describe_collection.side_effect = stubs.TeaException(statusCode=404)

vector._create_collection_if_not_exists(embedding_dimension=1024)
vector.create_collection_if_not_exists(embedding_dimension=1024)

vector._client.create_collection.assert_called_once()
openapi_module.redis_client.set.assert_called_once()
@ -268,7 +268,7 @@ def test_create_collection_if_not_exists_skips_when_cached(monkeypatch):
vector.config = _config()
vector._client = MagicMock()

vector._create_collection_if_not_exists(embedding_dimension=1024)
vector.create_collection_if_not_exists(embedding_dimension=1024)

vector._client.describe_collection.assert_not_called()
vector._client.create_collection.assert_not_called()
@ -290,7 +290,7 @@ def test_create_collection_if_not_exists_raises_on_non_404_errors(monkeypatch):
vector._client.describe_collection.side_effect = stubs.TeaException(statusCode=500)

with pytest.raises(ValueError, match="failed to create collection collection_1"):
vector._create_collection_if_not_exists(embedding_dimension=512)
vector.create_collection_if_not_exists(embedding_dimension=512)

def test_openapi_add_delete_and_search_methods(monkeypatch):

@ -374,7 +374,7 @@ def test_create_collection_if_not_exists_creates_table_indexes_and_cache(monkeyp

vector._get_cursor = _cursor_context

vector._create_collection_if_not_exists(embedding_dimension=3)
vector.create_collection_if_not_exists(embedding_dimension=3)

assert any("CREATE TABLE IF NOT EXISTS dify.collection" in call.args[0] for call in cursor.execute.call_args_list)
assert any("CREATE INDEX collection_embedding_idx" in call.args[0] for call in cursor.execute.call_args_list)
@ -404,7 +404,7 @@ def test_create_collection_if_not_exists_raises_for_non_existing_error(monkeypat
vector._get_cursor = _cursor_context

with pytest.raises(RuntimeError, match="permission denied"):
vector._create_collection_if_not_exists(embedding_dimension=3)
vector.create_collection_if_not_exists(embedding_dimension=3)

def test_delete_methods_raise_when_error_is_not_missing_table():
@ -4909,15 +4909,17 @@ class TestInternalHooksCoverage:
session_ctx.__enter__.return_value = session
session_ctx.__exit__.return_value = False

sessionmaker_ctx = MagicMock()
sessionmaker_ctx.begin.return_value = session_ctx

with (
patch("core.rag.retrieval.dataset_retrieval.db", SimpleNamespace(engine=Mock())),
patch("core.rag.retrieval.dataset_retrieval.Session", return_value=session_ctx),
patch("core.rag.retrieval.dataset_retrieval.sessionmaker", return_value=sessionmaker_ctx),
patch.object(retrieval, "_send_trace_task") as mock_trace,
):
retrieval._on_retrieval_end(flask_app=app, documents=docs, message_id="m1", timer={"cost": 1})

query.update.assert_called_once()
session.commit.assert_called_once()
mock_trace.assert_called_once()

def test_retriever_variants(self, retrieval: DatasetRetrieval) -> None:
@ -1,53 +1,125 @@
from unittest.mock import patch

from redis import RedisError
from redis.retry import Retry

from extensions.ext_redis import redis_fallback
from extensions.ext_redis import (
_get_base_redis_params,
_get_cluster_connection_health_params,
_get_connection_health_params,
redis_fallback,
)

def test_redis_fallback_success():
@redis_fallback(default_return=None)
def test_func():
return "success"
class TestGetConnectionHealthParams:
@patch("extensions.ext_redis.dify_config")
def test_includes_all_health_params(self, mock_config):
mock_config.REDIS_RETRY_RETRIES = 3
mock_config.REDIS_RETRY_BACKOFF_BASE = 1.0
mock_config.REDIS_RETRY_BACKOFF_CAP = 10.0
mock_config.REDIS_SOCKET_TIMEOUT = 5.0
mock_config.REDIS_SOCKET_CONNECT_TIMEOUT = 5.0
mock_config.REDIS_HEALTH_CHECK_INTERVAL = 30

assert test_func() == "success"
params = _get_connection_health_params()

assert "retry" in params
assert "socket_timeout" in params
assert "socket_connect_timeout" in params
assert "health_check_interval" in params
assert isinstance(params["retry"], Retry)
assert params["retry"]._retries == 3
assert params["socket_timeout"] == 5.0
assert params["socket_connect_timeout"] == 5.0
assert params["health_check_interval"] == 30

def test_redis_fallback_error():
@redis_fallback(default_return="fallback")
def test_func():
raise RedisError("Redis error")
class TestGetClusterConnectionHealthParams:
@patch("extensions.ext_redis.dify_config")
def test_excludes_health_check_interval(self, mock_config):
mock_config.REDIS_RETRY_RETRIES = 3
mock_config.REDIS_RETRY_BACKOFF_BASE = 1.0
mock_config.REDIS_RETRY_BACKOFF_CAP = 10.0
mock_config.REDIS_SOCKET_TIMEOUT = 5.0
mock_config.REDIS_SOCKET_CONNECT_TIMEOUT = 5.0
mock_config.REDIS_HEALTH_CHECK_INTERVAL = 30

assert test_func() == "fallback"
params = _get_cluster_connection_health_params()

assert "retry" in params
assert "socket_timeout" in params
assert "socket_connect_timeout" in params
assert "health_check_interval" not in params

def test_redis_fallback_none_default():
@redis_fallback()
def test_func():
raise RedisError("Redis error")
class TestGetBaseRedisParams:
@patch("extensions.ext_redis.dify_config")
def test_includes_retry_and_health_params(self, mock_config):
mock_config.REDIS_USERNAME = None
mock_config.REDIS_PASSWORD = None
mock_config.REDIS_DB = 0
mock_config.REDIS_SERIALIZATION_PROTOCOL = 3
mock_config.REDIS_ENABLE_CLIENT_SIDE_CACHE = False
mock_config.REDIS_RETRY_RETRIES = 3
mock_config.REDIS_RETRY_BACKOFF_BASE = 1.0
mock_config.REDIS_RETRY_BACKOFF_CAP = 10.0
mock_config.REDIS_SOCKET_TIMEOUT = 5.0
mock_config.REDIS_SOCKET_CONNECT_TIMEOUT = 5.0
mock_config.REDIS_HEALTH_CHECK_INTERVAL = 30

assert test_func() is None
params = _get_base_redis_params()

assert "retry" in params
assert isinstance(params["retry"], Retry)
assert params["socket_timeout"] == 5.0
assert params["socket_connect_timeout"] == 5.0
assert params["health_check_interval"] == 30
# Existing params still present
assert params["db"] == 0
assert params["encoding"] == "utf-8"

def test_redis_fallback_with_args():
@redis_fallback(default_return=0)
def test_func(x, y):
raise RedisError("Redis error")
class TestRedisFallback:
def test_redis_fallback_success(self):
@redis_fallback(default_return=None)
def test_func():
return "success"

assert test_func(1, 2) == 0
assert test_func() == "success"

def test_redis_fallback_error(self):
@redis_fallback(default_return="fallback")
def test_func():
raise RedisError("Redis error")

def test_redis_fallback_with_kwargs():
@redis_fallback(default_return={})
def test_func(x=None, y=None):
raise RedisError("Redis error")
assert test_func() == "fallback"

assert test_func(x=1, y=2) == {}
def test_redis_fallback_none_default(self):
@redis_fallback()
def test_func():
raise RedisError("Redis error")

assert test_func() is None

def test_redis_fallback_preserves_function_metadata():
@redis_fallback(default_return=None)
def test_func():
"""Test function docstring"""
pass
def test_redis_fallback_with_args(self):
@redis_fallback(default_return=0)
def test_func(x, y):
raise RedisError("Redis error")

assert test_func.__name__ == "test_func"
assert test_func.__doc__ == "Test function docstring"
assert test_func(1, 2) == 0

def test_redis_fallback_with_kwargs(self):
@redis_fallback(default_return={})
def test_func(x=None, y=None):
raise RedisError("Redis error")

assert test_func(x=1, y=2) == {}

def test_redis_fallback_preserves_function_metadata(self):
@redis_fallback(default_return=None)
def test_func():
"""Test function docstring"""
pass

assert test_func.__name__ == "test_func"
assert test_func.__doc__ == "Test function docstring"
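The new param helpers are asserted to return a `redis.retry.Retry` alongside socket timeouts and a health-check interval. A plausible construction that satisfies those assertions, as a hedged sketch (the real helpers live in `extensions/ext_redis.py` and read `dify_config`; this standalone function is illustrative):

from redis.backoff import ExponentialBackoff
from redis.retry import Retry

def connection_health_params(retries: int = 3, base: float = 1.0, cap: float = 10.0) -> dict:
    # Retry wraps a backoff policy; ExponentialBackoff(cap, base) sleeps
    # up to min(cap, base * 2**attempt) seconds between attempts.
    return {
        "retry": Retry(ExponentialBackoff(cap, base), retries),
        "socket_timeout": 5.0,
        "socket_connect_timeout": 5.0,
        "health_check_interval": 30,
    }

params = connection_health_params()
assert params["retry"]._retries == 3  # same private attribute the tests poke at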
@ -6,12 +6,12 @@ MODULE = "services.plugin.plugin_auto_upgrade_service"

def _patched_session():
"""Patch Session(db.engine) to return a mock session as context manager."""
"""Patch sessionmaker(bind=db.engine).begin() to return a mock session as context manager."""
session = MagicMock()
session_cls = MagicMock()
session_cls.return_value.__enter__ = MagicMock(return_value=session)
session_cls.return_value.__exit__ = MagicMock(return_value=False)
patcher = patch(f"{MODULE}.Session", session_cls)
mock_sessionmaker = MagicMock()
mock_sessionmaker.return_value.begin.return_value.__enter__ = MagicMock(return_value=session)
mock_sessionmaker.return_value.begin.return_value.__exit__ = MagicMock(return_value=False)
patcher = patch(f"{MODULE}.sessionmaker", mock_sessionmaker)
db_patcher = patch(f"{MODULE}.db")
return patcher, db_patcher, session

@ -61,7 +61,6 @@ class TestChangeStrategy:

assert result is True
session.add.assert_called_once()
session.commit.assert_called_once()

def test_updates_existing_strategy(self):
p1, p2, session = _patched_session()
@ -86,7 +85,6 @@ class TestChangeStrategy:
assert existing.upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL
assert existing.exclude_plugins == ["p1"]
assert existing.include_plugins == ["p2"]
session.commit.assert_called_once()

class TestExcludePlugin:
@ -127,7 +125,6 @@ class TestExcludePlugin:

assert result is True
assert existing.exclude_plugins == ["p-existing", "p-new"]
session.commit.assert_called_once()

def test_removes_from_include_list_in_partial_mode(self):
p1, p2, session = _patched_session()

@ -6,12 +6,12 @@ MODULE = "services.plugin.plugin_permission_service"

def _patched_session():
"""Patch Session(db.engine) to return a mock session as context manager."""
"""Patch sessionmaker(bind=db.engine).begin() to return a mock session as context manager."""
session = MagicMock()
session_cls = MagicMock()
session_cls.return_value.__enter__ = MagicMock(return_value=session)
session_cls.return_value.__exit__ = MagicMock(return_value=False)
patcher = patch(f"{MODULE}.Session", session_cls)
mock_sessionmaker = MagicMock()
mock_sessionmaker.return_value.begin.return_value.__enter__ = MagicMock(return_value=session)
mock_sessionmaker.return_value.begin.return_value.__exit__ = MagicMock(return_value=False)
patcher = patch(f"{MODULE}.sessionmaker", mock_sessionmaker)
db_patcher = patch(f"{MODULE}.db")
return patcher, db_patcher, session

@ -55,7 +55,6 @@ class TestChangePermission:
)

session.add.assert_called_once()
session.commit.assert_called_once()

def test_updates_existing_permission(self):
p1, p2, session = _patched_session()
@ -71,5 +70,4 @@ class TestChangePermission:

assert existing.install_permission == TenantPluginPermission.InstallPermission.ADMINS
assert existing.debug_permission == TenantPluginPermission.DebugPermission.ADMINS
session.commit.assert_called_once()
session.add.assert_not_called()
@@ -275,48 +275,46 @@ def test_process_tenant_processes_all_batches(monkeypatch: pytest.MonkeyPatch) -
    msg_session_1.query.side_effect = lambda model: (
        make_query_with_batches([[msg1], []]) if model == service_module.Message else MagicMock()
    )
    msg_session_1.commit.return_value = None

    msg_session_2 = MagicMock()
    msg_session_2.query.side_effect = lambda model: (
        make_query_with_batches([[]]) if model == service_module.Message else MagicMock()
    )
    msg_session_2.commit.return_value = None

    conv_session_1 = MagicMock()
    conv_session_1.query.side_effect = lambda model: (
        make_query_with_batches([[conv1], []]) if model == service_module.Conversation else MagicMock()
    )
    conv_session_1.commit.return_value = None

    conv_session_2 = MagicMock()
    conv_session_2.query.side_effect = lambda model: (
        make_query_with_batches([[]]) if model == service_module.Conversation else MagicMock()
    )
    conv_session_2.commit.return_value = None

    wal_session_1 = MagicMock()
    wal_session_1.query.side_effect = lambda model: (
        make_query_with_batches([[log1], []]) if model == service_module.WorkflowAppLog else MagicMock()
    )
    wal_session_1.commit.return_value = None

    wal_session_2 = MagicMock()
    wal_session_2.query.side_effect = lambda model: (
        make_query_with_batches([[]]) if model == service_module.WorkflowAppLog else MagicMock()
    )
    wal_session_2.commit.return_value = None

    session_wrappers = [
        _session_wrapper_for_no_autoflush(msg_session_1),
        _session_wrapper_for_no_autoflush(msg_session_2),
        _session_wrapper_for_no_autoflush(conv_session_1),
        _session_wrapper_for_no_autoflush(conv_session_2),
        _session_wrapper_for_no_autoflush(wal_session_1),
        _session_wrapper_for_no_autoflush(wal_session_2),
        _sessionmaker_wrapper_for_begin(msg_session_1),
        _sessionmaker_wrapper_for_begin(msg_session_2),
        _sessionmaker_wrapper_for_begin(conv_session_1),
        _sessionmaker_wrapper_for_begin(conv_session_2),
        _sessionmaker_wrapper_for_begin(wal_session_1),
        _sessionmaker_wrapper_for_begin(wal_session_2),
    ]

    monkeypatch.setattr(service_module, "Session", lambda _engine: session_wrappers.pop(0))
    def fake_sessionmaker(*args, **kwargs):
        if kwargs.get("autoflush") is False:
            return session_wrappers.pop(0)
        return object()

    monkeypatch.setattr(service_module, "sessionmaker", fake_sessionmaker)

    def fake_select(*_args, **_kwargs):
        stmt = MagicMock()
@@ -333,8 +331,6 @@ def test_process_tenant_processes_all_batches(monkeypatch: pytest.MonkeyPatch) -
    run_repo = MagicMock()
    run_repo.get_expired_runs_batch.side_effect = [[SimpleNamespace(id="wr-1", to_dict=lambda: {"id": "wr-1"})], []]
    run_repo.delete_runs_by_ids.return_value = 1

    monkeypatch.setattr(service_module, "sessionmaker", lambda **_kwargs: object())
    monkeypatch.setattr(
        service_module.DifyAPIRepositoryFactory,
        "create_api_workflow_node_execution_repository",
@@ -574,13 +570,18 @@ def test_process_tenant_repo_loops_break_on_empty_second_batch(monkeypatch: pyte
    q_empty.limit.return_value = q_empty
    q_empty.all.return_value = []
    empty_session.query.return_value = q_empty
    empty_session.commit.return_value = None
    session_wrappers = [
        _session_wrapper_for_no_autoflush(empty_session),
        _session_wrapper_for_no_autoflush(empty_session),
        _session_wrapper_for_no_autoflush(empty_session),
        _sessionmaker_wrapper_for_begin(empty_session),
        _sessionmaker_wrapper_for_begin(empty_session),
        _sessionmaker_wrapper_for_begin(empty_session),
    ]
    monkeypatch.setattr(service_module, "Session", lambda _engine: session_wrappers.pop(0))

    def fake_sessionmaker(*args, **kwargs):
        if kwargs.get("autoflush") is False:
            return session_wrappers.pop(0)
        return object()

    monkeypatch.setattr(service_module, "sessionmaker", fake_sessionmaker)

    def fake_select(*_args, **_kwargs):
        stmt = MagicMock()
@@ -606,8 +607,6 @@ def test_process_tenant_repo_loops_break_on_empty_second_batch(monkeypatch: pyte
        [],
    ]
    run_repo.delete_runs_by_ids.return_value = 2

    monkeypatch.setattr(service_module, "sessionmaker", lambda **_kwargs: object())
    monkeypatch.setattr(
        service_module.DifyAPIRepositoryFactory,
        "create_api_workflow_node_execution_repository",

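(A note on the fake_sessionmaker stubs above — a sketch under the assumption that the cleanup service builds its batch-processing session factories with autoflush=False, which is the keyword the stubs key on to hand out the prepared wrappers. Names are illustrative, not the real service code.)

    # Hypothetical shape of the call the tests intercept via kwargs.get("autoflush") is False.
    from sqlalchemy.orm import sessionmaker

    def purge_batch(engine, rows):
        session_factory = sessionmaker(bind=engine, autoflush=False)
        with session_factory.begin() as session:
            for row in rows:
                session.delete(row)
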
@@ -1,628 +0,0 @@
"""
Comprehensive unit tests for RecommendedAppService.

This test suite provides complete coverage of recommended app operations in Dify,
following TDD principles with the Arrange-Act-Assert pattern.

## Test Coverage

### 1. Get Recommended Apps and Categories (TestRecommendedAppServiceGetApps)
Tests fetching recommended apps with categories:
- Successful retrieval with recommended apps
- Fallback to builtin when no recommended apps
- Different language support
- Factory mode selection (remote, builtin, db)
- Empty result handling

### 2. Get Recommend App Detail (TestRecommendedAppServiceGetDetail)
Tests fetching individual app details:
- Successful app detail retrieval
- Different factory modes
- App not found scenarios
- Language-specific details

## Testing Approach

- **Mocking Strategy**: All external dependencies (dify_config, RecommendAppRetrievalFactory)
  are mocked for fast, isolated unit tests
- **Factory Pattern**: Tests verify correct factory selection based on mode
- **Fixtures**: Mock objects are configured per test method
- **Assertions**: Each test verifies return values and factory method calls

## Key Concepts

**Factory Modes:**
- remote: Fetch from remote API
- builtin: Use built-in templates
- db: Fetch from database

**Fallback Logic:**
- If remote/db returns no apps, fallback to builtin en-US templates
- Ensures users always see some recommended apps
"""

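# (Editor's sketch, not part of the original file: the fallback logic described
# above, reduced to its core shape. The factory calls match what the tests below
# mock; the body itself is illustrative, not the actual service implementation.)
#
#     mode = dify_config.HOSTED_FETCH_APP_TEMPLATES_MODE
#     retrieval = RecommendAppRetrievalFactory.get_recommend_app_factory(mode)()
#     result = retrieval.get_recommended_apps_and_categories(language)
#     if not result.get("recommended_apps"):
#         builtin = RecommendAppRetrievalFactory.get_buildin_recommend_app_retrieval()
#         result = builtin.fetch_recommended_apps_from_builtin("en-US")  # fallback is hardcoded to en-US
#     return result
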
from unittest.mock import MagicMock, patch

import pytest

from services.recommended_app_service import RecommendedAppService


class RecommendedAppServiceTestDataFactory:
    """
    Factory for creating test data and mock objects.

    Provides reusable methods to create consistent mock objects for testing
    recommended app operations.
    """

    @staticmethod
    def create_recommended_apps_response(
        recommended_apps: list[dict] | None = None,
        categories: list[str] | None = None,
    ) -> dict:
        """
        Create a mock response for recommended apps.

        Args:
            recommended_apps: List of recommended app dictionaries
            categories: List of category names

        Returns:
            Dictionary with recommended_apps and categories
        """
        if recommended_apps is None:
            recommended_apps = [
                {
                    "id": "app-1",
                    "name": "Test App 1",
                    "description": "Test description 1",
                    "category": "productivity",
                },
                {
                    "id": "app-2",
                    "name": "Test App 2",
                    "description": "Test description 2",
                    "category": "communication",
                },
            ]
        if categories is None:
            categories = ["productivity", "communication", "utilities"]

        return {
            "recommended_apps": recommended_apps,
            "categories": categories,
        }

    @staticmethod
    def create_app_detail_response(
        app_id: str = "app-123",
        name: str = "Test App",
        description: str = "Test description",
        **kwargs,
    ) -> dict:
        """
        Create a mock response for app detail.

        Args:
            app_id: App identifier
            name: App name
            description: App description
            **kwargs: Additional fields

        Returns:
            Dictionary with app details
        """
        detail = {
            "id": app_id,
            "name": name,
            "description": description,
            "category": kwargs.get("category", "productivity"),
            "icon": kwargs.get("icon", "🚀"),
            "model_config": kwargs.get("model_config", {}),
        }
        detail.update(kwargs)
        return detail


@pytest.fixture
def factory():
    """Provide the test data factory to all tests."""
    return RecommendedAppServiceTestDataFactory


class TestRecommendedAppServiceGetApps:
    """Test get_recommended_apps_and_categories operations."""

    @patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
    @patch("services.recommended_app_service.dify_config", autospec=True)
    def test_get_recommended_apps_success_with_apps(self, mock_config, mock_factory_class, factory):
        """Test successful retrieval of recommended apps when apps are returned."""
        # Arrange
        mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "remote"

        expected_response = factory.create_recommended_apps_response()

        # Mock factory and retrieval instance
        mock_retrieval_instance = MagicMock()
        mock_retrieval_instance.get_recommended_apps_and_categories.return_value = expected_response

        mock_factory = MagicMock()
        mock_factory.return_value = mock_retrieval_instance
        mock_factory_class.get_recommend_app_factory.return_value = mock_factory

        # Act
        result = RecommendedAppService.get_recommended_apps_and_categories("en-US")

        # Assert
        assert result == expected_response
        assert len(result["recommended_apps"]) == 2
        assert len(result["categories"]) == 3
        mock_factory_class.get_recommend_app_factory.assert_called_once_with("remote")
        mock_retrieval_instance.get_recommended_apps_and_categories.assert_called_once_with("en-US")

    @patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
    @patch("services.recommended_app_service.dify_config", autospec=True)
    def test_get_recommended_apps_fallback_to_builtin_when_empty(self, mock_config, mock_factory_class, factory):
        """Test fallback to builtin when no recommended apps are returned."""
        # Arrange
        mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "remote"

        # Remote returns empty recommended_apps
        empty_response = {"recommended_apps": [], "categories": []}

        # Builtin fallback response
        builtin_response = factory.create_recommended_apps_response(
            recommended_apps=[{"id": "builtin-1", "name": "Builtin App", "category": "default"}]
        )

        # Mock remote retrieval instance (returns empty)
        mock_remote_instance = MagicMock()
        mock_remote_instance.get_recommended_apps_and_categories.return_value = empty_response

        mock_remote_factory = MagicMock()
        mock_remote_factory.return_value = mock_remote_instance
        mock_factory_class.get_recommend_app_factory.return_value = mock_remote_factory

        # Mock builtin retrieval instance
        mock_builtin_instance = MagicMock()
        mock_builtin_instance.fetch_recommended_apps_from_builtin.return_value = builtin_response
        mock_factory_class.get_buildin_recommend_app_retrieval.return_value = mock_builtin_instance

        # Act
        result = RecommendedAppService.get_recommended_apps_and_categories("zh-CN")

        # Assert
        assert result == builtin_response
        assert len(result["recommended_apps"]) == 1
        assert result["recommended_apps"][0]["id"] == "builtin-1"
        # Verify fallback was called with en-US (hardcoded)
        mock_builtin_instance.fetch_recommended_apps_from_builtin.assert_called_once_with("en-US")

    @patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
    @patch("services.recommended_app_service.dify_config", autospec=True)
    def test_get_recommended_apps_fallback_when_none_recommended_apps(self, mock_config, mock_factory_class, factory):
        """Test fallback when recommended_apps key is None."""
        # Arrange
        mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "db"

        # Response with None recommended_apps
        none_response = {"recommended_apps": None, "categories": ["test"]}

        # Builtin fallback response
        builtin_response = factory.create_recommended_apps_response()

        # Mock db retrieval instance (returns None)
        mock_db_instance = MagicMock()
        mock_db_instance.get_recommended_apps_and_categories.return_value = none_response

        mock_db_factory = MagicMock()
        mock_db_factory.return_value = mock_db_instance
        mock_factory_class.get_recommend_app_factory.return_value = mock_db_factory

        # Mock builtin retrieval instance
        mock_builtin_instance = MagicMock()
        mock_builtin_instance.fetch_recommended_apps_from_builtin.return_value = builtin_response
        mock_factory_class.get_buildin_recommend_app_retrieval.return_value = mock_builtin_instance

        # Act
        result = RecommendedAppService.get_recommended_apps_and_categories("en-US")

        # Assert
        assert result == builtin_response
        mock_builtin_instance.fetch_recommended_apps_from_builtin.assert_called_once()

    @patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
    @patch("services.recommended_app_service.dify_config", autospec=True)
    def test_get_recommended_apps_with_different_languages(self, mock_config, mock_factory_class, factory):
        """Test retrieval with different language codes."""
        # Arrange
        mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "builtin"

        languages = ["en-US", "zh-CN", "ja-JP", "fr-FR"]

        for language in languages:
            # Create language-specific response
            lang_response = factory.create_recommended_apps_response(
                recommended_apps=[{"id": f"app-{language}", "name": f"App {language}", "category": "test"}]
            )

            # Mock retrieval instance
            mock_instance = MagicMock()
            mock_instance.get_recommended_apps_and_categories.return_value = lang_response

            mock_factory = MagicMock()
            mock_factory.return_value = mock_instance
            mock_factory_class.get_recommend_app_factory.return_value = mock_factory

            # Act
            result = RecommendedAppService.get_recommended_apps_and_categories(language)

            # Assert
            assert result["recommended_apps"][0]["id"] == f"app-{language}"
            mock_instance.get_recommended_apps_and_categories.assert_called_with(language)

    @patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
    @patch("services.recommended_app_service.dify_config", autospec=True)
    def test_get_recommended_apps_uses_correct_factory_mode(self, mock_config, mock_factory_class, factory):
        """Test that correct factory is selected based on mode."""
        # Arrange
        modes = ["remote", "builtin", "db"]

        for mode in modes:
            mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = mode

            response = factory.create_recommended_apps_response()

            # Mock retrieval instance
            mock_instance = MagicMock()
            mock_instance.get_recommended_apps_and_categories.return_value = response

            mock_factory = MagicMock()
            mock_factory.return_value = mock_instance
            mock_factory_class.get_recommend_app_factory.return_value = mock_factory

            # Act
            RecommendedAppService.get_recommended_apps_and_categories("en-US")

            # Assert
            mock_factory_class.get_recommend_app_factory.assert_called_with(mode)


class TestRecommendedAppServiceGetDetail:
    """Test get_recommend_app_detail operations."""

    @patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
    @patch("services.recommended_app_service.dify_config", autospec=True)
    def test_get_recommend_app_detail_success(self, mock_config, mock_factory_class, factory):
        """Test successful retrieval of app detail."""
        # Arrange
        mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "remote"
        app_id = "app-123"

        expected_detail = factory.create_app_detail_response(
            app_id=app_id,
            name="Productivity App",
            description="A great productivity app",
            category="productivity",
        )

        # Mock retrieval instance
        mock_instance = MagicMock()
        mock_instance.get_recommend_app_detail.return_value = expected_detail

        mock_factory = MagicMock()
        mock_factory.return_value = mock_instance
        mock_factory_class.get_recommend_app_factory.return_value = mock_factory

        # Act
        result = _recommendation_detail(RecommendedAppService.get_recommend_app_detail(app_id))

        # Assert
        assert result == expected_detail
        assert result["id"] == app_id
        assert result["name"] == "Productivity App"
        mock_instance.get_recommend_app_detail.assert_called_once_with(app_id)

    @patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
    @patch("services.recommended_app_service.dify_config", autospec=True)
    def test_get_recommend_app_detail_with_different_modes(self, mock_config, mock_factory_class, factory):
        """Test app detail retrieval with different factory modes."""
        # Arrange
        modes = ["remote", "builtin", "db"]
        app_id = "test-app"

        for mode in modes:
            mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = mode

            detail = factory.create_app_detail_response(app_id=app_id, name=f"App from {mode}")

            # Mock retrieval instance
            mock_instance = MagicMock()
            mock_instance.get_recommend_app_detail.return_value = detail

            mock_factory = MagicMock()
            mock_factory.return_value = mock_instance
            mock_factory_class.get_recommend_app_factory.return_value = mock_factory

            # Act
            result = _recommendation_detail(RecommendedAppService.get_recommend_app_detail(app_id))

            # Assert
            assert result["name"] == f"App from {mode}"
            mock_factory_class.get_recommend_app_factory.assert_called_with(mode)

    @patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
    @patch("services.recommended_app_service.dify_config", autospec=True)
    def test_get_recommend_app_detail_returns_none_when_not_found(self, mock_config, mock_factory_class, factory):
        """Test that None is returned when app is not found."""
        # Arrange
        mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "remote"
        app_id = "nonexistent-app"

        # Mock retrieval instance returning None
        mock_instance = MagicMock()
        mock_instance.get_recommend_app_detail.return_value = None

        mock_factory = MagicMock()
        mock_factory.return_value = mock_instance
        mock_factory_class.get_recommend_app_factory.return_value = mock_factory

        # Act
        result = _recommendation_detail(RecommendedAppService.get_recommend_app_detail(app_id))

        # Assert
        assert result is None
        mock_instance.get_recommend_app_detail.assert_called_once_with(app_id)

    @patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
    @patch("services.recommended_app_service.dify_config", autospec=True)
    def test_get_recommend_app_detail_returns_empty_dict(self, mock_config, mock_factory_class, factory):
        """Test handling of empty dict response."""
        # Arrange
        mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "builtin"
        app_id = "app-empty"

        # Mock retrieval instance returning empty dict
        mock_instance = MagicMock()
        mock_instance.get_recommend_app_detail.return_value = {}

        mock_factory = MagicMock()
        mock_factory.return_value = mock_instance
        mock_factory_class.get_recommend_app_factory.return_value = mock_factory

        # Act
        result = _recommendation_detail(RecommendedAppService.get_recommend_app_detail(app_id))

        # Assert
        assert result == {}

    @patch("services.recommended_app_service.RecommendAppRetrievalFactory", autospec=True)
    @patch("services.recommended_app_service.dify_config", autospec=True)
    def test_get_recommend_app_detail_with_complex_model_config(self, mock_config, mock_factory_class, factory):
        """Test app detail with complex model configuration."""
        # Arrange
        mock_config.HOSTED_FETCH_APP_TEMPLATES_MODE = "remote"
        app_id = "complex-app"

        complex_model_config = {
            "provider": "openai",
            "model": "gpt-4",
            "parameters": {
                "temperature": 0.7,
                "max_tokens": 2000,
                "top_p": 1.0,
            },
        }

        expected_detail = factory.create_app_detail_response(
            app_id=app_id,
            name="Complex App",
            model_config=complex_model_config,
            workflows=["workflow-1", "workflow-2"],
            tools=["tool-1", "tool-2", "tool-3"],
        )

        # Mock retrieval instance
        mock_instance = MagicMock()
        mock_instance.get_recommend_app_detail.return_value = expected_detail

        mock_factory = MagicMock()
        mock_factory.return_value = mock_instance
        mock_factory_class.get_recommend_app_factory.return_value = mock_factory

        # Act
        result = _recommendation_detail(RecommendedAppService.get_recommend_app_detail(app_id))

        # Assert
        assert result["model_config"] == complex_model_config
        assert len(result["workflows"]) == 2
        assert len(result["tools"]) == 3


# === Merged from test_recommended_app_service_additional.py ===


from types import SimpleNamespace
from typing import Any, cast
from unittest.mock import MagicMock

import pytest

from services import recommended_app_service as service_module
from services.recommended_app_service import RecommendedAppService


def _recommendation_detail(result: dict[str, Any] | None) -> dict[str, Any]:
    return cast(dict[str, Any], result)


@pytest.fixture
def mocked_db_session(monkeypatch: pytest.MonkeyPatch) -> MagicMock:
    # Arrange
    session = MagicMock()
    monkeypatch.setattr(service_module, "db", SimpleNamespace(session=session))

    # Assert
    return session


def _mock_factory_for_apps(
    monkeypatch: pytest.MonkeyPatch,
    *,
    mode: str,
    result: dict[str, Any],
    fallback_result: dict[str, Any] | None = None,
) -> tuple[MagicMock, MagicMock]:
    retrieval_instance = MagicMock()
    retrieval_instance.get_recommended_apps_and_categories.return_value = result
    retrieval_factory = MagicMock(return_value=retrieval_instance)
    monkeypatch.setattr(service_module.dify_config, "HOSTED_FETCH_APP_TEMPLATES_MODE", mode, raising=False)
    monkeypatch.setattr(
        service_module.RecommendAppRetrievalFactory,
        "get_recommend_app_factory",
        MagicMock(return_value=retrieval_factory),
    )

    builtin_instance = MagicMock()
    if fallback_result is not None:
        builtin_instance.fetch_recommended_apps_from_builtin.return_value = fallback_result
        monkeypatch.setattr(
            service_module.RecommendAppRetrievalFactory,
            "get_buildin_recommend_app_retrieval",
            MagicMock(return_value=builtin_instance),
        )
    return retrieval_instance, builtin_instance


def test_get_recommended_apps_and_categories_should_not_query_trial_table_when_trial_feature_disabled(
    monkeypatch: pytest.MonkeyPatch,
    mocked_db_session: MagicMock,
) -> None:
    # Arrange
    expected = {"recommended_apps": [{"app_id": "app-1"}], "categories": ["all"]}
    retrieval_instance, builtin_instance = _mock_factory_for_apps(
        monkeypatch,
        mode="remote",
        result=expected,
    )
    monkeypatch.setattr(
        service_module.FeatureService,
        "get_system_features",
        MagicMock(return_value=SimpleNamespace(enable_trial_app=False)),
    )

    # Act
    result = RecommendedAppService.get_recommended_apps_and_categories("en-US")

    # Assert
    assert result == expected
    retrieval_instance.get_recommended_apps_and_categories.assert_called_once_with("en-US")
    builtin_instance.fetch_recommended_apps_from_builtin.assert_not_called()
    mocked_db_session.scalar.assert_not_called()


def test_get_recommended_apps_and_categories_should_fallback_and_enrich_can_trial_when_trial_feature_enabled(
    monkeypatch: pytest.MonkeyPatch,
    mocked_db_session: MagicMock,
) -> None:
    # Arrange
    remote_result = {"recommended_apps": [], "categories": []}
    fallback_result = {"recommended_apps": [{"app_id": "app-1"}, {"app_id": "app-2"}], "categories": ["all"]}
    _, builtin_instance = _mock_factory_for_apps(
        monkeypatch,
        mode="remote",
        result=remote_result,
        fallback_result=fallback_result,
    )
    monkeypatch.setattr(
        service_module.FeatureService,
        "get_system_features",
        MagicMock(return_value=SimpleNamespace(enable_trial_app=True)),
    )
    mocked_db_session.scalar.side_effect = [SimpleNamespace(id="trial-app"), None]

    # Act
    result = RecommendedAppService.get_recommended_apps_and_categories("ja-JP")

    # Assert
    builtin_instance.fetch_recommended_apps_from_builtin.assert_called_once_with("en-US")
    assert result["recommended_apps"][0]["can_trial"] is True
    assert result["recommended_apps"][1]["can_trial"] is False
    assert mocked_db_session.scalar.call_count == 2


@pytest.mark.parametrize(
    ("trial_query_result", "expected_can_trial"),
    [
        (SimpleNamespace(id="trial"), True),
        (None, False),
    ],
)
def test_get_recommend_app_detail_should_set_can_trial_when_trial_feature_enabled(
    monkeypatch: pytest.MonkeyPatch,
    mocked_db_session: MagicMock,
    trial_query_result: Any,
    expected_can_trial: bool,
) -> None:
    # Arrange
    detail = {"id": "app-1", "name": "Test App"}
    retrieval_instance = MagicMock()
    retrieval_instance.get_recommend_app_detail.return_value = detail
    retrieval_factory = MagicMock(return_value=retrieval_instance)
    monkeypatch.setattr(service_module.dify_config, "HOSTED_FETCH_APP_TEMPLATES_MODE", "remote", raising=False)
    monkeypatch.setattr(
        service_module.RecommendAppRetrievalFactory,
        "get_recommend_app_factory",
        MagicMock(return_value=retrieval_factory),
    )
    monkeypatch.setattr(
        service_module.FeatureService,
        "get_system_features",
        MagicMock(return_value=SimpleNamespace(enable_trial_app=True)),
    )
    mocked_db_session.scalar.return_value = trial_query_result

    # Act
    result = cast(dict[str, Any], RecommendedAppService.get_recommend_app_detail("app-1"))

    # Assert
    assert result["id"] == "app-1"
    assert result["can_trial"] is expected_can_trial
    mocked_db_session.scalar.assert_called_once()


def test_add_trial_app_record_should_increment_count_when_existing_record_found(
    mocked_db_session: MagicMock,
) -> None:
    # Arrange
    existing_record = SimpleNamespace(count=3)
    mocked_db_session.scalar.return_value = existing_record

    # Act
    RecommendedAppService.add_trial_app_record("app-1", "account-1")

    # Assert
    assert existing_record.count == 4
    mocked_db_session.scalar.assert_called_once()
    mocked_db_session.commit.assert_called_once()
    mocked_db_session.add.assert_not_called()


def test_add_trial_app_record_should_create_new_record_when_no_existing_record(
    mocked_db_session: MagicMock,
) -> None:
    # Arrange
    mocked_db_session.scalar.return_value = None

    # Act
    RecommendedAppService.add_trial_app_record("app-2", "account-2")

    # Assert
    mocked_db_session.scalar.assert_called_once()
    mocked_db_session.add.assert_called_once()
    added = mocked_db_session.add.call_args.args[0]
    assert added.app_id == "app-2"
    assert added.account_id == "account-2"
    assert added.count == 1
    mocked_db_session.commit.assert_called_once()
@@ -63,6 +63,12 @@ def mock_session(mocker: MockerFixture) -> MagicMock:
    mock_session_cm.__enter__.return_value = mock_session_instance
    mock_session_cm.__exit__.return_value = False
    mocker.patch("services.trigger.trigger_provider_service.Session", return_value=mock_session_cm)
    mock_begin_cm = MagicMock()
    mock_begin_cm.__enter__.return_value = mock_session_instance
    mock_begin_cm.__exit__.return_value = False
    mock_sessionmaker_instance = MagicMock()
    mock_sessionmaker_instance.begin.return_value = mock_begin_cm
    mocker.patch("services.trigger.trigger_provider_service.sessionmaker", return_value=mock_sessionmaker_instance)
    return mock_session_instance


@@ -212,7 +218,6 @@ def test_add_trigger_subscription_should_create_subscription_successfully_for_ap
    # Assert
    assert result["result"] == "success"
    mock_session.add.assert_called_once()
    mock_session.commit.assert_called_once()


def test_add_trigger_subscription_should_store_empty_credentials_for_unauthorized_type(
@@ -406,7 +411,7 @@ def test_update_trigger_subscription_should_update_fields_and_clear_cache(
    assert subscription.credentials == {"api_key": "new-key"}
    assert subscription.credential_expires_at == 100
    assert subscription.expires_at == 200
    mock_session.commit.assert_called_once()

    mock_delete_cache.assert_called_once()


@@ -593,7 +598,7 @@ def test_refresh_oauth_token_should_refresh_and_persist_new_credentials(
    assert result == {"result": "success", "expires_at": 12345}
    assert subscription.credentials == {"access_token": "new"}
    assert subscription.credential_expires_at == 12345
    mock_session.commit.assert_called_once()

    cache.delete.assert_called_once()


@@ -664,7 +669,7 @@ def test_refresh_subscription_should_refresh_and_persist_properties(
    assert result == {"result": "success", "expires_at": 999}
    assert subscription.properties == {"p": "new-enc"}
    assert subscription.expires_at == 999
    mock_session.commit.assert_called_once()

    prop_cache.delete.assert_called_once()


@@ -838,7 +843,6 @@ def test_save_custom_oauth_client_params_should_create_record_and_clear_params_w
    assert fake_model.encrypted_oauth_params == "{}"
    assert fake_model.enabled is True
    mock_session.add.assert_called_once_with(fake_model)
    mock_session.commit.assert_called_once()


def test_save_custom_oauth_client_params_should_merge_hidden_values_and_delete_cache(
@@ -870,7 +874,6 @@ def test_save_custom_oauth_client_params_should_merge_hidden_values_and_delete_c
    assert result == {"result": "success"}
    assert json.loads(custom_client.encrypted_oauth_params) == {"client_id": "new-id"}
    cache.delete.assert_called_once()
    mock_session.commit.assert_called_once()


def test_get_custom_oauth_client_params_should_return_empty_when_record_missing(
@@ -921,7 +924,6 @@ def test_delete_custom_oauth_client_params_should_delete_record_and_commit(

    # Assert
    assert result == {"result": "success"}
    mock_session.commit.assert_called_once()


@pytest.mark.parametrize("exists", [True, False])

@@ -617,6 +617,20 @@ class _SessionContext:
        return False


class _SessionmakerContext:
    def __init__(self, session: Any) -> None:
        self._session = session

    def begin(self) -> "_SessionmakerContext":
        return self

    def __enter__(self) -> Any:
        return self._session

    def __exit__(self, exc_type: Any, exc: Any, tb: Any) -> bool:
        return False


@pytest.fixture
def flask_app() -> Flask:
    return Flask(__name__)
@@ -625,6 +639,7 @@ def flask_app() -> Flask:
def _patch_session(monkeypatch: pytest.MonkeyPatch, session: Any) -> None:
    monkeypatch.setattr(service_module, "db", SimpleNamespace(engine=MagicMock(), session=MagicMock()))
    monkeypatch.setattr(service_module, "Session", lambda *args, **kwargs: _SessionContext(session))
    monkeypatch.setattr(service_module, "sessionmaker", lambda *args, **kwargs: _SessionmakerContext(session))


def _workflow_trigger(**kwargs: Any) -> WorkflowWebhookTrigger:
@@ -1241,7 +1256,6 @@ def test_sync_webhook_relationships_should_create_missing_records_and_delete_sta
    # Assert
    assert len(fake_session.added) == 1
    assert len(fake_session.deleted) == 1
    assert fake_session.commit_count == 2
    redis_set_mock.assert_called_once()
    redis_delete_mock.assert_called_once()
    lock.release.assert_called_once()

60
api/uv.lock
generated
@@ -1137,41 +1137,41 @@ wheels = [

[[package]]
name = "cryptography"
version = "46.0.6"
version = "46.0.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" }
sdist = { url = "https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" },
{ url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" },
{ url = "https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" },
{ url = "https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" },
{ url = "https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = "2026-03-25T23:33:29.206Z" },
{ url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" },
{ url = "https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" },
{ url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" },
{ url = "https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" },
{ url = "https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" },
{ url = "https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" },
{ url = "https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" },
{ url = "https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" },
{ url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = "2026-03-25T23:33:46.649Z" },
{ url = "https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" },
{ url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" },
{ url = "https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" },
{ url = "https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" },
{ url = "https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" },
{ url = "https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" },
{ url = "https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" },
{ url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" },
{ url = "https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" },
{ url = "https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" },
{ url = "https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" },
{ url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" },
{ url = "https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" },
{ url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" },
{ url = "https://files.pythonhosted.org/packages/0b/5d/4a8f770695d73be252331e60e526291e3df0c9b27556a90a6b47bccca4c2/cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4", size = 7179869, upload-time = "2026-04-08T01:56:17.157Z" },
{ url = "https://files.pythonhosted.org/packages/5f/45/6d80dc379b0bbc1f9d1e429f42e4cb9e1d319c7a8201beffd967c516ea01/cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325", size = 4275492, upload-time = "2026-04-08T01:56:19.36Z" },
{ url = "https://files.pythonhosted.org/packages/4a/9a/1765afe9f572e239c3469f2cb429f3ba7b31878c893b246b4b2994ffe2fe/cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308", size = 4426670, upload-time = "2026-04-08T01:56:21.415Z" },
{ url = "https://files.pythonhosted.org/packages/8f/3e/af9246aaf23cd4ee060699adab1e47ced3f5f7e7a8ffdd339f817b446462/cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77", size = 4280275, upload-time = "2026-04-08T01:56:23.539Z" },
{ url = "https://files.pythonhosted.org/packages/0f/54/6bbbfc5efe86f9d71041827b793c24811a017c6ac0fd12883e4caa86b8ed/cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1", size = 4928402, upload-time = "2026-04-08T01:56:25.624Z" },
{ url = "https://files.pythonhosted.org/packages/2d/cf/054b9d8220f81509939599c8bdbc0c408dbd2bdd41688616a20731371fe0/cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef", size = 4459985, upload-time = "2026-04-08T01:56:27.309Z" },
{ url = "https://files.pythonhosted.org/packages/f9/46/4e4e9c6040fb01c7467d47217d2f882daddeb8828f7df800cb806d8a2288/cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de", size = 3990652, upload-time = "2026-04-08T01:56:29.095Z" },
{ url = "https://files.pythonhosted.org/packages/36/5f/313586c3be5a2fbe87e4c9a254207b860155a8e1f3cca99f9910008e7d08/cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83", size = 4279805, upload-time = "2026-04-08T01:56:30.928Z" },
{ url = "https://files.pythonhosted.org/packages/69/33/60dfc4595f334a2082749673386a4d05e4f0cf4df8248e63b2c3437585f2/cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb", size = 4892883, upload-time = "2026-04-08T01:56:32.614Z" },
{ url = "https://files.pythonhosted.org/packages/c7/0b/333ddab4270c4f5b972f980adef4faa66951a4aaf646ca067af597f15563/cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b", size = 4459756, upload-time = "2026-04-08T01:56:34.306Z" },
{ url = "https://files.pythonhosted.org/packages/d2/14/633913398b43b75f1234834170947957c6b623d1701ffc7a9600da907e89/cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85", size = 4410244, upload-time = "2026-04-08T01:56:35.977Z" },
{ url = "https://files.pythonhosted.org/packages/10/f2/19ceb3b3dc14009373432af0c13f46aa08e3ce334ec6eff13492e1812ccd/cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e", size = 4674868, upload-time = "2026-04-08T01:56:38.034Z" },
{ url = "https://files.pythonhosted.org/packages/1a/bb/a5c213c19ee94b15dfccc48f363738633a493812687f5567addbcbba9f6f/cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457", size = 3026504, upload-time = "2026-04-08T01:56:39.666Z" },
{ url = "https://files.pythonhosted.org/packages/2b/02/7788f9fefa1d060ca68717c3901ae7fffa21ee087a90b7f23c7a603c32ae/cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b", size = 3488363, upload-time = "2026-04-08T01:56:41.893Z" },
{ url = "https://files.pythonhosted.org/packages/a7/7f/cd42fc3614386bc0c12f0cb3c4ae1fc2bbca5c9662dfed031514911d513d/cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4", size = 7165618, upload-time = "2026-04-08T01:57:10.645Z" },
{ url = "https://files.pythonhosted.org/packages/a5/d0/36a49f0262d2319139d2829f773f1b97ef8aef7f97e6e5bd21455e5a8fb5/cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7", size = 4270628, upload-time = "2026-04-08T01:57:12.885Z" },
{ url = "https://files.pythonhosted.org/packages/8a/6c/1a42450f464dda6ffbe578a911f773e54dd48c10f9895a23a7e88b3e7db5/cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832", size = 4415405, upload-time = "2026-04-08T01:57:14.923Z" },
{ url = "https://files.pythonhosted.org/packages/9a/92/4ed714dbe93a066dc1f4b4581a464d2d7dbec9046f7c8b7016f5286329e2/cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163", size = 4272715, upload-time = "2026-04-08T01:57:16.638Z" },
{ url = "https://files.pythonhosted.org/packages/b7/e6/a26b84096eddd51494bba19111f8fffe976f6a09f132706f8f1bf03f51f7/cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2", size = 4918400, upload-time = "2026-04-08T01:57:19.021Z" },
{ url = "https://files.pythonhosted.org/packages/c7/08/ffd537b605568a148543ac3c2b239708ae0bd635064bab41359252ef88ed/cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067", size = 4450634, upload-time = "2026-04-08T01:57:21.185Z" },
{ url = "https://files.pythonhosted.org/packages/16/01/0cd51dd86ab5b9befe0d031e276510491976c3a80e9f6e31810cce46c4ad/cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0", size = 3985233, upload-time = "2026-04-08T01:57:22.862Z" },
{ url = "https://files.pythonhosted.org/packages/92/49/819d6ed3a7d9349c2939f81b500a738cb733ab62fbecdbc1e38e83d45e12/cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba", size = 4271955, upload-time = "2026-04-08T01:57:24.814Z" },
{ url = "https://files.pythonhosted.org/packages/80/07/ad9b3c56ebb95ed2473d46df0847357e01583f4c52a85754d1a55e29e4d0/cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006", size = 4879888, upload-time = "2026-04-08T01:57:26.88Z" },
{ url = "https://files.pythonhosted.org/packages/b8/c7/201d3d58f30c4c2bdbe9b03844c291feb77c20511cc3586daf7edc12a47b/cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0", size = 4449961, upload-time = "2026-04-08T01:57:29.068Z" },
{ url = "https://files.pythonhosted.org/packages/a5/ef/649750cbf96f3033c3c976e112265c33906f8e462291a33d77f90356548c/cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85", size = 4401696, upload-time = "2026-04-08T01:57:31.029Z" },
{ url = "https://files.pythonhosted.org/packages/41/52/a8908dcb1a389a459a29008c29966c1d552588d4ae6d43f3a1a4512e0ebe/cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e", size = 4664256, upload-time = "2026-04-08T01:57:33.144Z" },
{ url = "https://files.pythonhosted.org/packages/4b/fa/f0ab06238e899cc3fb332623f337a7364f36f4bb3f2534c2bb95a35b132c/cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246", size = 3013001, upload-time = "2026-04-08T01:57:34.933Z" },
{ url = "https://files.pythonhosted.org/packages/d2/f1/00ce3bde3ca542d1acd8f8cfa38e446840945aa6363f9b74746394b14127/cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3", size = 3472985, upload-time = "2026-04-08T01:57:36.714Z" },
]

[[package]]

@@ -1,8 +1,8 @@
#!/usr/bin/env bash
set -euo pipefail

SCRIPT_DIR="$(dirname "$(realpath "$0")")"
ROOT="$(dirname "$SCRIPT_DIR")"

cd "$ROOT/docker"
docker compose --env-file middleware.env -f docker-compose.middleware.yaml -p dify up -d

@@ -378,6 +378,20 @@ REDIS_USE_CLUSTERS=false
REDIS_CLUSTERS=
REDIS_CLUSTERS_PASSWORD=

# Redis connection and retry configuration
# Maximum number of retries for Redis operations
REDIS_RETRY_RETRIES=3
# Base delay (in seconds) for exponential backoff on retries
REDIS_RETRY_BACKOFF_BASE=1.0
# Cap (in seconds) for exponential backoff on retries
REDIS_RETRY_BACKOFF_CAP=10.0
# Timeout (in seconds) for Redis socket operations
REDIS_SOCKET_TIMEOUT=5.0
# Timeout (in seconds) for establishing a Redis connection
REDIS_SOCKET_CONNECT_TIMEOUT=5.0
# Interval (in seconds) for Redis health checks
REDIS_HEALTH_CHECK_INTERVAL=30

# ------------------------------
# Celery Configuration
# ------------------------------
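(For illustration, assuming the usual capped-exponential-backoff formula min(cap, base * 2**attempt) — the env keys above supply the inputs, but the exact schedule Dify computes is not shown in this diff:)

    # Illustrative backoff schedule for the defaults above: 1.0s, 2.0s, 4.0s, each capped at 10.0s.
    def redis_retry_delays(retries: int = 3, base: float = 1.0, cap: float = 10.0) -> list[float]:
        return [min(cap, base * (2 ** attempt)) for attempt in range(retries)]
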
@@ -1180,6 +1194,14 @@ MAX_ITERATIONS_NUM=99
# The timeout for text generation, in milliseconds
TEXT_GENERATION_TIMEOUT_MS=60000

# Enable the experimental vinext runtime shipped in the image.
EXPERIMENTAL_ENABLE_VINEXT=false

# Allow inline style attributes in Markdown rendering.
# Enable this if your workflows use Jinja2 templates with styled HTML.
# Only recommended for self-hosted deployments with trusted content.
ALLOW_INLINE_STYLES=false

# Allow rendering unsafe URLs that use the "data:" scheme.
ALLOW_UNSAFE_DATA_SCHEME=false


@ -162,9 +162,11 @@ services:
|
||||
NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
|
||||
SENTRY_DSN: ${WEB_SENTRY_DSN:-}
|
||||
NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
|
||||
EXPERIMENTAL_ENABLE_VINEXT: ${EXPERIMENTAL_ENABLE_VINEXT:-false}
|
||||
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
|
||||
CSP_WHITELIST: ${CSP_WHITELIST:-}
|
||||
ALLOW_EMBED: ${ALLOW_EMBED:-false}
|
||||
ALLOW_INLINE_STYLES: ${ALLOW_INLINE_STYLES:-false}
|
||||
ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
|
||||
MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai}
|
||||
MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace.dify.ai}
|
||||
|
||||
@ -101,6 +101,12 @@ x-shared-env: &shared-api-worker-env
REDIS_USE_CLUSTERS: ${REDIS_USE_CLUSTERS:-false}
REDIS_CLUSTERS: ${REDIS_CLUSTERS:-}
REDIS_CLUSTERS_PASSWORD: ${REDIS_CLUSTERS_PASSWORD:-}
REDIS_RETRY_RETRIES: ${REDIS_RETRY_RETRIES:-3}
REDIS_RETRY_BACKOFF_BASE: ${REDIS_RETRY_BACKOFF_BASE:-1.0}
REDIS_RETRY_BACKOFF_CAP: ${REDIS_RETRY_BACKOFF_CAP:-10.0}
REDIS_SOCKET_TIMEOUT: ${REDIS_SOCKET_TIMEOUT:-5.0}
REDIS_SOCKET_CONNECT_TIMEOUT: ${REDIS_SOCKET_CONNECT_TIMEOUT:-5.0}
REDIS_HEALTH_CHECK_INTERVAL: ${REDIS_HEALTH_CHECK_INTERVAL:-30}
CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://:difyai123456@redis:6379/1}
CELERY_BACKEND: ${CELERY_BACKEND:-redis}
BROKER_USE_SSL: ${BROKER_USE_SSL:-false}
@ -511,6 +517,8 @@ x-shared-env: &shared-api-worker-env
MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99}
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
EXPERIMENTAL_ENABLE_VINEXT: ${EXPERIMENTAL_ENABLE_VINEXT:-false}
ALLOW_INLINE_STYLES: ${ALLOW_INLINE_STYLES:-false}
ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
MAX_TREE_DEPTH: ${MAX_TREE_DEPTH:-50}
PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
@ -873,9 +881,11 @@ services:
NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
SENTRY_DSN: ${WEB_SENTRY_DSN:-}
NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
EXPERIMENTAL_ENABLE_VINEXT: ${EXPERIMENTAL_ENABLE_VINEXT:-false}
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
CSP_WHITELIST: ${CSP_WHITELIST:-}
ALLOW_EMBED: ${ALLOW_EMBED:-false}
ALLOW_INLINE_STYLES: ${ALLOW_INLINE_STYLES:-false}
ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai}
MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace.dify.ai}

e2e/features/smoke/unauthenticated-entry.feature (7 additions, new file)
@ -0,0 +1,7 @@
@smoke @unauthenticated
Feature: Unauthenticated app console entry
  Scenario: Redirect to the sign-in page when opening the apps console without logging in
    Given I am not signed in
    When I open the apps console
    Then I should be redirected to the signin page
    And I should see the "Sign in" button
@ -9,3 +9,10 @@ Given('I am signed in as the default E2E admin', async function (this: DifyWorld
    'text/plain',
  )
})

Given('I am not signed in', async function (this: DifyWorld) {
  this.attach(
    'Using a clean browser context without the shared authenticated storage state.',
    'text/plain',
  )
})

@ -10,6 +10,10 @@ Then('I should stay on the apps console', async function (this: DifyWorld) {
  await expect(this.getPage()).toHaveURL(/\/apps(?:\?.*)?$/)
})

Then('I should be redirected to the signin page', async function (this: DifyWorld) {
  await expect(this.getPage()).toHaveURL(/\/signin(?:\?.*)?$/)
})

Then('I should see the {string} button', async function (this: DifyWorld, label: string) {
  await expect(this.getPage().getByRole('button', { name: label })).toBeVisible()
})

@ -46,7 +46,11 @@ BeforeAll(async () => {
Before(async function (this: DifyWorld, { pickle }) {
  if (!browser) throw new Error('Shared Playwright browser is not available.')

-  await this.startAuthenticatedSession(browser)
+  const isUnauthenticatedScenario = pickle.tags.some((tag) => tag.name === '@unauthenticated')
+
+  if (isUnauthenticatedScenario) await this.startUnauthenticatedSession(browser)
+  else await this.startAuthenticatedSession(browser)

  this.scenarioStartedAt = Date.now()

  const tags = pickle.tags.map((tag) => tag.name).join(' ')

@ -25,12 +25,12 @@ export class DifyWorld extends World {
    this.pageErrors = []
  }

-  async startAuthenticatedSession(browser: Browser) {
+  async startSession(browser: Browser, authenticated: boolean) {
    this.resetScenarioState()
    this.context = await browser.newContext({
      baseURL,
      locale: defaultLocale,
-      storageState: authStatePath,
+      ...(authenticated ? { storageState: authStatePath } : {}),
    })
    this.context.setDefaultTimeout(30_000)
    this.page = await this.context.newPage()
@ -44,6 +44,14 @@ export class DifyWorld extends World {
    })
  }

  async startAuthenticatedSession(browser: Browser) {
    await this.startSession(browser, true)
  }

  async startUnauthenticatedSession(browser: Browser) {
    await this.startSession(browser, false)
  }

  getPage() {
    if (!this.page) throw new Error('Playwright page has not been initialized for this scenario.')

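
Taken together with the hook change above, a scenario now opts out of the shared login purely through its Gherkin tag, and the conditional storage-state spread in startSession is what makes the unauthenticated context start clean. A condensed restatement of that dispatch (names as in the diff; the wrapper function itself is hypothetical):

// Tag-driven session selection, as wired up in the Before hook above.
const startSessionFor = async (world: DifyWorld, browser: Browser, tagNames: string[]) => {
  if (tagNames.includes('@unauthenticated'))
    await world.startUnauthenticatedSession(browser) // fresh context, no storageState
  else
    await world.startAuthenticatedSession(browser) // reuses authStatePath storage state
}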
@ -19,6 +19,7 @@
    "@types/node": "catalog:",
    "tsx": "catalog:",
    "typescript": "catalog:",
    "vite": "catalog:",
    "vite-plus": "catalog:"
  }
}

@ -5,6 +5,7 @@
    "prepare": "vp config"
  },
  "devDependencies": {
    "vite": "catalog:",
    "vite-plus": "catalog:"
  },
  "engines": {

pnpm-lock.yaml (406 lines changed, generated)
@ -249,6 +249,9 @@ catalogs:
  class-variance-authority:
    specifier: 0.7.1
    version: 0.7.1
  client-only:
    specifier: 0.0.1
    version: 0.0.1
  clsx:
    specifier: 2.1.1
    version: 2.1.1
@ -324,9 +327,6 @@ catalogs:
  fast-deep-equal:
    specifier: 3.1.3
    version: 3.1.3
  foxact:
    specifier: 0.3.0
    version: 0.3.0
  happy-dom:
    specifier: 20.8.9
    version: 20.8.9
@ -571,9 +571,12 @@ importers:

.:
  devDependencies:
    vite:
      specifier: npm:@voidzero-dev/vite-plus-core@0.1.16
      version: '@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)'
    vite-plus:
      specifier: 'catalog:'
      version: 0.1.16(@types/node@25.5.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)
      version: 0.1.16(@types/node@25.5.2)(@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)

e2e:
  devDependencies:

@ -592,9 +595,12 @@ importers:

    typescript:
      specifier: 'catalog:'
      version: 6.0.2
    vite:
      specifier: npm:@voidzero-dev/vite-plus-core@0.1.16
      version: '@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)'
    vite-plus:
      specifier: 'catalog:'
      version: 0.1.16(@types/node@25.5.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)
      version: 0.1.16(@types/node@25.5.2)(@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)

packages/iconify-collections:
  devDependencies:

@ -618,19 +624,22 @@

      version: 8.58.1(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2)
    '@vitest/coverage-v8':
      specifier: 'catalog:'
      version: 4.1.3(@voidzero-dev/vite-plus-test@0.1.16(@types/node@25.5.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))
      version: 4.1.3(@voidzero-dev/vite-plus-test@0.1.16(@types/node@25.5.2)(@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))
    eslint:
      specifier: 'catalog:'
      version: 10.2.0(jiti@2.6.1)
    typescript:
      specifier: 'catalog:'
      version: 6.0.2
    vite:
      specifier: npm:@voidzero-dev/vite-plus-core@0.1.16
      version: '@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)'
    vite-plus:
      specifier: 'catalog:'
      version: 0.1.16(@types/node@25.5.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)
      version: 0.1.16(@types/node@25.5.2)(@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)
    vitest:
      specifier: npm:@voidzero-dev/vite-plus-test@0.1.16
      version: '@voidzero-dev/vite-plus-test@0.1.16(@types/node@25.5.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)'
      version: '@voidzero-dev/vite-plus-test@0.1.16(@types/node@25.5.2)(@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)'

web:
  dependencies:

@ -730,6 +739,9 @@ importers:

    class-variance-authority:
      specifier: 'catalog:'
      version: 0.7.1
    client-only:
      specifier: 'catalog:'
      version: 0.0.1
    clsx:
      specifier: 'catalog:'
      version: 2.1.1

@ -775,9 +787,6 @@

    fast-deep-equal:
      specifier: 'catalog:'
      version: 3.1.3
    foxact:
      specifier: 'catalog:'
      version: 0.3.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
    hast-util-to-jsx-runtime:
      specifier: 'catalog:'
      version: 2.3.6
@ -2520,9 +2529,6 @@ packages:
'@oxc-project/types@0.121.0':
  resolution: {integrity: sha512-CGtOARQb9tyv7ECgdAlFxi0Fv7lmzvmlm2rpD/RdijOO9rfk/JvB1CjT8EnoD+tjna/IYgKKw3IV7objRb+aYw==}

'@oxc-project/types@0.122.0':
  resolution: {integrity: sha512-oLAl5kBpV4w69UtFZ9xqcmTi+GENWOcPF7FCrczTiBbmC0ibXxCwyvZGbO39rCVEuLGAZM84DH0pUIyyv/YJzA==}

'@oxc-project/types@0.123.0':
  resolution: {integrity: sha512-YtECP/y8Mj1lSHiUWGSRzy/C6teUKlS87dEfuVKT09LgQbUsBW1rNg+MiJ4buGu3yuADV60gbIvo9/HplA56Ew==}

@ -3290,104 +3296,6 @@ packages:
  resolution: {integrity: sha512-UuBOt7BOsKVOkFXRe4Ypd/lADuNIfqJXv8GvHqtXaTYXPPKkj2nS2zPllVsrtRjcomDhIJVBnZwfmlI222WH8g==}
  engines: {node: '>=14.0.0'}

'@rolldown/binding-android-arm64@1.0.0-rc.12':
  resolution: {integrity: sha512-pv1y2Fv0JybcykuiiD3qBOBdz6RteYojRFY1d+b95WVuzx211CRh+ytI/+9iVyWQ6koTh5dawe4S/yRfOFjgaA==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [arm64]
  os: [android]

'@rolldown/binding-darwin-arm64@1.0.0-rc.12':
  resolution: {integrity: sha512-cFYr6zTG/3PXXF3pUO+umXxt1wkRK/0AYT8lDwuqvRC+LuKYWSAQAQZjCWDQpAH172ZV6ieYrNnFzVVcnSflAg==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [arm64]
  os: [darwin]

'@rolldown/binding-darwin-x64@1.0.0-rc.12':
  resolution: {integrity: sha512-ZCsYknnHzeXYps0lGBz8JrF37GpE9bFVefrlmDrAQhOEi4IOIlcoU1+FwHEtyXGx2VkYAvhu7dyBf75EJQffBw==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [x64]
  os: [darwin]

'@rolldown/binding-freebsd-x64@1.0.0-rc.12':
  resolution: {integrity: sha512-dMLeprcVsyJsKolRXyoTH3NL6qtsT0Y2xeuEA8WQJquWFXkEC4bcu1rLZZSnZRMtAqwtrF/Ib9Ddtpa/Gkge9Q==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [x64]
  os: [freebsd]

'@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.12':
  resolution: {integrity: sha512-YqWjAgGC/9M1lz3GR1r1rP79nMgo3mQiiA+Hfo+pvKFK1fAJ1bCi0ZQVh8noOqNacuY1qIcfyVfP6HoyBRZ85Q==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [arm]
  os: [linux]

'@rolldown/binding-linux-arm64-gnu@1.0.0-rc.12':
  resolution: {integrity: sha512-/I5AS4cIroLpslsmzXfwbe5OmWvSsrFuEw3mwvbQ1kDxJ822hFHIx+vsN/TAzNVyepI/j/GSzrtCIwQPeKCLIg==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [arm64]
  os: [linux]
  libc: [glibc]

'@rolldown/binding-linux-arm64-musl@1.0.0-rc.12':
  resolution: {integrity: sha512-V6/wZztnBqlx5hJQqNWwFdxIKN0m38p8Jas+VoSfgH54HSj9tKTt1dZvG6JRHcjh6D7TvrJPWFGaY9UBVOaWPw==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [arm64]
  os: [linux]
  libc: [musl]

'@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.12':
  resolution: {integrity: sha512-AP3E9BpcUYliZCxa3w5Kwj9OtEVDYK6sVoUzy4vTOJsjPOgdaJZKFmN4oOlX0Wp0RPV2ETfmIra9x1xuayFB7g==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [ppc64]
  os: [linux]
  libc: [glibc]

'@rolldown/binding-linux-s390x-gnu@1.0.0-rc.12':
  resolution: {integrity: sha512-nWwpvUSPkoFmZo0kQazZYOrT7J5DGOJ/+QHHzjvNlooDZED8oH82Yg67HvehPPLAg5fUff7TfWFHQS8IV1n3og==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [s390x]
  os: [linux]
  libc: [glibc]

'@rolldown/binding-linux-x64-gnu@1.0.0-rc.12':
  resolution: {integrity: sha512-RNrafz5bcwRy+O9e6P8Z/OCAJW/A+qtBczIqVYwTs14pf4iV1/+eKEjdOUta93q2TsT/FI0XYDP3TCky38LMAg==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [x64]
  os: [linux]
  libc: [glibc]

'@rolldown/binding-linux-x64-musl@1.0.0-rc.12':
  resolution: {integrity: sha512-Jpw/0iwoKWx3LJ2rc1yjFrj+T7iHZn2JDg1Yny1ma0luviFS4mhAIcd1LFNxK3EYu3DHWCps0ydXQ5i/rrJ2ig==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [x64]
  os: [linux]
  libc: [musl]

'@rolldown/binding-openharmony-arm64@1.0.0-rc.12':
  resolution: {integrity: sha512-vRugONE4yMfVn0+7lUKdKvN4D5YusEiPilaoO2sgUWpCvrncvWgPMzK00ZFFJuiPgLwgFNP5eSiUlv2tfc+lpA==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [arm64]
  os: [openharmony]

'@rolldown/binding-wasm32-wasi@1.0.0-rc.12':
  resolution: {integrity: sha512-ykGiLr/6kkiHc0XnBfmFJuCjr5ZYKKofkx+chJWDjitX+KsJuAmrzWhwyOMSHzPhzOHOy7u9HlFoa5MoAOJ/Zg==}
  engines: {node: '>=14.0.0'}
  cpu: [wasm32]

'@rolldown/binding-win32-arm64-msvc@1.0.0-rc.12':
  resolution: {integrity: sha512-5eOND4duWkwx1AzCxadcOrNeighiLwMInEADT0YM7xeEOOFcovWZCq8dadXgcRHSf3Ulh1kFo/qvzoFiCLOL1Q==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [arm64]
  os: [win32]

'@rolldown/binding-win32-x64-msvc@1.0.0-rc.12':
  resolution: {integrity: sha512-PyqoipaswDLAZtot351MLhrlrh6lcZPo2LSYE+VDxbVk24LVKAGOuE4hb8xZQmrPAuEtTZW8E6D2zc5EUZX4Lw==}
  engines: {node: ^20.19.0 || >=22.12.0}
  cpu: [x64]
  os: [win32]

'@rolldown/pluginutils@1.0.0-rc.12':
  resolution: {integrity: sha512-HHMwmarRKvoFsJorqYlFeFRzXZqCt2ETQlEDOb9aqssrnVBB1/+xgTGtuTrIk5vzLNX1MjMtTf7W9z3tsSbrxw==}

'@rolldown/pluginutils@1.0.0-rc.13':
  resolution: {integrity: sha512-3ngTAv6F/Py35BsYbeeLeecvhMKdsKm4AoOETVhAA+Qc8nrA2I0kF7oa93mE9qnIurngOSpMnQ0x2nQY2FPviA==}

@ -5979,9 +5887,6 @@ packages:
  resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==}
  engines: {node: '>=0.10.0'}

event-target-bus@1.0.0:
  resolution: {integrity: sha512-uPcWKbj/BJU3Tbw9XqhHqET4/LBOhvv3/SJWr7NksxA6TC5YqBpaZgawE9R+WpYFCBFSAE4Vun+xQS6w4ABdlA==}

events@3.3.0:
  resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==}
  engines: {node: '>=0.8.x'}
@ -6094,17 +5999,6 @@ packages:
  engines: {node: '>=18.3.0'}
  hasBin: true

foxact@0.3.0:
  resolution: {integrity: sha512-CSlMlC0KlKQQEO83iLeQCLuT1V0OqnMWj7mjLstIDV8baMe1w4F7z3cz3/T+6Z8W12jqkQj07rwlw4Gi39knGg==}
  peerDependencies:
    react: '*'
    react-dom: '*'
  peerDependenciesMeta:
    react:
      optional: true
    react-dom:
      optional: true

fs-constants@1.0.0:
  resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==}

@ -7744,11 +7638,6 @@ packages:
robust-predicates@3.0.3:
  resolution: {integrity: sha512-NS3levdsRIUOmiJ8FZWCP7LG3QpJyrs/TE0Zpf1yvZu8cAJJ6QMW92H1c7kWpdIHo8RvmLxN/o2JXTKHp74lUA==}

rolldown@1.0.0-rc.12:
  resolution: {integrity: sha512-yP4USLIMYrwpPHEFB5JGH1uxhcslv6/hL0OyvTuY+3qlOSJvZ7ntYnoWpehBxufkgN0cvXxppuTu5hHa/zPh+A==}
  engines: {node: ^20.19.0 || >=22.12.0}
  hasBin: true

rollup@4.59.0:
  resolution: {integrity: sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==}
  engines: {node: '>=18.0.0', npm: '>=8.0.0'}
@ -7823,9 +7712,6 @@ packages:
  resolution: {integrity: sha512-OwrZRZAfhHww0WEnKHDY8OM0U/Qs8OTfIDWhUD4BLpNJUfXK4cGmjiagGze086m+mhI+V2nD0gfbHEnJjb9STA==}
  engines: {node: '>=10'}

server-only@0.0.1:
  resolution: {integrity: sha512-qepMx2JxAa5jjfzxG79yPPq+8BuFToHd1hm7kI+Z4zAq1ftQiP7HcxMhDDItrbtwVeLg/cY2JnKnrcFkmiswNA==}

sharp@0.34.5:
  resolution: {integrity: sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==}
  engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
@ -8494,49 +8380,6 @@ packages:
  peerDependencies:
    vite: '*'

vite@8.0.3:
  resolution: {integrity: sha512-B9ifbFudT1TFhfltfaIPgjo9Z3mDynBTJSUYxTjOQruf/zHH+ezCQKcoqO+h7a9Pw9Nm/OtlXAiGT1axBgwqrQ==}
  engines: {node: ^20.19.0 || >=22.12.0}
  hasBin: true
  peerDependencies:
    '@types/node': ^20.19.0 || >=22.12.0
    '@vitejs/devtools': ^0.1.0
    esbuild: 0.27.2
    jiti: '>=1.21.0'
    less: ^4.0.0
    sass: ^1.70.0
    sass-embedded: ^1.70.0
    stylus: '>=0.54.8'
    sugarss: ^5.0.0
    terser: ^5.16.0
    tsx: ^4.8.1
    yaml: 2.8.3
  peerDependenciesMeta:
    '@types/node':
      optional: true
    '@vitejs/devtools':
      optional: true
    esbuild:
      optional: true
    jiti:
      optional: true
    less:
      optional: true
    sass:
      optional: true
    sass-embedded:
      optional: true
    stylus:
      optional: true
    sugarss:
      optional: true
    terser:
      optional: true
    tsx:
      optional: true
    yaml:
      optional: true

vitefu@1.1.3:
  resolution: {integrity: sha512-ub4okH7Z5KLjb6hDyjqrGXqWtWvoYdU3IGm/NorpgHncKoLTCfRIbvlhBm7r0YstIaQRYlp4yEbFqDcKSzXSSg==}
  peerDependencies:
@ -10309,8 +10152,6 @@ snapshots:

'@oxc-project/types@0.121.0': {}

'@oxc-project/types@0.122.0': {}

'@oxc-project/types@0.123.0': {}

'@oxc-resolver/binding-android-arm-eabi@11.19.1':
@ -10875,58 +10716,6 @@ snapshots:

'@rgrove/parse-xml@4.2.0': {}

'@rolldown/binding-android-arm64@1.0.0-rc.12':
  optional: true

'@rolldown/binding-darwin-arm64@1.0.0-rc.12':
  optional: true

'@rolldown/binding-darwin-x64@1.0.0-rc.12':
  optional: true

'@rolldown/binding-freebsd-x64@1.0.0-rc.12':
  optional: true

'@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.12':
  optional: true

'@rolldown/binding-linux-arm64-gnu@1.0.0-rc.12':
  optional: true

'@rolldown/binding-linux-arm64-musl@1.0.0-rc.12':
  optional: true

'@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.12':
  optional: true

'@rolldown/binding-linux-s390x-gnu@1.0.0-rc.12':
  optional: true

'@rolldown/binding-linux-x64-gnu@1.0.0-rc.12':
  optional: true

'@rolldown/binding-linux-x64-musl@1.0.0-rc.12':
  optional: true

'@rolldown/binding-openharmony-arm64@1.0.0-rc.12':
  optional: true

'@rolldown/binding-wasm32-wasi@1.0.0-rc.12(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)':
  dependencies:
    '@napi-rs/wasm-runtime': 1.1.2(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
  transitivePeerDependencies:
    - '@emnapi/core'
    - '@emnapi/runtime'
  optional: true

'@rolldown/binding-win32-arm64-msvc@1.0.0-rc.12':
  optional: true

'@rolldown/binding-win32-x64-msvc@1.0.0-rc.12':
  optional: true

'@rolldown/pluginutils@1.0.0-rc.12': {}

'@rolldown/pluginutils@1.0.0-rc.13': {}

'@rolldown/pluginutils@1.0.0-rc.7': {}
@ -12151,20 +11940,6 @@ snapshots:
    tinyrainbow: 3.1.0
    vitest: '@voidzero-dev/vite-plus-test@0.1.16(@types/node@25.5.2)(@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)'

'@vitest/coverage-v8@4.1.3(@voidzero-dev/vite-plus-test@0.1.16(@types/node@25.5.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3))':
  dependencies:
    '@bcoe/v8-coverage': 1.0.2
    '@vitest/utils': 4.1.3
    ast-v8-to-istanbul: 1.0.0
    istanbul-lib-coverage: 3.2.2
    istanbul-lib-report: 3.0.1
    istanbul-reports: 3.2.0
    magicast: 0.5.2
    obug: 2.1.1
    std-env: 4.0.0
    tinyrainbow: 3.1.0
    vitest: '@voidzero-dev/vite-plus-test@0.1.16(@types/node@25.5.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)'

'@vitest/eslint-plugin@1.6.14(@typescript-eslint/eslint-plugin@8.58.1(@typescript-eslint/parser@8.58.1(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@voidzero-dev/vite-plus-test@0.1.16(@types/node@25.5.2)(@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2)':
  dependencies:
    '@typescript-eslint/scope-manager': 8.58.1
@ -12283,46 +12058,6 @@ snapshots:
    - utf-8-validate
    - yaml

'@voidzero-dev/vite-plus-test@0.1.16(@types/node@25.5.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)':
  dependencies:
    '@standard-schema/spec': 1.1.0
    '@types/chai': 5.2.3
    '@voidzero-dev/vite-plus-core': 0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)
    es-module-lexer: 1.7.0
    obug: 2.1.1
    pixelmatch: 7.1.0
    pngjs: 7.0.0
    sirv: 3.0.2
    std-env: 4.0.0
    tinybench: 2.9.0
    tinyexec: 1.0.4
    tinyglobby: 0.2.15
    vite: 8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)
    ws: 8.20.0
  optionalDependencies:
    '@types/node': 25.5.2
    happy-dom: 20.8.9
  transitivePeerDependencies:
    - '@arethetypeswrong/core'
    - '@tsdown/css'
    - '@tsdown/exe'
    - '@vitejs/devtools'
    - bufferutil
    - esbuild
    - jiti
    - less
    - publint
    - sass
    - sass-embedded
    - stylus
    - sugarss
    - terser
    - tsx
    - typescript
    - unplugin-unused
    - utf-8-validate
    - yaml

'@voidzero-dev/vite-plus-win32-arm64-msvc@0.1.16':
  optional: true

@ -13856,8 +13591,6 @@ snapshots:

esutils@2.0.3: {}

event-target-bus@1.0.0: {}

events@3.3.0: {}

expand-template@2.0.3:
@ -13965,15 +13698,6 @@ snapshots:
  dependencies:
    fd-package-json: 2.0.0

foxact@0.3.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4):
  dependencies:
    client-only: 0.0.1
    event-target-bus: 1.0.0
    server-only: 0.0.1
  optionalDependencies:
    react: 19.2.4
    react-dom: 19.2.4(react@19.2.4)

fs-constants@1.0.0:
  optional: true

@ -16104,30 +15828,6 @@ snapshots:

robust-predicates@3.0.3: {}

rolldown@1.0.0-rc.12(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1):
  dependencies:
    '@oxc-project/types': 0.122.0
    '@rolldown/pluginutils': 1.0.0-rc.12
  optionalDependencies:
    '@rolldown/binding-android-arm64': 1.0.0-rc.12
    '@rolldown/binding-darwin-arm64': 1.0.0-rc.12
    '@rolldown/binding-darwin-x64': 1.0.0-rc.12
    '@rolldown/binding-freebsd-x64': 1.0.0-rc.12
    '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-rc.12
    '@rolldown/binding-linux-arm64-gnu': 1.0.0-rc.12
    '@rolldown/binding-linux-arm64-musl': 1.0.0-rc.12
    '@rolldown/binding-linux-ppc64-gnu': 1.0.0-rc.12
    '@rolldown/binding-linux-s390x-gnu': 1.0.0-rc.12
    '@rolldown/binding-linux-x64-gnu': 1.0.0-rc.12
    '@rolldown/binding-linux-x64-musl': 1.0.0-rc.12
    '@rolldown/binding-openharmony-arm64': 1.0.0-rc.12
    '@rolldown/binding-wasm32-wasi': 1.0.0-rc.12(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
    '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.12
    '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.12
  transitivePeerDependencies:
    - '@emnapi/core'
    - '@emnapi/runtime'

rollup@4.59.0:
  dependencies:
    '@types/estree': 1.0.8
@ -16233,8 +15933,6 @@ snapshots:

seroval@1.5.1: {}

server-only@0.0.1: {}

sharp@0.34.5:
  dependencies:
    '@img/colour': 1.1.0
@ -16967,51 +16665,6 @@ snapshots:
    - vite
    - yaml

vite-plus@0.1.16(@types/node@25.5.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3):
  dependencies:
    '@oxc-project/types': 0.123.0
    '@voidzero-dev/vite-plus-core': 0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)
    '@voidzero-dev/vite-plus-test': 0.1.16(@types/node@25.5.2)(happy-dom@20.8.9)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))(yaml@2.8.3)
    oxfmt: 0.43.0
    oxlint: 1.58.0(oxlint-tsgolint@0.20.0)
    oxlint-tsgolint: 0.20.0
  optionalDependencies:
    '@voidzero-dev/vite-plus-darwin-arm64': 0.1.16
    '@voidzero-dev/vite-plus-darwin-x64': 0.1.16
    '@voidzero-dev/vite-plus-linux-arm64-gnu': 0.1.16
    '@voidzero-dev/vite-plus-linux-arm64-musl': 0.1.16
    '@voidzero-dev/vite-plus-linux-x64-gnu': 0.1.16
    '@voidzero-dev/vite-plus-linux-x64-musl': 0.1.16
    '@voidzero-dev/vite-plus-win32-arm64-msvc': 0.1.16
    '@voidzero-dev/vite-plus-win32-x64-msvc': 0.1.16
  transitivePeerDependencies:
    - '@arethetypeswrong/core'
    - '@edge-runtime/vm'
    - '@opentelemetry/api'
    - '@tsdown/css'
    - '@tsdown/exe'
    - '@types/node'
    - '@vitejs/devtools'
    - '@vitest/ui'
    - bufferutil
    - esbuild
    - happy-dom
    - jiti
    - jsdom
    - less
    - publint
    - sass
    - sass-embedded
    - stylus
    - sugarss
    - terser
    - tsx
    - typescript
    - unplugin-unused
    - utf-8-validate
    - vite
    - yaml

vite-tsconfig-paths@5.1.4(@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(typescript@6.0.2):
  dependencies:
    debug: 4.4.3(supports-color@8.1.1)
@ -17033,25 +16686,6 @@ snapshots:
    - supports-color
    - typescript

vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3):
  dependencies:
    lightningcss: 1.32.0
    picomatch: 4.0.4
    postcss: 8.5.9
    rolldown: 1.0.0-rc.12(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)
    tinyglobby: 0.2.15
  optionalDependencies:
    '@types/node': 25.5.2
    fsevents: 2.3.3
    jiti: 2.6.1
    sass: 1.98.0
    terser: 5.46.1
    tsx: 4.21.0
    yaml: 2.8.3
  transitivePeerDependencies:
    - '@emnapi/core'
    - '@emnapi/runtime'

vitefu@1.1.3(@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)):
  optionalDependencies:
    vite: '@voidzero-dev/vite-plus-core@0.1.16(@types/node@25.5.2)(jiti@2.6.1)(sass@1.98.0)(terser@5.46.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)'

@ -129,6 +129,7 @@ catalog:
  ahooks: 3.9.7
  autoprefixer: 10.4.27
  class-variance-authority: 0.7.1
  client-only: 0.0.1
  clsx: 2.1.1
  cmdk: 1.1.1
  code-inspector-plugin: 1.5.1

@ -154,7 +155,6 @@ catalog:

  eslint-plugin-sonarjs: 4.0.2
  eslint-plugin-storybook: 10.3.5
  fast-deep-equal: 3.1.3
  foxact: 0.3.0
  happy-dom: 20.8.9
  hast-util-to-jsx-runtime: 2.3.6
  hono: 4.12.12

@ -62,6 +62,7 @@
    "@vitest/coverage-v8": "catalog:",
    "eslint": "catalog:",
    "typescript": "catalog:",
    "vite": "catalog:",
    "vite-plus": "catalog:",
    "vitest": "catalog:"
  }

@ -50,6 +50,9 @@ NEXT_PUBLIC_CSP_WHITELIST=
# By default, embedding into an iframe is not allowed, to prevent Clickjacking: https://owasp.org/www-community/attacks/Clickjacking
NEXT_PUBLIC_ALLOW_EMBED=

# Allow inline style attributes in Markdown rendering (self-hosted opt-in).
NEXT_PUBLIC_ALLOW_INLINE_STYLES=false

# Allow rendering unsafe URLs which use the "data:" scheme.
NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME=false

@ -42,7 +42,7 @@ COPY . .

WORKDIR /app/web
ENV NODE_OPTIONS="--max-old-space-size=4096"
-RUN pnpm build
+RUN pnpm build && pnpm build:vinext


# production stage
@ -56,6 +56,7 @@ ENV APP_API_URL=http://127.0.0.1:5001
ENV MARKETPLACE_API_URL=https://marketplace.dify.ai
ENV MARKETPLACE_URL=https://marketplace.dify.ai
ENV PORT=3000
ENV EXPERIMENTAL_ENABLE_VINEXT=false
ENV NEXT_TELEMETRY_DISABLED=1

# set timezone
@ -73,9 +74,10 @@ RUN addgroup -S -g ${dify_uid} dify && \

WORKDIR /app

-COPY --from=builder --chown=dify:dify /app/web/public ./web/public
-COPY --from=builder --chown=dify:dify /app/web/.next/standalone ./
-COPY --from=builder --chown=dify:dify /app/web/.next/static ./web/.next/static
+COPY --from=builder --chown=dify:dify /app/web/public ./targets/next/web/public
+COPY --from=builder --chown=dify:dify /app/web/.next/standalone ./targets/next/
+COPY --from=builder --chown=dify:dify /app/web/.next/static ./targets/next/web/.next/static
+COPY --from=builder --chown=dify:dify /app/web/dist/standalone ./targets/vinext

COPY --chown=dify:dify --chmod=755 web/docker/entrypoint.sh ./entrypoint.sh

@ -0,0 +1,10 @@
import { render, screen } from '@testing-library/react'
import HitHistoryNoData from '../hit-history-no-data'

describe('HitHistoryNoData', () => {
  it('should render the empty history message', () => {
    render(<HitHistoryNoData />)

    expect(screen.getByText('appAnnotation.viewModal.noHitHistory')).toBeInTheDocument()
  })
})
@ -0,0 +1,32 @@
/* eslint-disable ts/no-explicit-any */
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import AccessControlDialog from '../access-control-dialog'

describe('AccessControlDialog', () => {
  it('should render dialog content when visible', () => {
    render(
      <AccessControlDialog show className="custom-dialog">
        <div>Dialog Content</div>
      </AccessControlDialog>,
    )

    expect(screen.getByRole('dialog')).toBeInTheDocument()
    expect(screen.getByText('Dialog Content')).toBeInTheDocument()
  })

  it('should trigger onClose when clicking the close control', async () => {
    const onClose = vi.fn()
    render(
      <AccessControlDialog show onClose={onClose}>
        <div>Dialog Content</div>
      </AccessControlDialog>,
    )

    const closeButton = document.body.querySelector('div.absolute.right-5.top-5') as HTMLElement
    fireEvent.click(closeButton)

    await waitFor(() => {
      expect(onClose).toHaveBeenCalledTimes(1)
    })
  })
})
@ -0,0 +1,45 @@
import { fireEvent, render, screen } from '@testing-library/react'
import useAccessControlStore from '@/context/access-control-store'
import { AccessMode } from '@/models/access-control'
import AccessControlItem from '../access-control-item'

describe('AccessControlItem', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    useAccessControlStore.setState({
      appId: '',
      specificGroups: [],
      specificMembers: [],
      currentMenu: AccessMode.PUBLIC,
      selectedGroupsForBreadcrumb: [],
    })
  })

  it('should update current menu when selecting a different access type', () => {
    render(
      <AccessControlItem type={AccessMode.ORGANIZATION}>
        <span>Organization Only</span>
      </AccessControlItem>,
    )

    const option = screen.getByText('Organization Only').parentElement as HTMLElement
    fireEvent.click(option)

    expect(useAccessControlStore.getState().currentMenu).toBe(AccessMode.ORGANIZATION)
  })

  it('should keep the selected state for the active access type', () => {
    useAccessControlStore.setState({
      currentMenu: AccessMode.ORGANIZATION,
    })

    render(
      <AccessControlItem type={AccessMode.ORGANIZATION}>
        <span>Organization Only</span>
      </AccessControlItem>,
    )

    const option = screen.getByText('Organization Only').parentElement as HTMLElement
    expect(option).toHaveClass('border-components-option-card-option-selected-border')
  })
})
@ -0,0 +1,130 @@
import type { AccessControlAccount, AccessControlGroup, Subject } from '@/models/access-control'
import { fireEvent, render, screen } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import useAccessControlStore from '@/context/access-control-store'
import { SubjectType } from '@/models/access-control'
import AddMemberOrGroupDialog from '../add-member-or-group-pop'

const mockUseSearchForWhiteListCandidates = vi.fn()
const intersectionObserverMocks = vi.hoisted(() => ({
  callback: null as null | ((entries: Array<{ isIntersecting: boolean }>) => void),
}))

vi.mock('@/context/app-context', () => ({
  useSelector: <T,>(selector: (value: { userProfile: { email: string } }) => T) => selector({
    userProfile: {
      email: 'member@example.com',
    },
  }),
}))

vi.mock('@/service/access-control', () => ({
  useSearchForWhiteListCandidates: (...args: unknown[]) => mockUseSearchForWhiteListCandidates(...args),
}))

const createGroup = (overrides: Partial<AccessControlGroup> = {}): AccessControlGroup => ({
  id: 'group-1',
  name: 'Group One',
  groupSize: 5,
  ...overrides,
} as AccessControlGroup)

const createMember = (overrides: Partial<AccessControlAccount> = {}): AccessControlAccount => ({
  id: 'member-1',
  name: 'Member One',
  email: 'member@example.com',
  avatar: '',
  avatarUrl: '',
  ...overrides,
} as AccessControlAccount)

describe('AddMemberOrGroupDialog', () => {
  const baseGroup = createGroup()
  const baseMember = createMember()
  const groupSubject: Subject = {
    subjectId: baseGroup.id,
    subjectType: SubjectType.GROUP,
    groupData: baseGroup,
  } as Subject
  const memberSubject: Subject = {
    subjectId: baseMember.id,
    subjectType: SubjectType.ACCOUNT,
    accountData: baseMember,
  } as Subject

  beforeAll(() => {
    class MockIntersectionObserver {
      constructor(callback: (entries: Array<{ isIntersecting: boolean }>) => void) {
        intersectionObserverMocks.callback = callback
      }

      observe = vi.fn(() => undefined)
      disconnect = vi.fn(() => undefined)
      unobserve = vi.fn(() => undefined)
    }

    // @ts-expect-error test DOM typings do not guarantee IntersectionObserver here
    globalThis.IntersectionObserver = MockIntersectionObserver
  })

  beforeEach(() => {
    vi.clearAllMocks()
    useAccessControlStore.setState({
      appId: 'app-1',
      specificGroups: [],
      specificMembers: [],
      currentMenu: SubjectType.GROUP as never,
      selectedGroupsForBreadcrumb: [],
    })
    mockUseSearchForWhiteListCandidates.mockReturnValue({
      isLoading: false,
      isFetchingNextPage: false,
      fetchNextPage: vi.fn(),
      data: {
        pages: [{ currPage: 1, subjects: [groupSubject, memberSubject], hasMore: false }],
      },
    })
  })

  it('should open the search popover and display candidates', async () => {
    const user = userEvent.setup()
    render(<AddMemberOrGroupDialog />)

    await user.click(screen.getByText('common.operation.add'))

    expect(screen.getByPlaceholderText('app.accessControlDialog.operateGroupAndMember.searchPlaceholder')).toBeInTheDocument()
    expect(screen.getByText(baseGroup.name)).toBeInTheDocument()
    expect(screen.getByText(baseMember.name)).toBeInTheDocument()
  })

  it('should allow expanding groups and selecting members', async () => {
    const user = userEvent.setup()
    render(<AddMemberOrGroupDialog />)

    await user.click(screen.getByText('common.operation.add'))
    await user.click(screen.getByText('app.accessControlDialog.operateGroupAndMember.expand'))

    expect(useAccessControlStore.getState().selectedGroupsForBreadcrumb).toEqual([baseGroup])

    const memberCheckbox = screen.getByText(baseMember.name).parentElement?.previousElementSibling as HTMLElement
    fireEvent.click(memberCheckbox)

    expect(useAccessControlStore.getState().specificMembers).toEqual([baseMember])
  })

  it('should show the empty state when no candidates are returned', async () => {
    mockUseSearchForWhiteListCandidates.mockReturnValue({
      isLoading: false,
      isFetchingNextPage: false,
      fetchNextPage: vi.fn(),
      data: { pages: [] },
    })

    const user = userEvent.setup()
    render(<AddMemberOrGroupDialog />)

    await user.click(screen.getByText('common.operation.add'))

    expect(screen.getByText('app.accessControlDialog.operateGroupAndMember.noResult')).toBeInTheDocument()
  })
})
@ -0,0 +1,121 @@
/* eslint-disable ts/no-explicit-any */
import type { App } from '@/types/app'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { toast } from '@/app/components/base/ui/toast'
import useAccessControlStore from '@/context/access-control-store'
import { AccessMode } from '@/models/access-control'
import AccessControl from '../index'

const mockMutateAsync = vi.fn()
const mockUseUpdateAccessMode = vi.fn(() => ({
  isPending: false,
  mutateAsync: mockMutateAsync,
}))
const mockUseAppWhiteListSubjects = vi.fn()
const mockUseSearchForWhiteListCandidates = vi.fn()
let mockWebappAuth = {
  enabled: true,
  allow_sso: true,
  allow_email_password_login: false,
  allow_email_code_login: false,
}

vi.mock('@/context/global-public-context', () => ({
  useGlobalPublicStore: (selector: (state: { systemFeatures: { webapp_auth: typeof mockWebappAuth } }) => unknown) => selector({
    systemFeatures: {
      webapp_auth: mockWebappAuth,
    },
  }),
}))

vi.mock('@/service/access-control', () => ({
  useAppWhiteListSubjects: (...args: unknown[]) => mockUseAppWhiteListSubjects(...args),
  useSearchForWhiteListCandidates: (...args: unknown[]) => mockUseSearchForWhiteListCandidates(...args),
  useUpdateAccessMode: () => mockUseUpdateAccessMode(),
}))

describe('AccessControl', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    mockWebappAuth = {
      enabled: true,
      allow_sso: true,
      allow_email_password_login: false,
      allow_email_code_login: false,
    }
    useAccessControlStore.setState({
      appId: '',
      specificGroups: [],
      specificMembers: [],
      currentMenu: AccessMode.SPECIFIC_GROUPS_MEMBERS,
      selectedGroupsForBreadcrumb: [],
    })
    mockMutateAsync.mockResolvedValue(undefined)
    mockUseAppWhiteListSubjects.mockReturnValue({
      isPending: false,
      data: {
        groups: [],
        members: [],
      },
    })
    mockUseSearchForWhiteListCandidates.mockReturnValue({
      isLoading: false,
      isFetchingNextPage: false,
      fetchNextPage: vi.fn(),
      data: { pages: [] },
    })
  })

  it('should initialize menu from the app and update access mode on confirm', async () => {
    const onClose = vi.fn()
    const onConfirm = vi.fn()
    const toastSpy = vi.spyOn(toast, 'success').mockReturnValue('toast-success')
    const app = {
      id: 'app-id-1',
      access_mode: AccessMode.PUBLIC,
    } as App

    render(
      <AccessControl
        app={app}
        onClose={onClose}
        onConfirm={onConfirm}
      />,
    )

    await waitFor(() => {
      expect(useAccessControlStore.getState().appId).toBe(app.id)
      expect(useAccessControlStore.getState().currentMenu).toBe(AccessMode.PUBLIC)
    })

    fireEvent.click(screen.getByText('common.operation.confirm'))

    await waitFor(() => {
      expect(mockMutateAsync).toHaveBeenCalledWith({
        appId: app.id,
        accessMode: AccessMode.PUBLIC,
      })
      expect(toastSpy).toHaveBeenCalledWith('app.accessControlDialog.updateSuccess')
      expect(onConfirm).toHaveBeenCalledTimes(1)
    })
  })

  it('should show the external-members option when SSO tip is visible', () => {
    mockWebappAuth = {
      enabled: false,
      allow_sso: false,
      allow_email_password_login: false,
      allow_email_code_login: false,
    }

    render(
      <AccessControl
        app={{ id: 'app-id-2', access_mode: AccessMode.PUBLIC } as App}
        onClose={vi.fn()}
      />,
    )

    expect(screen.getByText('app.accessControlDialog.accessItems.external')).toBeInTheDocument()
    expect(screen.getByText('app.accessControlDialog.accessItems.anyone')).toBeInTheDocument()
  })
})
@ -0,0 +1,97 @@
import type { AccessControlAccount, AccessControlGroup } from '@/models/access-control'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import useAccessControlStore from '@/context/access-control-store'
import { AccessMode } from '@/models/access-control'
import SpecificGroupsOrMembers from '../specific-groups-or-members'

const mockUseAppWhiteListSubjects = vi.fn()

vi.mock('@/service/access-control', () => ({
  useAppWhiteListSubjects: (...args: unknown[]) => mockUseAppWhiteListSubjects(...args),
}))

vi.mock('../add-member-or-group-pop', () => ({
  default: () => <div data-testid="add-member-or-group-dialog" />,
}))

const createGroup = (overrides: Partial<AccessControlGroup> = {}): AccessControlGroup => ({
  id: 'group-1',
  name: 'Group One',
  groupSize: 5,
  ...overrides,
} as AccessControlGroup)

const createMember = (overrides: Partial<AccessControlAccount> = {}): AccessControlAccount => ({
  id: 'member-1',
  name: 'Member One',
  email: 'member@example.com',
  avatar: '',
  avatarUrl: '',
  ...overrides,
} as AccessControlAccount)

describe('SpecificGroupsOrMembers', () => {
  const baseGroup = createGroup()
  const baseMember = createMember()

  beforeEach(() => {
    vi.clearAllMocks()
    useAccessControlStore.setState({
      appId: '',
      specificGroups: [],
      specificMembers: [],
      currentMenu: AccessMode.SPECIFIC_GROUPS_MEMBERS,
      selectedGroupsForBreadcrumb: [],
    })
    mockUseAppWhiteListSubjects.mockReturnValue({
      isPending: false,
      data: {
        groups: [baseGroup],
        members: [baseMember],
      },
    })
  })

  it('should render the collapsed row when not in specific mode', () => {
    useAccessControlStore.setState({
      currentMenu: AccessMode.ORGANIZATION,
    })

    render(<SpecificGroupsOrMembers />)

    expect(screen.getByText('app.accessControlDialog.accessItems.specific')).toBeInTheDocument()
    expect(screen.queryByTestId('add-member-or-group-dialog')).not.toBeInTheDocument()
  })

  it('should show loading while whitelist subjects are pending', async () => {
    mockUseAppWhiteListSubjects.mockReturnValue({
      isPending: true,
      data: undefined,
    })

    const { container } = render(<SpecificGroupsOrMembers />)

    await waitFor(() => {
      expect(container.querySelector('.spin-animation')).toBeInTheDocument()
    })
  })

  it('should render fetched groups and members and support removal', async () => {
    useAccessControlStore.setState({ appId: 'app-1' })

    render(<SpecificGroupsOrMembers />)

    await waitFor(() => {
      expect(screen.getByText(baseGroup.name)).toBeInTheDocument()
      expect(screen.getByText(baseMember.name)).toBeInTheDocument()
    })

    const groupRemove = screen.getByText(baseGroup.name).closest('div')?.querySelector('.h-4.w-4.cursor-pointer') as HTMLElement
    fireEvent.click(groupRemove)
    expect(useAccessControlStore.getState().specificGroups).toEqual([])

    const memberRemove = screen.getByText(baseMember.name).closest('div')?.querySelector('.h-4.w-4.cursor-pointer') as HTMLElement
    fireEvent.click(memberRemove)
    expect(useAccessControlStore.getState().specificMembers).toEqual([])
  })
})
@ -0,0 +1,26 @@
import { render, screen } from '@testing-library/react'
import { InputVarType } from '@/app/components/workflow/types'
import InputTypeIcon from '../input-type-icon'

const mockInputVarTypeIcon = vi.fn(({ type, className }: { type: InputVarType, className?: string }) => (
  <div data-testid="input-var-type-icon" data-type={type} className={className} />
))

vi.mock('@/app/components/workflow/nodes/_base/components/input-var-type-icon', () => ({
  default: (props: { type: InputVarType, className?: string }) => mockInputVarTypeIcon(props),
}))

describe('InputTypeIcon', () => {
  it('should map string variables to the workflow text-input icon', () => {
    render(<InputTypeIcon type="string" className="marker" />)

    expect(screen.getByTestId('input-var-type-icon')).toHaveAttribute('data-type', InputVarType.textInput)
    expect(screen.getByTestId('input-var-type-icon')).toHaveClass('marker')
  })

  it('should map select variables to the workflow select icon', () => {
    render(<InputTypeIcon type="select" className="marker" />)

    expect(screen.getByTestId('input-var-type-icon')).toHaveAttribute('data-type', InputVarType.select)
  })
})
@ -0,0 +1,19 @@
import { fireEvent, render, screen } from '@testing-library/react'
import ModalFoot from '../modal-foot'

describe('ModalFoot', () => {
  it('should trigger cancel and confirm callbacks', () => {
    const onCancel = vi.fn()
    const onConfirm = vi.fn()

    render(
      <ModalFoot onCancel={onCancel} onConfirm={onConfirm} />,
    )

    fireEvent.click(screen.getByRole('button', { name: 'common.operation.cancel' }))
    fireEvent.click(screen.getByRole('button', { name: 'common.operation.save' }))

    expect(onCancel).toHaveBeenCalledTimes(1)
    expect(onConfirm).toHaveBeenCalledTimes(1)
  })
})
@ -0,0 +1,16 @@
import { fireEvent, render, screen } from '@testing-library/react'
import SelectVarType from '../select-var-type'

describe('SelectVarType', () => {
  it('should open the menu and return the selected variable type', () => {
    const onChange = vi.fn()

    render(<SelectVarType onChange={onChange} />)

    fireEvent.click(screen.getByText('common.operation.add'))
    fireEvent.click(screen.getByText('appDebug.variableConfig.checkbox'))

    expect(onChange).toHaveBeenCalledWith('checkbox')
    expect(screen.queryByText('appDebug.variableConfig.checkbox')).not.toBeInTheDocument()
  })
})
Some files were not shown because too many files have changed in this diff.